Compare commits


33 Commits
v0.1.2 ... main

Author SHA1 Message Date
anonpenguin23
d744e26a19 Update package version to 0.3.4 2025-11-28 22:31:21 +02:00
anonpenguin23
681299efdd Add TLS configuration for fetch in HttpClient
- Introduced a new function to create a fetch instance with TLS settings for staging certificates in Node.js environments.
- Updated HttpClient to use this fetch function, allowing self-signed certificates in development and staging environments.
- Enhanced security handling by ensuring that staging certificates are only accepted in non-production settings.
2025-11-28 22:31:01 +02:00
anonpenguin23
3db9f4d8b8 Update package version to 0.3.2 2025-11-22 09:38:59 +02:00
anonpenguin23
ca81e60bcb Refactor HttpClient error logging and SQL query handling
- Improved logging for expected 404 responses in find-one calls, now logged as warnings instead of errors for better clarity.
- Enhanced SQL query detail logging by refining the formatting of arguments in the console output.
- Cleaned up code by removing unnecessary checks for expected 404 errors related to blocked users and conversation participants.
2025-11-21 14:41:04 +02:00
anonpenguin23
091a6d5751 Enhance HttpClient to log SQL query details for rqlite operations
- Added functionality to extract and log SQL query details when making requests to rqlite endpoints, including direct SQL queries and table-based queries.
- Improved error handling for expected 404 responses, logging them as warnings instead of errors for better visibility.
- Updated console logging to include query details when available, enhancing debugging capabilities.
2025-11-21 14:39:07 +02:00
anonpenguin23
c7ef91f8d6 Update package version to 0.3.1 2025-11-11 17:51:03 +02:00
anonpenguin23
54c61e6a47 Add getBinary method to StorageClient for IPFS content retrieval
- Implemented a new method `getBinary` to retrieve content from IPFS by CID, returning the full Response object.
- Added retry logic with exponential backoff for handling eventual consistency in IPFS Cluster, allowing up to 8 attempts for content retrieval.
- Enhanced documentation with usage examples and detailed parameter descriptions for better clarity.
2025-11-08 12:02:35 +02:00
anonpenguin23
a02c7474ab Enhance StorageClient upload method to support optional pinning
- Updated the upload method to accept an options parameter for controlling the pinning behavior of uploaded content.
- Pinning still defaults to true but can be disabled through the options parameter.
- Improved documentation to reflect the new options parameter and its usage in examples.
2025-11-08 08:34:29 +02:00
anonpenguin23
06d58fe85b Enhance HttpClient error handling for expected 404 responses
- Added logic to identify and handle expected 404 errors for conversation participants without logging them as errors.
- Updated comments to clarify the expected behavior for cache misses and non-participant status, improving code readability.
2025-11-07 08:17:51 +02:00
anonpenguin23
2cdb78ee1d Add multiGet functionality to CacheClient for batch retrieval of cache values
- Introduced CacheMultiGetRequest and CacheMultiGetResponse interfaces to define the structure for multi-get operations.
- Implemented multiGet method in CacheClient to retrieve multiple cache values in a single request, handling cache misses gracefully.
- Enhanced error handling to manage 404 errors when the backend does not support multiGet, returning null for missing keys.
2025-11-07 06:47:42 +02:00
anonpenguin23
cee0cd62a9 Enhance CacheClient to handle cache misses gracefully
- Updated the `get` method in CacheClient to return null for cache misses instead of throwing an error.
- Improved error handling to differentiate between expected 404 errors and other exceptions.
- Adjusted related tests to verify null returns for non-existent keys, ensuring robust cache behavior.
2025-11-07 06:03:47 +02:00
anonpenguin23
51f7c433c7 Refactor HttpClient to improve API key and JWT handling for various operations
- Removed redundant console logging in setApiKey method.
- Updated getAuthHeaders method to include cache operations in API key usage logic.
- Enhanced request logging to track request duration and handle expected 404 errors gracefully.
- Improved code readability by formatting SQL queries in the Repository class.
2025-11-06 11:16:10 +02:00
anonpenguin23
64cfe078f0 Remove redundant test for handling get errors with non-existent CID in storage tests 2025-11-06 06:25:18 +02:00
anonpenguin23
5bfb042646 Implement retry logic for content retrieval in StorageClient
- Added retry mechanism for the `get` method to handle eventual consistency in IPFS Cluster.
- Introduced exponential backoff strategy for retries, allowing up to 8 attempts with increasing wait times.
- Enhanced error handling to differentiate between 404 errors and other failures, ensuring robust content retrieval.
2025-11-05 17:30:58 +02:00
anonpenguin23
e233696166 feat: integrate Olric distributed cache support
- Added Olric cache server integration, including configuration options for Olric servers and timeout settings.
- Implemented HTTP handlers for cache operations: health check, get, put, delete, and scan.
- Enhanced Makefile with commands to run the Olric server and manage its configuration.
- Updated README and setup scripts to include Olric installation and configuration instructions.
- Introduced tests for cache handlers to ensure proper functionality and error handling.
2025-11-05 07:30:59 +02:00
anonpenguin23
e30e81d0c9 Update package version to 0.3.0 and introduce CacheClient with caching functionality
- Added CacheClient to manage cache operations including get, put, delete, and scan.
- Updated createClient function to include cache client.
- Added new types and interfaces for cache requests and responses.
- Implemented comprehensive tests for cache functionality, covering health checks, value storage, retrieval, deletion, and scanning.
2025-11-03 15:28:30 +02:00
anonpenguin23
272c6b872c Update package version from 0.1.5 to 0.2.5 in package.json 2025-10-30 09:18:18 +02:00
anonpenguin23
8c9a900e88 Update HttpClient to include proxy operations in API key usage logic and enhance request header logging for proxy paths. 2025-10-30 06:52:29 +02:00
anonpenguin23
23a742e5d4 Add proxy request functionality to NetworkClient and update README with usage examples
- Introduced `proxyAnon` method in `NetworkClient` to facilitate anonymous HTTP requests through the Anyone network.
- Added `ProxyRequest` and `ProxyResponse` interfaces to define the structure of proxy requests and responses.
- Updated README.md with detailed examples on how to use the new proxy functionality, including GET and POST requests.
- Enhanced error handling for proxy requests to ensure graceful failure in case of issues.
2025-10-30 06:21:29 +02:00
anonpenguin23
6f5be86a02 Update HttpClient to support API key usage for both database and pubsub operations; enhance request header logging to include pubsub paths for better debugging. 2025-10-29 14:17:26 +02:00
anonpenguin23
a97fb21200 Enhance AuthClient to support optional refresh tokens and API keys in the authentication response; update token persistence logic for improved session management. Remove debug logging from PubSubClient to reduce console output. 2025-10-29 07:02:46 +02:00
anonpenguin23
0e95ad49c5 Remove debug logging for publish headers in HttpClient to streamline header management and reduce console output. 2025-10-29 06:54:02 +02:00
anonpenguin23
fa357668cb Add getApiKey method to HttpClient and update PubSubClient to use it for authentication. Change WebSocket URL from localhost to 127.0.0.1 for improved compatibility. 2025-10-29 06:53:49 +02:00
anonpenguin23
5c564b6327 Add chain_type property to AuthClient for enhanced blockchain support 2025-10-28 19:18:18 +02:00
anonpenguin23
76bb82d4f8 Refactor AuthClient and HttpClient to allow coexistence of API key and JWT; implement logoutUser method to clear JWT while preserving API key, enhancing user logout functionality. Improve header management in HttpClient for database operations and add debug logging for request headers. 2025-10-28 13:35:54 +02:00
anonpenguin23
dcf8efe428 Refactor AuthClient to streamline imports and add wallet authentication methods including challenge, verify, and getApiKey; improve error logging during logout process. 2025-10-28 13:08:52 +02:00
anonpenguin23
c6dfb0bfed Increase default timeout in HttpClient to 60 seconds for pub/sub operations; simplify WSClient by removing unused configuration options and enhancing connection handling with open and close event handlers; refactor PubSubClient to improve WebSocket subscription management and message handling. 2025-10-28 10:25:10 +02:00
anonpenguin23
2de0cb1983 Enhance README with detailed Pub/Sub messaging features, including multi-subscriber support and message interface; improve HttpClient with per-request timeout handling; update PubSubClient to support raw message handling and validate message envelopes. 2025-10-27 17:30:49 +02:00
anonpenguin23
eab542952e Enhance WebSocket error handling and add base64 encoding/decoding utilities in PubSubClient; refactor message publishing logic to support base64 data 2025-10-27 09:01:14 +02:00
anonpenguin23
acf9540daa Refactor network status interface and update related tests; add dotenv for environment variable management 2025-10-26 15:21:00 +02:00
anonpenguin23
f2d8d35a35 Increment version to 0.1.5 and update package name references in README.md to '@debros/network-ts-sdk'; remove GitHub Actions workflow for GitHub Packages publishing 2025-10-22 09:27:31 +03:00
anonpenguin23
9d9f64c11e Update package name to '@debros/network-ts-sdk' and increment version to 0.1.4; adjust GitHub Actions workflow scope accordingly 2025-10-22 09:24:17 +03:00
anonpenguin23
dab820bb25 Update package version to 0.1.3 and modify GitHub Actions workflow to use 'npm run build' and correct package scope 2025-10-22 09:21:48 +03:00
17 changed files with 1804 additions and 192 deletions

GitHub Actions publish workflow

@ -24,36 +24,9 @@ jobs:
run: npm install --frozen-lockfile
- name: Build SDK
run: npm build
run: npm run build
- name: Publish to npm
run: npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-github:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js for GitHub Packages
uses: actions/setup-node@v4
with:
node-version: '18'
registry-url: 'https://npm.pkg.github.com'
scope: '@network'
- name: Install dependencies
run: npm install --frozen-lockfile
- name: Build SDK
run: npm build
- name: Publish to GitHub Packages
run: npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

README.md

@ -1,4 +1,4 @@
# @network/sdk - TypeScript SDK for DeBros Network
# @debros/network-ts-sdk - TypeScript SDK for DeBros Network
A modern, isomorphic TypeScript SDK for the DeBros Network gateway. Works seamlessly in both Node.js and browser environments with support for database operations, pub/sub messaging, and network management.
@ -14,7 +14,7 @@ A modern, isomorphic TypeScript SDK for the DeBros Network gateway. Works seamle
## Installation
```bash
npm install @network/network-ts-sdk
npm install @debros/network-ts-sdk
```
## Quick Start
@ -22,7 +22,7 @@ npm install @network/network-ts-sdk
### Initialize the Client
```typescript
import { createClient } from "@network/sdk";
import { createClient } from "@debros/network-ts-sdk";
const client = createClient({
baseURL: "http://localhost:6001",
@ -122,10 +122,23 @@ const results = await client.db.transaction([
### Pub/Sub Messaging
The SDK provides a robust pub/sub client with:
- **Multi-subscriber support**: Multiple connections can subscribe to the same topic
- **Namespace isolation**: Topics are scoped to your authenticated namespace
- **Server timestamps**: Messages preserve server-side timestamps
- **Binary-safe**: Supports both string and binary (`Uint8Array`) payloads
- **Strict envelope validation**: Type-safe message parsing with error handling
#### Publish a Message
```typescript
// Publish a string message
await client.pubsub.publish("notifications", "Hello, Network!");
// Publish binary data
const binaryData = new Uint8Array([1, 2, 3, 4]);
await client.pubsub.publish("binary-topic", binaryData);
```
#### Subscribe to Topics
@ -133,7 +146,9 @@ await client.pubsub.publish("notifications", "Hello, Network!");
```typescript
const subscription = await client.pubsub.subscribe("notifications", {
onMessage: (msg) => {
console.log("Received:", msg.data);
console.log("Topic:", msg.topic);
console.log("Data:", msg.data);
console.log("Server timestamp:", new Date(msg.timestamp));
},
onError: (err) => {
console.error("Subscription error:", err);
@ -147,6 +162,52 @@ const subscription = await client.pubsub.subscribe("notifications", {
subscription.close();
```
**Message Interface:**
```typescript
interface Message {
data: string; // Decoded message payload (string)
topic: string; // Topic name
timestamp: number; // Server timestamp in milliseconds
}
```
#### Debug Raw Envelopes
For debugging, you can inspect raw message envelopes before decoding:
```typescript
const subscription = await client.pubsub.subscribe("notifications", {
onMessage: (msg) => {
console.log("Decoded message:", msg.data);
},
onRaw: (envelope) => {
console.log("Raw envelope:", envelope);
// { data: "base64...", timestamp: 1234567890, topic: "notifications" }
},
});
```
#### Multi-Subscriber Support
Multiple subscriptions to the same topic are supported. Each receives its own copy of messages:
```typescript
// First subscriber
const sub1 = await client.pubsub.subscribe("events", {
onMessage: (msg) => console.log("Sub1:", msg.data),
});
// Second subscriber (both receive messages)
const sub2 = await client.pubsub.subscribe("events", {
onMessage: (msg) => console.log("Sub2:", msg.data),
});
// Unsubscribe independently
sub1.close(); // sub2 still active
sub2.close(); // fully unsubscribed
```
#### List Topics
```typescript
@ -211,6 +272,40 @@ peers.forEach((peer) => {
});
```
#### Proxy Requests Through Anyone Network
Make anonymous HTTP requests through the Anyone network:
```typescript
// Simple GET request
const response = await client.network.proxyAnon({
url: "https://api.example.com/data",
method: "GET",
headers: {
Accept: "application/json",
},
});
console.log(response.status_code); // 200
console.log(response.body); // Response data as string
console.log(response.headers); // Response headers
// POST request with body
const postResponse = await client.network.proxyAnon({
url: "https://api.example.com/submit",
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ key: "value" }),
});
// Parse JSON response
const data = JSON.parse(postResponse.body);
```
**Note:** The proxy endpoint requires authentication (API key or JWT) and only works when the Anyone relay is running on the gateway server.
## Configuration
### ClientConfig
@ -234,7 +329,7 @@ interface ClientConfig {
By default, credentials are stored in memory. For browser apps, use localStorage:
```typescript
import { createClient, LocalStorageAdapter } from "@network/sdk";
import { createClient, LocalStorageAdapter } from "@debros/network-ts-sdk";
const client = createClient({
baseURL: "http://localhost:6001",
@ -248,7 +343,7 @@ const client = createClient({
The SDK throws `SDKError` for all errors:
```typescript
import { SDKError } from "@network/sdk";
import { SDKError } from "@debros/network-ts-sdk";
try {
await client.db.query("SELECT * FROM nonexistent");
@ -268,7 +363,7 @@ The SDK works in browsers with minimal setup:
```typescript
// Browser example
import { createClient } from "@network/sdk";
import { createClient } from "@debros/network-ts-sdk";
const client = createClient({
baseURL: "https://gateway.example.com",

package.json

@ -1,6 +1,6 @@
{
"name": "network-ts-sdk",
"version": "0.1.2",
"name": "@debros/network-ts-sdk",
"version": "0.3.4",
"description": "TypeScript SDK for DeBros Network Gateway",
"type": "module",
"main": "./dist/index.js",
@ -36,12 +36,13 @@
},
"devDependencies": {
"@types/node": "^20.0.0",
"typescript": "^5.3.0",
"tsup": "^8.0.0",
"vitest": "^1.0.0",
"eslint": "^8.0.0",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0"
"@typescript-eslint/parser": "^6.0.0",
"dotenv": "^17.2.3",
"eslint": "^8.0.0",
"tsup": "^8.0.0",
"typescript": "^5.3.0",
"vitest": "^1.0.0"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",

pnpm-lock.yaml (generated)

@ -21,6 +21,9 @@ importers:
'@typescript-eslint/parser':
specifier: ^6.0.0
version: 6.21.0(eslint@8.57.1)(typescript@5.9.3)
dotenv:
specifier: ^17.2.3
version: 17.2.3
eslint:
specifier: ^8.0.0
version: 8.57.1
@ -744,6 +747,10 @@ packages:
resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
engines: {node: '>=6.0.0'}
dotenv@17.2.3:
resolution: {integrity: sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==}
engines: {node: '>=12'}
eastasianwidth@0.2.0:
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
@ -2054,6 +2061,8 @@ snapshots:
dependencies:
esutils: 2.0.3
dotenv@17.2.3: {}
eastasianwidth@0.2.0: {}
emoji-regex@8.0.0: {}

src/auth/client.ts

@ -1,10 +1,5 @@
import { HttpClient } from "../core/http";
import {
AuthConfig,
WhoAmI,
StorageAdapter,
MemoryStorage,
} from "./types";
import { AuthConfig, WhoAmI, StorageAdapter, MemoryStorage } from "./types";
export class AuthClient {
private httpClient: HttpClient;
@ -33,14 +28,14 @@ export class AuthClient {
setApiKey(apiKey: string) {
this.currentApiKey = apiKey;
this.currentJwt = undefined;
// Don't clear JWT - it will be cleared explicitly on logout
this.httpClient.setApiKey(apiKey);
this.storage.set("apiKey", apiKey);
}
setJwt(jwt: string) {
this.currentJwt = jwt;
this.currentApiKey = undefined;
// Don't clear API key - keep it as fallback for after logout
this.httpClient.setJwt(jwt);
this.storage.set("jwt", jwt);
}
@ -67,6 +62,51 @@ export class AuthClient {
return token;
}
/**
* Logout user and clear JWT, but preserve API key
* Use this for user logout in apps where API key is app-level credential
*/
async logoutUser(): Promise<void> {
// Attempt server-side logout if using JWT
if (this.currentJwt) {
try {
await this.httpClient.post("/v1/auth/logout", { all: true });
} catch (error) {
// Log warning but don't fail - local cleanup is more important
console.warn(
"Server-side logout failed, continuing with local cleanup:",
error
);
}
}
// Clear JWT only, preserve API key
this.currentJwt = undefined;
this.httpClient.setJwt(undefined);
await this.storage.set("jwt", ""); // Clear JWT from storage
// Ensure API key is loaded and set as active auth method
if (!this.currentApiKey) {
// Try to load from storage
const storedApiKey = await this.storage.get("apiKey");
if (storedApiKey) {
this.currentApiKey = storedApiKey;
}
}
// Restore API key as the active auth method
if (this.currentApiKey) {
this.httpClient.setApiKey(this.currentApiKey);
console.log("[Auth] API key restored after user logout");
} else {
console.warn("[Auth] No API key available after logout");
}
}
/**
* Full logout - clears both JWT and API key
* Use this to completely reset authentication state
*/
async logout(): Promise<void> {
// Only attempt server-side logout if using JWT
// API keys don't support server-side logout with all=true
@ -75,10 +115,13 @@ export class AuthClient {
await this.httpClient.post("/v1/auth/logout", { all: true });
} catch (error) {
// Log warning but don't fail - local cleanup is more important
console.warn('Server-side logout failed, continuing with local cleanup:', error);
console.warn(
"Server-side logout failed, continuing with local cleanup:",
error
);
}
}
// Always clear local state
this.currentApiKey = undefined;
this.currentJwt = undefined;
@ -94,4 +137,95 @@ export class AuthClient {
this.httpClient.setJwt(undefined);
await this.storage.clear();
}
/**
* Request a challenge nonce for wallet authentication
*/
async challenge(params: {
wallet: string;
purpose?: string;
namespace?: string;
}): Promise<{
nonce: string;
wallet: string;
namespace: string;
expires_at: string;
}> {
const response = await this.httpClient.post("/v1/auth/challenge", {
wallet: params.wallet,
purpose: params.purpose || "authentication",
namespace: params.namespace || "default",
});
return response;
}
/**
* Verify wallet signature and get JWT token
*/
async verify(params: {
wallet: string;
nonce: string;
signature: string;
namespace?: string;
chain_type?: "ETH" | "SOL";
}): Promise<{
access_token: string;
refresh_token?: string;
subject: string;
namespace: string;
api_key?: string;
expires_in?: number;
token_type?: string;
}> {
const response = await this.httpClient.post("/v1/auth/verify", {
wallet: params.wallet,
nonce: params.nonce,
signature: params.signature,
namespace: params.namespace || "default",
chain_type: params.chain_type || "ETH",
});
// Persist JWT
this.setJwt(response.access_token);
// Persist API key if server provided it (created in verifyHandler)
if ((response as any).api_key) {
this.setApiKey((response as any).api_key);
}
// Persist refresh token if present (optional, for silent renewal)
if ((response as any).refresh_token) {
await this.storage.set("refreshToken", (response as any).refresh_token);
}
return response as any;
}
/**
* Get API key for wallet (creates namespace ownership)
*/
async getApiKey(params: {
wallet: string;
nonce: string;
signature: string;
namespace?: string;
chain_type?: "ETH" | "SOL";
}): Promise<{
api_key: string;
namespace: string;
wallet: string;
}> {
const response = await this.httpClient.post("/v1/auth/api-key", {
wallet: params.wallet,
nonce: params.nonce,
signature: params.signature,
namespace: params.namespace || "default",
chain_type: params.chain_type || "ETH",
});
// Automatically set the API key
this.setApiKey(response.api_key);
return response;
}
}
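
A minimal sketch of the new wallet login flow built from `challenge`, `verify`, and `logoutUser`. The signer is assumed to be an ethers.js `Wallet`, and signing the raw nonce with `signMessage` is an assumption; the diff only shows that a `signature` for the nonce must be supplied:

```typescript
import { createClient } from "@debros/network-ts-sdk";
import { Wallet } from "ethers"; // assumption: any signer able to sign the nonce works

const client = createClient({ baseURL: "http://localhost:6001" });
const wallet = new Wallet(process.env.WALLET_PRIVATE_KEY!);

// 1. Ask the gateway for a one-time nonce bound to this wallet
const { nonce } = await client.auth.challenge({ wallet: wallet.address });

// 2. Exchange the signed nonce for a JWT (plus an API key / refresh token if the server issues them)
const session = await client.auth.verify({
  wallet: wallet.address,
  nonce,
  signature: await wallet.signMessage(nonce), // assumed message format
  chain_type: "ETH",
});
console.log("authenticated as", session.subject, "in namespace", session.namespace);

// 3. Later: drop the user's JWT but keep the app-level API key active
await client.auth.logoutUser();
```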

src/cache/client.ts (new file)

@ -0,0 +1,203 @@
import { HttpClient } from "../core/http";
import { SDKError } from "../errors";
export interface CacheGetRequest {
dmap: string;
key: string;
}
export interface CacheGetResponse {
key: string;
value: any;
dmap: string;
}
export interface CachePutRequest {
dmap: string;
key: string;
value: any;
ttl?: string; // Duration string like "1h", "30m"
}
export interface CachePutResponse {
status: string;
key: string;
dmap: string;
}
export interface CacheDeleteRequest {
dmap: string;
key: string;
}
export interface CacheDeleteResponse {
status: string;
key: string;
dmap: string;
}
export interface CacheMultiGetRequest {
dmap: string;
keys: string[];
}
export interface CacheMultiGetResponse {
results: Array<{
key: string;
value: any;
}>;
dmap: string;
}
export interface CacheScanRequest {
dmap: string;
match?: string; // Optional regex pattern
}
export interface CacheScanResponse {
keys: string[];
count: number;
dmap: string;
}
export interface CacheHealthResponse {
status: string;
service: string;
}
export class CacheClient {
private httpClient: HttpClient;
constructor(httpClient: HttpClient) {
this.httpClient = httpClient;
}
/**
* Check cache service health
*/
async health(): Promise<CacheHealthResponse> {
return this.httpClient.get("/v1/cache/health");
}
/**
* Get a value from cache
* Returns null if the key is not found (cache miss/expired), which is normal behavior
*/
async get(dmap: string, key: string): Promise<CacheGetResponse | null> {
try {
return await this.httpClient.post<CacheGetResponse>("/v1/cache/get", {
dmap,
key,
});
} catch (error) {
// Cache misses (404 or "key not found" messages) are normal behavior - return null instead of throwing
if (
error instanceof SDKError &&
(error.httpStatus === 404 ||
(error.httpStatus === 500 &&
error.message?.toLowerCase().includes("key not found")))
) {
return null;
}
// Re-throw other errors (network issues, server errors, etc.)
throw error;
}
}
/**
* Put a value into cache
*/
async put(
dmap: string,
key: string,
value: any,
ttl?: string
): Promise<CachePutResponse> {
return this.httpClient.post<CachePutResponse>("/v1/cache/put", {
dmap,
key,
value,
ttl,
});
}
/**
* Delete a value from cache
*/
async delete(dmap: string, key: string): Promise<CacheDeleteResponse> {
return this.httpClient.post<CacheDeleteResponse>("/v1/cache/delete", {
dmap,
key,
});
}
/**
* Get multiple values from cache in a single request
* Returns a map of key -> value (or null if not found)
* Gracefully handles 404 errors (endpoint not implemented) by returning empty results
*/
async multiGet(
dmap: string,
keys: string[]
): Promise<Map<string, any | null>> {
try {
if (keys.length === 0) {
return new Map();
}
const response = await this.httpClient.post<CacheMultiGetResponse>(
"/v1/cache/mget",
{
dmap,
keys,
}
);
// Convert array to Map
const resultMap = new Map<string, any | null>();
// First, mark all keys as null (cache miss)
keys.forEach((key) => {
resultMap.set(key, null);
});
// Then, update with found values
if (response.results) {
response.results.forEach(({ key, value }) => {
resultMap.set(key, value);
});
}
return resultMap;
} catch (error) {
// Handle 404 errors silently (endpoint not implemented on backend)
// This is expected behavior when the backend doesn't support multiGet yet
if (error instanceof SDKError && error.httpStatus === 404) {
// Return map with all nulls silently - caller can fall back to individual gets
const resultMap = new Map<string, any | null>();
keys.forEach((key) => {
resultMap.set(key, null);
});
return resultMap;
}
// Log and return empty results for other errors
const resultMap = new Map<string, any | null>();
keys.forEach((key) => {
resultMap.set(key, null);
});
console.error(`[CacheClient] Error in multiGet for ${dmap}:`, error);
return resultMap;
}
}
/**
* Scan keys in a distributed map, optionally matching a regex pattern
*/
async scan(dmap: string, match?: string): Promise<CacheScanResponse> {
return this.httpClient.post<CacheScanResponse>("/v1/cache/scan", {
dmap,
match,
});
}
}
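
A minimal usage sketch for the new cache client, assuming a gateway with the cache endpoints enabled; the `sessions` dmap and keys are illustrative:

```typescript
import { createClient } from "@debros/network-ts-sdk";

const client = createClient({ baseURL: "http://localhost:6001", apiKey: "ak_..." });

// Store a value with a 30-minute TTL
await client.cache.put("sessions", "user:42", { lastSeen: Date.now() }, "30m");

// Cache misses resolve to null instead of throwing
const hit = await client.cache.get("sessions", "user:42");
console.log(hit?.value ?? "miss");

// Batch lookups: missing keys map to null; a backend without /v1/cache/mget
// yields an all-null map so callers can fall back to individual gets
const values = await client.cache.multiGet("sessions", ["user:42", "user:43"]);
console.log(values.get("user:43")); // null when absent

// Scan keys by regex within the dmap
const { keys } = await client.cache.scan("sessions", "^user:");
console.log(keys);
```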

src/core/http.ts

@ -8,6 +8,29 @@ export interface HttpClientConfig {
fetch?: typeof fetch;
}
/**
* Create a fetch function with proper TLS configuration for staging certificates
* In Node.js, we need to configure TLS to accept Let's Encrypt staging certificates
*/
function createFetchWithTLSConfig(): typeof fetch {
// Check if we're in a Node.js environment
if (typeof process !== "undefined" && process.versions?.node) {
// For testing/staging/development: allow staging certificates
// Let's Encrypt staging certificates are self-signed and not trusted by default
const isDevelopmentOrStaging =
process.env.NODE_ENV !== "production" ||
process.env.DEBROS_ALLOW_STAGING_CERTS === "true" ||
process.env.DEBROS_USE_HTTPS === "true";
if (isDevelopmentOrStaging) {
// Allow self-signed/staging certificates
// WARNING: Only use this in development/testing environments
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
}
}
return globalThis.fetch;
}
export class HttpClient {
private baseURL: string;
private timeout: number;
@ -19,28 +42,73 @@ export class HttpClient {
constructor(config: HttpClientConfig) {
this.baseURL = config.baseURL.replace(/\/$/, "");
this.timeout = config.timeout ?? 30000;
this.timeout = config.timeout ?? 60000; // Increased from 30s to 60s for pub/sub operations
this.maxRetries = config.maxRetries ?? 3;
this.retryDelayMs = config.retryDelayMs ?? 1000;
this.fetch = config.fetch ?? globalThis.fetch;
// Use provided fetch or create one with proper TLS configuration for staging certificates
this.fetch = config.fetch ?? createFetchWithTLSConfig();
}
setApiKey(apiKey?: string) {
this.apiKey = apiKey;
this.jwt = undefined;
// Don't clear JWT - allow both to coexist
}
setJwt(jwt?: string) {
this.jwt = jwt;
this.apiKey = undefined;
// Don't clear API key - allow both to coexist
if (typeof console !== "undefined") {
console.log(
"[HttpClient] JWT set:",
!!jwt,
"API key still present:",
!!this.apiKey
);
}
}
private getAuthHeaders(): Record<string, string> {
private getAuthHeaders(path: string): Record<string, string> {
const headers: Record<string, string> = {};
if (this.jwt) {
headers["Authorization"] = `Bearer ${this.jwt}`;
} else if (this.apiKey) {
headers["X-API-Key"] = this.apiKey;
// For database, pubsub, proxy, and cache operations, ONLY use API key to avoid JWT user context
// interfering with namespace-level authorization
const isDbOperation = path.includes("/v1/rqlite/");
const isPubSubOperation = path.includes("/v1/pubsub/");
const isProxyOperation = path.includes("/v1/proxy/");
const isCacheOperation = path.includes("/v1/cache/");
// For auth operations, prefer API key over JWT to ensure proper authentication
const isAuthOperation = path.includes("/v1/auth/");
if (
isDbOperation ||
isPubSubOperation ||
isProxyOperation ||
isCacheOperation
) {
// For database/pubsub/proxy/cache operations: use only API key (preferred for namespace operations)
if (this.apiKey) {
headers["X-API-Key"] = this.apiKey;
} else if (this.jwt) {
// Fallback to JWT if no API key
headers["Authorization"] = `Bearer ${this.jwt}`;
}
} else if (isAuthOperation) {
// For auth operations: prefer API key over JWT (auth endpoints should use explicit API key)
if (this.apiKey) {
headers["X-API-Key"] = this.apiKey;
}
if (this.jwt) {
headers["Authorization"] = `Bearer ${this.jwt}`;
}
} else {
// For other operations: send both JWT and API key
if (this.jwt) {
headers["Authorization"] = `Bearer ${this.jwt}`;
}
if (this.apiKey) {
headers["X-API-Key"] = this.apiKey;
}
}
return headers;
}
@ -49,6 +117,10 @@ export class HttpClient {
return this.jwt || this.apiKey;
}
getApiKey(): string | undefined {
return this.apiKey;
}
async request<T = any>(
method: "GET" | "POST" | "PUT" | "DELETE",
path: string,
@ -56,8 +128,10 @@ export class HttpClient {
body?: any;
headers?: Record<string, string>;
query?: Record<string, string | number | boolean>;
timeout?: number; // Per-request timeout override
} = {}
): Promise<T> {
const startTime = performance.now(); // Track request start time
const url = new URL(this.baseURL + path);
if (options.query) {
Object.entries(options.query).forEach(([key, value]) => {
@ -67,27 +141,127 @@ export class HttpClient {
const headers: Record<string, string> = {
"Content-Type": "application/json",
...this.getAuthHeaders(),
...this.getAuthHeaders(path),
...options.headers,
};
const controller = new AbortController();
const requestTimeout = options.timeout ?? this.timeout; // Use override or default
const timeoutId = setTimeout(() => controller.abort(), requestTimeout);
const fetchOptions: RequestInit = {
method,
headers,
signal: AbortSignal.timeout(this.timeout),
signal: controller.signal,
};
if (options.body !== undefined) {
fetchOptions.body = JSON.stringify(options.body);
}
return this.requestWithRetry(url.toString(), fetchOptions);
// Extract and log SQL query details for rqlite operations
const isRqliteOperation = path.includes("/v1/rqlite/");
let queryDetails: string | null = null;
if (isRqliteOperation && options.body) {
try {
const body =
typeof options.body === "string"
? JSON.parse(options.body)
: options.body;
if (body.sql) {
// Direct SQL query (query/exec endpoints)
queryDetails = `SQL: ${body.sql}`;
if (body.args && body.args.length > 0) {
queryDetails += ` | Args: [${body.args
.map((a: any) => (typeof a === "string" ? `"${a}"` : a))
.join(", ")}]`;
}
} else if (body.table) {
// Table-based query (find/find-one/select endpoints)
queryDetails = `Table: ${body.table}`;
if (body.criteria && Object.keys(body.criteria).length > 0) {
queryDetails += ` | Criteria: ${JSON.stringify(body.criteria)}`;
}
if (body.options) {
queryDetails += ` | Options: ${JSON.stringify(body.options)}`;
}
if (body.select) {
queryDetails += ` | Select: ${JSON.stringify(body.select)}`;
}
if (body.where) {
queryDetails += ` | Where: ${JSON.stringify(body.where)}`;
}
if (body.limit) {
queryDetails += ` | Limit: ${body.limit}`;
}
if (body.offset) {
queryDetails += ` | Offset: ${body.offset}`;
}
}
} catch (e) {
// Failed to parse body, ignore
}
}
try {
const result = await this.requestWithRetry(
url.toString(),
fetchOptions,
0,
startTime
);
const duration = performance.now() - startTime;
if (typeof console !== "undefined") {
const logMessage = `[HttpClient] ${method} ${path} completed in ${duration.toFixed(
2
)}ms`;
if (queryDetails) {
console.log(logMessage);
console.log(`[HttpClient] ${queryDetails}`);
} else {
console.log(logMessage);
}
}
return result;
} catch (error) {
const duration = performance.now() - startTime;
if (typeof console !== "undefined") {
// For 404 errors on find-one calls, log at warn level (not error) since "not found" is expected
// Application layer handles these cases in try-catch blocks
const is404FindOne =
path === "/v1/rqlite/find-one" &&
error instanceof SDKError &&
error.httpStatus === 404;
if (is404FindOne) {
// Log as warning for visibility, but not as error since it's expected behavior
console.warn(
`[HttpClient] ${method} ${path} returned 404 after ${duration.toFixed(
2
)}ms (expected for optional lookups)`
);
} else {
const errorMessage = `[HttpClient] ${method} ${path} failed after ${duration.toFixed(
2
)}ms:`;
console.error(errorMessage, error);
if (queryDetails) {
console.error(`[HttpClient] ${queryDetails}`);
}
}
}
throw error;
} finally {
clearTimeout(timeoutId);
}
}
private async requestWithRetry(
url: string,
options: RequestInit,
attempt: number = 0
attempt: number = 0,
startTime?: number // Track start time for timing across retries
): Promise<any> {
try {
const response = await this.fetch(url, options);
@ -116,7 +290,7 @@ export class HttpClient {
await new Promise((resolve) =>
setTimeout(resolve, this.retryDelayMs * (attempt + 1))
);
return this.requestWithRetry(url, options, attempt + 1);
return this.requestWithRetry(url, options, attempt + 1, startTime);
}
throw error;
}
@ -152,6 +326,104 @@ export class HttpClient {
return this.request<T>("DELETE", path, options);
}
/**
* Upload a file using multipart/form-data
* This is a special method for file uploads that bypasses JSON serialization
*/
async uploadFile<T = any>(
path: string,
formData: FormData,
options?: {
timeout?: number;
}
): Promise<T> {
const startTime = performance.now(); // Track upload start time
const url = new URL(this.baseURL + path);
const headers: Record<string, string> = {
...this.getAuthHeaders(path),
// Don't set Content-Type - browser will set it with boundary
};
const controller = new AbortController();
const requestTimeout = options?.timeout ?? this.timeout * 5; // 5x timeout for uploads
const timeoutId = setTimeout(() => controller.abort(), requestTimeout);
const fetchOptions: RequestInit = {
method: "POST",
headers,
body: formData,
signal: controller.signal,
};
try {
const result = await this.requestWithRetry(
url.toString(),
fetchOptions,
0,
startTime
);
const duration = performance.now() - startTime;
if (typeof console !== "undefined") {
console.log(
`[HttpClient] POST ${path} (upload) completed in ${duration.toFixed(
2
)}ms`
);
}
return result;
} catch (error) {
const duration = performance.now() - startTime;
if (typeof console !== "undefined") {
console.error(
`[HttpClient] POST ${path} (upload) failed after ${duration.toFixed(
2
)}ms:`,
error
);
}
throw error;
} finally {
clearTimeout(timeoutId);
}
}
/**
* Get a binary response (returns Response object for streaming)
*/
async getBinary(path: string): Promise<Response> {
const url = new URL(this.baseURL + path);
const headers: Record<string, string> = {
...this.getAuthHeaders(path),
};
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), this.timeout * 5); // 5x timeout for downloads
const fetchOptions: RequestInit = {
method: "GET",
headers,
signal: controller.signal,
};
try {
const response = await this.fetch(url.toString(), fetchOptions);
if (!response.ok) {
clearTimeout(timeoutId);
const error = await response.json().catch(() => ({
error: response.statusText,
}));
throw SDKError.fromResponse(response.status, error);
}
return response;
} catch (error) {
clearTimeout(timeoutId);
if (error instanceof SDKError) {
throw error;
}
throw error;
}
}
getToken(): string | undefined {
return this.getAuthToken();
}
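
For callers that would rather not have the helper above flip `NODE_TLS_REJECT_UNAUTHORIZED` process-wide, the existing `fetch` option on `HttpClientConfig` can inject a fetch with TLS settings scoped to this client. A hedged sketch: the undici `Agent` wiring shown here is an assumption, not part of the SDK.

```typescript
import { Agent, fetch as undiciFetch } from "undici"; // assumption: undici available in the app
import { HttpClient } from "@debros/network-ts-sdk";

// Relax certificate checks for this client only (e.g. Let's Encrypt staging certs)
const stagingAgent = new Agent({ connect: { rejectUnauthorized: false } });
const stagingFetch: typeof fetch = (input, init) =>
  undiciFetch(input as any, { ...(init as any), dispatcher: stagingAgent }) as any;

const http = new HttpClient({
  baseURL: "https://staging-gateway.example.com",
  fetch: stagingFetch,
});
http.setApiKey("ak_..."); // illustrative credential

// Per-request timeout override; note that for /v1/rqlite/, /v1/pubsub/, /v1/proxy/
// and /v1/cache/ paths getAuthHeaders(path) attaches only the API key header
const rows = await http.request("POST", "/v1/rqlite/query", {
  body: { sql: "SELECT 1" },
  timeout: 5000,
});
console.log(rows);
```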

src/core/ws.ts

@ -4,10 +4,6 @@ import { SDKError } from "../errors";
export interface WSClientConfig {
wsURL: string;
timeout?: number;
maxReconnectAttempts?: number;
reconnectDelayMs?: number;
heartbeatIntervalMs?: number;
authMode?: "header" | "query";
authToken?: string;
WebSocket?: typeof WebSocket;
}
@ -15,54 +11,53 @@ export interface WSClientConfig {
export type WSMessageHandler = (data: string) => void;
export type WSErrorHandler = (error: Error) => void;
export type WSCloseHandler = () => void;
export type WSOpenHandler = () => void;
/**
* Simple WebSocket client with minimal abstractions
* No complex reconnection, no heartbeats - keep it simple
*/
export class WSClient {
private url: string;
private timeout: number;
private maxReconnectAttempts: number;
private reconnectDelayMs: number;
private heartbeatIntervalMs: number;
private authMode: "header" | "query";
private authToken?: string;
private WebSocketClass: typeof WebSocket;
private ws?: WebSocket;
private reconnectAttempts = 0;
private heartbeatInterval?: NodeJS.Timeout;
private messageHandlers: Set<WSMessageHandler> = new Set();
private errorHandlers: Set<WSErrorHandler> = new Set();
private closeHandlers: Set<WSCloseHandler> = new Set();
private isManuallyClosed = false;
private openHandlers: Set<WSOpenHandler> = new Set();
private isClosed = false;
constructor(config: WSClientConfig) {
this.url = config.wsURL;
this.timeout = config.timeout ?? 30000;
this.maxReconnectAttempts = config.maxReconnectAttempts ?? 5;
this.reconnectDelayMs = config.reconnectDelayMs ?? 1000;
this.heartbeatIntervalMs = config.heartbeatIntervalMs ?? 30000;
this.authMode = config.authMode ?? "header";
this.authToken = config.authToken;
this.WebSocketClass = config.WebSocket ?? WebSocket;
}
/**
* Connect to WebSocket server
*/
connect(): Promise<void> {
return new Promise((resolve, reject) => {
try {
const wsUrl = this.buildWSUrl();
this.ws = new this.WebSocketClass(wsUrl);
// Note: Custom headers via ws library in Node.js are not sent with WebSocket upgrade requests
// so we rely on query parameters for authentication
this.isClosed = false;
const timeout = setTimeout(() => {
this.ws?.close();
reject(new SDKError("WebSocket connection timeout", 408, "WS_TIMEOUT"));
reject(
new SDKError("WebSocket connection timeout", 408, "WS_TIMEOUT")
);
}, this.timeout);
this.ws.addEventListener("open", () => {
clearTimeout(timeout);
this.reconnectAttempts = 0;
this.startHeartbeat();
console.log("[WSClient] Connected to", this.url);
this.openHandlers.forEach((handler) => handler());
resolve();
});
@ -72,24 +67,16 @@ export class WSClient {
});
this.ws.addEventListener("error", (event: Event) => {
console.error("[WSClient] WebSocket error:", event);
clearTimeout(timeout);
const error = new SDKError(
"WebSocket error",
500,
"WS_ERROR",
event
);
const error = new SDKError("WebSocket error", 500, "WS_ERROR", event);
this.errorHandlers.forEach((handler) => handler(error));
});
this.ws.addEventListener("close", () => {
clearTimeout(timeout);
this.stopHeartbeat();
if (!this.isManuallyClosed) {
this.attemptReconnect();
} else {
this.closeHandlers.forEach((handler) => handler());
}
console.log("[WSClient] Connection closed");
this.closeHandlers.forEach((handler) => handler());
});
} catch (error) {
reject(error);
@ -97,86 +84,106 @@ export class WSClient {
});
}
/**
* Build WebSocket URL with auth token
*/
private buildWSUrl(): string {
let url = this.url;
// Always append auth token as query parameter for compatibility
// Works in both Node.js and browser environments
if (this.authToken) {
const separator = url.includes("?") ? "&" : "?";
const paramName = this.authToken.startsWith("ak_") ? "api_key" : "token";
url += `${separator}${paramName}=${encodeURIComponent(this.authToken)}`;
}
return url;
}
private startHeartbeat() {
this.heartbeatInterval = setInterval(() => {
if (this.ws?.readyState === WebSocket.OPEN) {
this.ws.send(JSON.stringify({ type: "ping" }));
}
}, this.heartbeatIntervalMs);
}
private stopHeartbeat() {
if (this.heartbeatInterval) {
clearInterval(this.heartbeatInterval);
this.heartbeatInterval = undefined;
}
}
private attemptReconnect() {
if (this.reconnectAttempts < this.maxReconnectAttempts) {
this.reconnectAttempts++;
const delayMs = this.reconnectDelayMs * this.reconnectAttempts;
setTimeout(() => {
this.connect().catch((error) => {
this.errorHandlers.forEach((handler) => handler(error));
});
}, delayMs);
} else {
this.closeHandlers.forEach((handler) => handler());
}
}
onMessage(handler: WSMessageHandler) {
/**
* Register message handler
*/
onMessage(handler: WSMessageHandler): () => void {
this.messageHandlers.add(handler);
return () => this.messageHandlers.delete(handler);
}
onError(handler: WSErrorHandler) {
/**
* Unregister message handler
*/
offMessage(handler: WSMessageHandler): void {
this.messageHandlers.delete(handler);
}
/**
* Register error handler
*/
onError(handler: WSErrorHandler): () => void {
this.errorHandlers.add(handler);
return () => this.errorHandlers.delete(handler);
}
onClose(handler: WSCloseHandler) {
/**
* Unregister error handler
*/
offError(handler: WSErrorHandler): void {
this.errorHandlers.delete(handler);
}
/**
* Register close handler
*/
onClose(handler: WSCloseHandler): () => void {
this.closeHandlers.add(handler);
return () => this.closeHandlers.delete(handler);
}
send(data: string) {
/**
* Unregister close handler
*/
offClose(handler: WSCloseHandler): void {
this.closeHandlers.delete(handler);
}
/**
* Register open handler
*/
onOpen(handler: WSOpenHandler): () => void {
this.openHandlers.add(handler);
return () => this.openHandlers.delete(handler);
}
/**
* Send data through WebSocket
*/
send(data: string): void {
if (this.ws?.readyState !== WebSocket.OPEN) {
throw new SDKError(
"WebSocket is not connected",
500,
"WS_NOT_CONNECTED"
);
throw new SDKError("WebSocket is not connected", 500, "WS_NOT_CONNECTED");
}
this.ws.send(data);
}
close() {
this.isManuallyClosed = true;
this.stopHeartbeat();
/**
* Close WebSocket connection
*/
close(): void {
if (this.isClosed) {
return;
}
this.isClosed = true;
this.ws?.close();
}
/**
* Check if WebSocket is connected
*/
isConnected(): boolean {
return this.ws?.readyState === WebSocket.OPEN;
return !this.isClosed && this.ws?.readyState === WebSocket.OPEN;
}
setAuthToken(token?: string) {
/**
* Update auth token
*/
setAuthToken(token?: string): void {
this.authToken = token;
}
}
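
The slimmed-down `WSClient` no longer reconnects or heartbeats on its own; the caller owns the connection lifecycle, and every `on*` registration now returns a detach function. A short sketch (URL and token are illustrative; on Node versions without a global `WebSocket`, pass an implementation via the `WebSocket` option):

```typescript
import { WSClient } from "@debros/network-ts-sdk";

const ws = new WSClient({
  wsURL: "ws://127.0.0.1:6001/v1/pubsub/ws?topic=events",
  authToken: "ak_...", // appended as ?api_key=... (or ?token=... for JWTs)
});

const offMessage = ws.onMessage((frame) => console.log("frame:", frame));
ws.onClose(() => console.log("closed (no automatic reconnect)"));

await ws.connect();
ws.send(JSON.stringify({ type: "ping" }));

offMessage(); // detach just this handler
ws.close();   // safe to call repeatedly: subsequent calls are no-ops
```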

src/db/repository.ts

@ -98,7 +98,9 @@ export class Repository<T extends Record<string, any>> {
private buildInsertSql(entity: T): string {
const columns = Object.keys(entity).filter((k) => entity[k] !== undefined);
const placeholders = columns.map(() => "?").join(", ");
return `INSERT INTO ${this.tableName} (${columns.join(", ")}) VALUES (${placeholders})`;
return `INSERT INTO ${this.tableName} (${columns.join(
", "
)}) VALUES (${placeholders})`;
}
private buildInsertArgs(entity: T): any[] {
@ -111,7 +113,9 @@ export class Repository<T extends Record<string, any>> {
const columns = Object.keys(entity)
.filter((k) => entity[k] !== undefined && k !== this.primaryKey)
.map((k) => `${k} = ?`);
return `UPDATE ${this.tableName} SET ${columns.join(", ")} WHERE ${this.primaryKey} = ?`;
return `UPDATE ${this.tableName} SET ${columns.join(", ")} WHERE ${
this.primaryKey
} = ?`;
}
private buildUpdateArgs(entity: T): any[] {

src/index.ts

@ -3,8 +3,14 @@ import { AuthClient } from "./auth/client";
import { DBClient } from "./db/client";
import { PubSubClient } from "./pubsub/client";
import { NetworkClient } from "./network/client";
import { CacheClient } from "./cache/client";
import { StorageClient } from "./storage/client";
import { WSClientConfig } from "./core/ws";
import { StorageAdapter, MemoryStorage, LocalStorageAdapter } from "./auth/types";
import {
StorageAdapter,
MemoryStorage,
LocalStorageAdapter,
} from "./auth/types";
export interface ClientConfig extends Omit<HttpClientConfig, "fetch"> {
apiKey?: string;
@ -19,6 +25,8 @@ export interface Client {
db: DBClient;
pubsub: PubSubClient;
network: NetworkClient;
cache: CacheClient;
storage: StorageClient;
}
export function createClient(config: ClientConfig): Client {
@ -38,8 +46,9 @@ export function createClient(config: ClientConfig): Client {
});
// Derive WebSocket URL from baseURL if not explicitly provided
const wsURL = config.wsConfig?.wsURL ??
config.baseURL.replace(/^http/, 'ws').replace(/\/$/, '');
const wsURL =
config.wsConfig?.wsURL ??
config.baseURL.replace(/^http/, "ws").replace(/\/$/, "");
const db = new DBClient(httpClient);
const pubsub = new PubSubClient(httpClient, {
@ -47,16 +56,19 @@ export function createClient(config: ClientConfig): Client {
wsURL,
});
const network = new NetworkClient(httpClient);
const cache = new CacheClient(httpClient);
const storage = new StorageClient(httpClient);
return {
auth,
db,
pubsub,
network,
cache,
storage,
};
}
// Re-exports
export { HttpClient } from "./core/http";
export { WSClient } from "./core/ws";
export { AuthClient } from "./auth/client";
@ -65,13 +77,11 @@ export { QueryBuilder } from "./db/qb";
export { Repository } from "./db/repository";
export { PubSubClient, Subscription } from "./pubsub/client";
export { NetworkClient } from "./network/client";
export { CacheClient } from "./cache/client";
export { StorageClient } from "./storage/client";
export { SDKError } from "./errors";
export { MemoryStorage, LocalStorageAdapter } from "./auth/types";
export type {
StorageAdapter,
AuthConfig,
WhoAmI,
} from "./auth/types";
export type { StorageAdapter, AuthConfig, WhoAmI } from "./auth/types";
export type * from "./db/types";
export type {
Message,
@ -79,4 +89,28 @@ export type {
ErrorHandler,
CloseHandler,
} from "./pubsub/client";
export type { PeerInfo, NetworkStatus } from "./network/client";
export type {
PeerInfo,
NetworkStatus,
ProxyRequest,
ProxyResponse,
} from "./network/client";
export type {
CacheGetRequest,
CacheGetResponse,
CachePutRequest,
CachePutResponse,
CacheDeleteRequest,
CacheDeleteResponse,
CacheMultiGetRequest,
CacheMultiGetResponse,
CacheScanRequest,
CacheScanResponse,
CacheHealthResponse,
} from "./cache/client";
export type {
StorageUploadResponse,
StoragePinRequest,
StoragePinResponse,
StorageStatus,
} from "./storage/client";

src/network/client.ts

@ -7,9 +7,25 @@ export interface PeerInfo {
}
export interface NetworkStatus {
healthy: boolean;
peers: number;
uptime?: number;
node_id: string;
connected: boolean;
peer_count: number;
database_size: number;
uptime: number;
}
export interface ProxyRequest {
url: string;
method: string;
headers?: Record<string, string>;
body?: string;
}
export interface ProxyResponse {
status_code: number;
headers: Record<string, string>;
body: string;
error?: string;
}
export class NetworkClient {
@ -35,7 +51,9 @@ export class NetworkClient {
* Get network status.
*/
async status(): Promise<NetworkStatus> {
const response = await this.httpClient.get<NetworkStatus>("/v1/status");
const response = await this.httpClient.get<NetworkStatus>(
"/v1/network/status"
);
return response;
}
@ -62,4 +80,40 @@ export class NetworkClient {
async disconnect(peerId: string): Promise<void> {
await this.httpClient.post("/v1/network/disconnect", { peer_id: peerId });
}
/**
* Proxy an HTTP request through the Anyone network.
* Requires authentication (API key or JWT).
*
* @param request - The proxy request configuration
* @returns The proxied response
* @throws {SDKError} If the Anyone proxy is not available or the request fails
*
* @example
* ```ts
* const response = await client.network.proxyAnon({
* url: 'https://api.example.com/data',
* method: 'GET',
* headers: {
* 'Accept': 'application/json'
* }
* });
*
* console.log(response.status_code); // 200
* console.log(response.body); // Response data
* ```
*/
async proxyAnon(request: ProxyRequest): Promise<ProxyResponse> {
const response = await this.httpClient.post<ProxyResponse>(
"/v1/proxy/anon",
request
);
// Check if the response contains an error
if (response.error) {
throw new Error(`Proxy request failed: ${response.error}`);
}
return response;
}
}
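
Since both the endpoint (now `/v1/network/status`) and the `NetworkStatus` shape changed here, callers reading the old `healthy`/`peers` fields need updating. A small sketch against the new fields (units for `database_size` and `uptime` are not stated in this diff; bytes and seconds are an assumption):

```typescript
import { createClient } from "@debros/network-ts-sdk";

const client = createClient({ baseURL: "http://localhost:6001", apiKey: "ak_..." });

const status = await client.network.status();
if (status.connected) {
  console.log(`node ${status.node_id}: ${status.peer_count} peers`);
  console.log(`db ~${status.database_size} bytes, up ~${status.uptime}s`); // assumed units
}
```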

src/pubsub/client.ts

@ -4,13 +4,64 @@ import { WSClient, WSClientConfig } from "../core/ws";
export interface Message {
data: string;
topic: string;
timestamp?: number;
timestamp: number;
}
export interface RawEnvelope {
data: string; // base64-encoded
timestamp: number;
topic: string;
}
// Cross-platform base64 encoding/decoding utilities
function base64Encode(str: string): string {
if (typeof Buffer !== "undefined") {
return Buffer.from(str).toString("base64");
} else if (typeof btoa !== "undefined") {
return btoa(
encodeURIComponent(str).replace(/%([0-9A-F]{2})/g, (match, p1) =>
String.fromCharCode(parseInt(p1, 16))
)
);
}
throw new Error("No base64 encoding method available");
}
function base64EncodeBytes(bytes: Uint8Array): string {
if (typeof Buffer !== "undefined") {
return Buffer.from(bytes).toString("base64");
} else if (typeof btoa !== "undefined") {
let binary = "";
for (let i = 0; i < bytes.length; i++) {
binary += String.fromCharCode(bytes[i]);
}
return btoa(binary);
}
throw new Error("No base64 encoding method available");
}
function base64Decode(b64: string): string {
if (typeof Buffer !== "undefined") {
return Buffer.from(b64, "base64").toString("utf-8");
} else if (typeof atob !== "undefined") {
const binary = atob(b64);
const bytes = new Uint8Array(binary.length);
for (let i = 0; i < binary.length; i++) {
bytes[i] = binary.charCodeAt(i);
}
return new TextDecoder().decode(bytes);
}
throw new Error("No base64 decoding method available");
}
export type MessageHandler = (message: Message) => void;
export type ErrorHandler = (error: Error) => void;
export type CloseHandler = () => void;
/**
* Simple PubSub client - one WebSocket connection per topic
* No connection pooling, no reference counting - keep it simple
*/
export class PubSubClient {
private httpClient: HttpClient;
private wsConfig: Partial<WSClientConfig>;
@ -21,20 +72,30 @@ export class PubSubClient {
}
/**
* Publish a message to a topic.
* Publish a message to a topic via HTTP
*/
async publish(topic: string, data: string | Uint8Array): Promise<void> {
const dataBase64 =
typeof data === "string" ? Buffer.from(data).toString("base64") : Buffer.from(data).toString("base64");
let dataBase64: string;
if (typeof data === "string") {
dataBase64 = base64Encode(data);
} else {
dataBase64 = base64EncodeBytes(data);
}
await this.httpClient.post("/v1/pubsub/publish", {
topic,
data_base64: dataBase64,
});
await this.httpClient.post(
"/v1/pubsub/publish",
{
topic,
data_base64: dataBase64,
},
{
timeout: 30000,
}
);
}
/**
* List active topics in the current namespace.
* List active topics in the current namespace
*/
async topics(): Promise<string[]> {
const response = await this.httpClient.get<{ topics: string[] }>(
@ -44,8 +105,8 @@ export class PubSubClient {
}
/**
* Subscribe to a topic via WebSocket.
* Returns a subscription object with event handlers.
* Subscribe to a topic via WebSocket
* Creates one WebSocket connection per topic
*/
async subscribe(
topic: string,
@ -55,16 +116,23 @@ export class PubSubClient {
onClose?: CloseHandler;
} = {}
): Promise<Subscription> {
const wsUrl = new URL(this.wsConfig.wsURL || "ws://localhost:6001");
// Build WebSocket URL for this topic
const wsUrl = new URL(this.wsConfig.wsURL || "ws://127.0.0.1:6001");
wsUrl.pathname = "/v1/pubsub/ws";
wsUrl.searchParams.set("topic", topic);
const authToken = this.httpClient.getApiKey() ?? this.httpClient.getToken();
// Create WebSocket client
const wsClient = new WSClient({
...this.wsConfig,
wsURL: wsUrl.toString(),
authToken: this.httpClient.getToken(),
authToken,
});
await wsClient.connect();
// Create subscription wrapper
const subscription = new Subscription(wsClient, topic);
if (handlers.onMessage) {
@ -77,66 +145,144 @@ export class PubSubClient {
subscription.onClose(handlers.onClose);
}
await wsClient.connect();
return subscription;
}
}
/**
* Subscription represents an active WebSocket subscription to a topic
*/
export class Subscription {
private wsClient: WSClient;
private topic: string;
private messageHandlers: Set<MessageHandler> = new Set();
private errorHandlers: Set<ErrorHandler> = new Set();
private closeHandlers: Set<CloseHandler> = new Set();
private isClosed = false;
private wsMessageHandler: ((data: string) => void) | null = null;
private wsErrorHandler: ((error: Error) => void) | null = null;
private wsCloseHandler: (() => void) | null = null;
constructor(wsClient: WSClient, topic: string) {
this.wsClient = wsClient;
this.topic = topic;
this.wsClient.onMessage((data) => {
// Register message handler
this.wsMessageHandler = (data) => {
try {
// Parse gateway JSON envelope: {data: base64String, timestamp, topic}
const envelope: RawEnvelope = JSON.parse(data);
// Validate envelope structure
if (!envelope || typeof envelope !== "object") {
throw new Error("Invalid envelope: not an object");
}
if (!envelope.data || typeof envelope.data !== "string") {
throw new Error("Invalid envelope: missing or invalid data field");
}
if (!envelope.topic || typeof envelope.topic !== "string") {
throw new Error("Invalid envelope: missing or invalid topic field");
}
if (typeof envelope.timestamp !== "number") {
throw new Error(
"Invalid envelope: missing or invalid timestamp field"
);
}
// Decode base64 data
const messageData = base64Decode(envelope.data);
const message: Message = {
topic: this.topic,
data: data,
timestamp: Date.now(),
topic: envelope.topic,
data: messageData,
timestamp: envelope.timestamp,
};
console.log("[Subscription] Received message on topic:", this.topic);
this.messageHandlers.forEach((handler) => handler(message));
} catch (error) {
console.error("[Subscription] Error processing message:", error);
this.errorHandlers.forEach((handler) =>
handler(error instanceof Error ? error : new Error(String(error)))
);
}
});
};
this.wsClient.onError((error) => {
this.wsClient.onMessage(this.wsMessageHandler);
// Register error handler
this.wsErrorHandler = (error) => {
this.errorHandlers.forEach((handler) => handler(error));
});
};
this.wsClient.onError(this.wsErrorHandler);
this.wsClient.onClose(() => {
// Register close handler
this.wsCloseHandler = () => {
this.closeHandlers.forEach((handler) => handler());
});
};
this.wsClient.onClose(this.wsCloseHandler);
}
onMessage(handler: MessageHandler) {
/**
* Register message handler
*/
onMessage(handler: MessageHandler): () => void {
this.messageHandlers.add(handler);
return () => this.messageHandlers.delete(handler);
}
onError(handler: ErrorHandler) {
/**
* Register error handler
*/
onError(handler: ErrorHandler): () => void {
this.errorHandlers.add(handler);
return () => this.errorHandlers.delete(handler);
}
onClose(handler: CloseHandler) {
/**
* Register close handler
*/
onClose(handler: CloseHandler): () => void {
this.closeHandlers.add(handler);
return () => this.closeHandlers.delete(handler);
}
close() {
/**
* Close subscription and underlying WebSocket
*/
close(): void {
if (this.isClosed) {
return;
}
this.isClosed = true;
// Remove handlers from WSClient
if (this.wsMessageHandler) {
this.wsClient.offMessage(this.wsMessageHandler);
this.wsMessageHandler = null;
}
if (this.wsErrorHandler) {
this.wsClient.offError(this.wsErrorHandler);
this.wsErrorHandler = null;
}
if (this.wsCloseHandler) {
this.wsClient.offClose(this.wsCloseHandler);
this.wsCloseHandler = null;
}
// Clear all local handlers
this.messageHandlers.clear();
this.errorHandlers.clear();
this.closeHandlers.clear();
// Close WebSocket connection
this.wsClient.close();
}
/**
* Check if subscription is active
*/
isConnected(): boolean {
return this.wsClient.isConnected();
return !this.isClosed && this.wsClient.isConnected();
}
}
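
Because `Subscription.onMessage` (like the other `on*` registrations) now returns an unsubscribe function, individual handlers can be detached without tearing down the topic's WebSocket. A short sketch:

```typescript
import { createClient } from "@debros/network-ts-sdk";

const client = createClient({ baseURL: "http://localhost:6001", apiKey: "ak_..." });

const sub = await client.pubsub.subscribe("events", {
  onError: (err) => console.error("subscription error:", err),
});

// Handlers can be added after subscribing; each registration returns a detach function
const detach = sub.onMessage((msg) => {
  console.log(`[${msg.topic}] ${msg.data} @ ${new Date(msg.timestamp).toISOString()}`);
});

await client.pubsub.publish("events", "hello");                   // string payload
await client.pubsub.publish("events", new Uint8Array([1, 2, 3])); // binary payload

detach();    // stop delivering to this handler only
sub.close(); // close the underlying WebSocket for the topic
```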

src/storage/client.ts (new file)

@ -0,0 +1,270 @@
import { HttpClient } from "../core/http";
export interface StorageUploadResponse {
cid: string;
name: string;
size: number;
}
export interface StoragePinRequest {
cid: string;
name?: string;
}
export interface StoragePinResponse {
cid: string;
name: string;
}
export interface StorageStatus {
cid: string;
name: string;
status: string; // "pinned", "pinning", "queued", "unpinned", "error"
replication_min: number;
replication_max: number;
replication_factor: number;
peers: string[];
error?: string;
}
export class StorageClient {
private httpClient: HttpClient;
constructor(httpClient: HttpClient) {
this.httpClient = httpClient;
}
/**
* Upload content to IPFS and optionally pin it.
* Supports both File objects (browser) and Buffer/ReadableStream (Node.js).
*
* @param file - File to upload (File, Blob, or Buffer)
* @param name - Optional filename
* @param options - Optional upload options
* @param options.pin - Whether to pin the content (default: true). Pinning happens asynchronously on the backend.
* @returns Upload result with CID
*
* @example
* ```ts
* // Browser
* const fileInput = document.querySelector('input[type="file"]');
* const file = fileInput.files[0];
* const result = await client.storage.upload(file, file.name);
* console.log(result.cid);
*
* // Node.js
* const fs = require('fs');
* const fileBuffer = fs.readFileSync('image.jpg');
* const result = await client.storage.upload(fileBuffer, 'image.jpg', { pin: true });
* ```
*/
async upload(
file: File | Blob | ArrayBuffer | Uint8Array | ReadableStream<Uint8Array>,
name?: string,
options?: {
pin?: boolean;
}
): Promise<StorageUploadResponse> {
// Create FormData for multipart upload
const formData = new FormData();
// Handle different input types
if (file instanceof File) {
formData.append("file", file);
} else if (file instanceof Blob) {
formData.append("file", file, name);
} else if (file instanceof ArrayBuffer) {
const blob = new Blob([file]);
formData.append("file", blob, name);
} else if (file instanceof Uint8Array) {
// Convert Uint8Array to ArrayBuffer for Blob constructor
const buffer = file.buffer.slice(
file.byteOffset,
file.byteOffset + file.byteLength
) as ArrayBuffer;
const blob = new Blob([buffer], { type: "application/octet-stream" });
formData.append("file", blob, name);
} else if (file instanceof ReadableStream) {
// For ReadableStream, read the entire stream into a Blob first.
// This buffers the whole payload in memory; in practice, prefer passing File/Blob/Buffer.
const chunks: ArrayBuffer[] = [];
const reader = file.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
const buffer = value.buffer.slice(
value.byteOffset,
value.byteOffset + value.byteLength
) as ArrayBuffer;
chunks.push(buffer);
}
const blob = new Blob(chunks);
formData.append("file", blob, name);
} else {
throw new Error(
"Unsupported file type. Use File, Blob, ArrayBuffer, Uint8Array, or ReadableStream."
);
}
// Add pin flag (default: true)
const shouldPin = options?.pin !== false; // Default to true
formData.append("pin", shouldPin ? "true" : "false");
return this.httpClient.uploadFile<StorageUploadResponse>(
"/v1/storage/upload",
formData,
{ timeout: 300000 } // 5 minute timeout for large files
);
}
/**
* Pin an existing CID
*
* @param cid - Content ID to pin
* @param name - Optional name for the pin
* @returns Pin result
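*
* @example
* A minimal sketch, assuming `client` is an initialized SDK client and `result` came from a prior `upload` call:
* ```ts
* const pinned = await client.storage.pin(result.cid, "my-backup");
* console.log(pinned.cid, pinned.name);
* ```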
*/
async pin(cid: string, name?: string): Promise<StoragePinResponse> {
return this.httpClient.post<StoragePinResponse>("/v1/storage/pin", {
cid,
name,
});
}
/**
* Get the pin status for a CID
*
* @param cid - Content ID to check
* @returns Pin status information
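*
* @example
* A minimal sketch, assuming `client` is an initialized SDK client and `cid` was returned by an earlier upload:
* ```ts
* const status = await client.storage.status(cid);
* if (status.status === "pinned") {
*   console.log(`replicated to ${status.peers.length} peer(s)`);
* }
* ```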
*/
async status(cid: string): Promise<StorageStatus> {
return this.httpClient.get<StorageStatus>(`/v1/storage/status/${cid}`);
}
/**
* Retrieve content from IPFS by CID
*
* @param cid - Content ID to retrieve
* @returns ReadableStream of the content
*
* @example
* ```ts
* const stream = await client.storage.get(cid);
* const reader = stream.getReader();
* while (true) {
* const { done, value } = await reader.read();
* if (done) break;
* // Process chunk
* }
* ```
*/
async get(cid: string): Promise<ReadableStream<Uint8Array>> {
// Retry logic for content retrieval - content may not be immediately available
// after upload due to eventual consistency in IPFS Cluster
// IPFS Cluster pins can take 2-3+ seconds to complete across all nodes
const maxAttempts = 8;
let lastError: Error | null = null;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
try {
const response = await this.httpClient.getBinary(
`/v1/storage/get/${cid}`
);
if (!response.body) {
throw new Error("Response body is null");
}
return response.body;
} catch (error: any) {
lastError = error;
// Check if this is a 404 error (content not found)
const isNotFound =
error?.httpStatus === 404 ||
error?.message?.includes("not found") ||
error?.message?.includes("404");
// If it's not a 404 error, or this is the last attempt, give up
if (!isNotFound || attempt === maxAttempts) {
throw error;
}
// Wait before retrying (linear backoff: 2.5s, 5s, 7.5s, ...)
// Across the 7 possible waits this allows up to ~70 seconds total (2500ms * (1+2+...+7)), covering typical pin completion
const backoffMs = attempt * 2500;
await new Promise((resolve) => setTimeout(resolve, backoffMs));
}
}
// This should never be reached, but TypeScript needs it
throw lastError || new Error("Failed to retrieve content");
}
/**
* Retrieve content from IPFS by CID and return the full Response object
* Useful when you need access to response headers (e.g., content-length)
*
* @param cid - Content ID to retrieve
* @returns Response object with body stream and headers
*
* @example
* ```ts
* const response = await client.storage.getBinary(cid);
* const contentLength = response.headers.get('content-length');
* const reader = response.body.getReader();
* // ... read stream
* ```
*/
async getBinary(cid: string): Promise<Response> {
// Retry logic for content retrieval - content may not be immediately available
// after upload due to eventual consistency in IPFS Cluster
// IPFS Cluster pins can take 2-3+ seconds to complete across all nodes
const maxAttempts = 8;
let lastError: Error | null = null;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
try {
const response = await this.httpClient.getBinary(
`/v1/storage/get/${cid}`
);
if (!response) {
throw new Error("Response is null");
}
return response;
} catch (error: any) {
lastError = error;
// Check if this is a 404 error (content not found)
const isNotFound =
error?.httpStatus === 404 ||
error?.message?.includes("not found") ||
error?.message?.includes("404");
// If it's not a 404 error, or this is the last attempt, give up
if (!isNotFound || attempt === maxAttempts) {
throw error;
}
// Wait before retrying (linear backoff: 2.5s, 5s, 7.5s, ...)
// Across the 7 possible waits this allows up to ~70 seconds total (2500ms * (1+2+...+7)), covering typical pin completion
const backoffMs = attempt * 2500;
await new Promise((resolve) => setTimeout(resolve, backoffMs));
}
}
// This should never be reached, but TypeScript needs it
throw lastError || new Error("Failed to retrieve content");
}
/**
* Unpin a CID
*
* @param cid - Content ID to unpin
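*
* @example
* A minimal sketch, assuming `client` is an initialized SDK client:
* ```ts
* await client.storage.unpin(cid);
* // The content may remain retrievable until garbage collection runs on the nodes
* ```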
*/
async unpin(cid: string): Promise<void> {
await this.httpClient.delete(`/v1/storage/unpin/${cid}`);
}
}

166
tests/e2e/cache.test.ts Normal file

@@ -0,0 +1,166 @@
import { describe, it, expect, beforeEach } from "vitest";
import { createTestClient, skipIfNoGateway } from "./setup";
describe("Cache", () => {
if (skipIfNoGateway()) {
console.log("Skipping cache tests - gateway not available");
return;
}
const testDMap = "test-cache";
beforeEach(async () => {
// Clean up test keys before each test
const client = await createTestClient();
try {
const keys = await client.cache.scan(testDMap);
for (const key of keys.keys) {
await client.cache.delete(testDMap, key);
}
} catch (err) {
// Ignore errors during cleanup
}
});
it("should check cache health", async () => {
const client = await createTestClient();
const health = await client.cache.health();
expect(health.status).toBe("ok");
expect(health.service).toBe("olric");
});
it("should put and get a value", async () => {
const client = await createTestClient();
const testKey = "test-key-1";
const testValue = "test-value-1";
// Put value
const putResult = await client.cache.put(testDMap, testKey, testValue);
expect(putResult.status).toBe("ok");
expect(putResult.key).toBe(testKey);
expect(putResult.dmap).toBe(testDMap);
// Get value
const getResult = await client.cache.get(testDMap, testKey);
expect(getResult).not.toBeNull();
expect(getResult!.key).toBe(testKey);
expect(getResult!.value).toBe(testValue);
expect(getResult!.dmap).toBe(testDMap);
});
it("should put and get complex objects", async () => {
const client = await createTestClient();
const testKey = "test-key-2";
const testValue = {
name: "John",
age: 30,
tags: ["developer", "golang"],
};
// Put object
await client.cache.put(testDMap, testKey, testValue);
// Get object
const getResult = await client.cache.get(testDMap, testKey);
expect(getResult).not.toBeNull();
expect(getResult!.value).toBeDefined();
expect(getResult!.value.name).toBe(testValue.name);
expect(getResult!.value.age).toBe(testValue.age);
});
it("should put value with TTL", async () => {
const client = await createTestClient();
const testKey = "test-key-ttl";
const testValue = "ttl-value";
// Put with TTL
const putResult = await client.cache.put(
testDMap,
testKey,
testValue,
"5m"
);
expect(putResult.status).toBe("ok");
// Verify value exists
const getResult = await client.cache.get(testDMap, testKey);
expect(getResult).not.toBeNull();
expect(getResult!.value).toBe(testValue);
});
it("should delete a value", async () => {
const client = await createTestClient();
const testKey = "test-key-delete";
const testValue = "delete-me";
// Put value
await client.cache.put(testDMap, testKey, testValue);
// Verify it exists
const before = await client.cache.get(testDMap, testKey);
expect(before).not.toBeNull();
expect(before!.value).toBe(testValue);
// Delete value
const deleteResult = await client.cache.delete(testDMap, testKey);
expect(deleteResult.status).toBe("ok");
expect(deleteResult.key).toBe(testKey);
// Verify it's deleted (should return null, not throw)
const after = await client.cache.get(testDMap, testKey);
expect(after).toBeNull();
});
it("should scan keys", async () => {
const client = await createTestClient();
// Put multiple keys
await client.cache.put(testDMap, "key-1", "value-1");
await client.cache.put(testDMap, "key-2", "value-2");
await client.cache.put(testDMap, "key-3", "value-3");
// Scan all keys
const scanResult = await client.cache.scan(testDMap);
expect(scanResult.count).toBeGreaterThanOrEqual(3);
expect(scanResult.keys).toContain("key-1");
expect(scanResult.keys).toContain("key-2");
expect(scanResult.keys).toContain("key-3");
expect(scanResult.dmap).toBe(testDMap);
});
it("should scan keys with regex match", async () => {
const client = await createTestClient();
// Put keys with different patterns
await client.cache.put(testDMap, "user-1", "value-1");
await client.cache.put(testDMap, "user-2", "value-2");
await client.cache.put(testDMap, "session-1", "value-3");
// Scan with regex match
const scanResult = await client.cache.scan(testDMap, "^user-");
expect(scanResult.count).toBeGreaterThanOrEqual(2);
expect(scanResult.keys).toContain("user-1");
expect(scanResult.keys).toContain("user-2");
expect(scanResult.keys).not.toContain("session-1");
});
it("should handle non-existent key gracefully", async () => {
const client = await createTestClient();
const nonExistentKey = "non-existent-key";
// Cache misses should return null, not throw an error
const result = await client.cache.get(testDMap, nonExistentKey);
expect(result).toBeNull();
});
it("should handle empty dmap name", async () => {
const client = await createTestClient();
try {
await client.cache.get("", "test-key");
expect.fail("Expected get to fail with empty dmap");
} catch (err: any) {
expect(err.message).toBeDefined();
}
});
});

tests/e2e/network.test.ts

@@ -18,8 +18,8 @@ describe("Network", () => {
const client = await createTestClient();
const status = await client.network.status();
expect(status).toBeDefined();
expect(typeof status.healthy).toBe("boolean");
expect(typeof status.peers).toBe("number");
expect(typeof status.connected).toBe("boolean");
expect(typeof status.peer_count).toBe("number");
});
it("should list peers", async () => {
@@ -27,4 +27,34 @@ describe("Network", () => {
const peers = await client.network.peers();
expect(Array.isArray(peers)).toBe(true);
});
it("should proxy request through Anyone network", async () => {
const client = await createTestClient();
// Test with a simple GET request
const response = await client.network.proxyAnon({
url: "https://httpbin.org/get",
method: "GET",
headers: {
"User-Agent": "DeBros-SDK-Test/1.0",
},
});
expect(response).toBeDefined();
expect(response.status_code).toBe(200);
expect(response.body).toBeDefined();
expect(typeof response.body).toBe("string");
});
it("should handle proxy errors gracefully", async () => {
const client = await createTestClient();
// Test with invalid URL
await expect(
client.network.proxyAnon({
url: "http://localhost:1/invalid",
method: "GET",
})
).rejects.toThrow();
});
});

211
tests/e2e/storage.test.ts Normal file

@@ -0,0 +1,211 @@
import { describe, it, expect, beforeAll } from "vitest";
import { createTestClient, skipIfNoGateway } from "./setup";
describe("Storage", () => {
beforeAll(() => {
if (skipIfNoGateway()) {
console.log("Skipping storage tests");
}
});
it("should upload a file", async () => {
const client = await createTestClient();
const testContent = "Hello, IPFS!";
const testFile = new File([testContent], "test.txt", {
type: "text/plain",
});
const result = await client.storage.upload(testFile);
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(typeof result.cid).toBe("string");
expect(result.cid.length).toBeGreaterThan(0);
expect(result.name).toBe("test.txt");
expect(result.size).toBeGreaterThan(0);
});
it("should upload a Blob", async () => {
const client = await createTestClient();
const testContent = "Test blob content";
const blob = new Blob([testContent], { type: "text/plain" });
const result = await client.storage.upload(blob, "blob.txt");
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(typeof result.cid).toBe("string");
expect(result.name).toBe("blob.txt");
});
it("should upload ArrayBuffer", async () => {
const client = await createTestClient();
const testContent = "Test array buffer";
const buffer = new TextEncoder().encode(testContent).buffer;
const result = await client.storage.upload(buffer, "buffer.bin");
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(typeof result.cid).toBe("string");
});
it("should upload Uint8Array", async () => {
const client = await createTestClient();
const testContent = "Test uint8array";
const uint8Array = new TextEncoder().encode(testContent);
const result = await client.storage.upload(uint8Array, "uint8.txt");
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(typeof result.cid).toBe("string");
});
it("should pin a CID", async () => {
const client = await createTestClient();
// First upload a file to get a CID
const testContent = "File to pin";
const testFile = new File([testContent], "pin-test.txt", {
type: "text/plain",
});
const uploadResult = await client.storage.upload(testFile);
const cid = uploadResult.cid;
// Now pin it
const pinResult = await client.storage.pin(cid, "pinned-file");
expect(pinResult).toBeDefined();
expect(pinResult.cid).toBe(cid);
expect(pinResult.name).toBe("pinned-file");
});
it("should get pin status", async () => {
const client = await createTestClient();
// First upload and pin a file
const testContent = "File for status check";
const testFile = new File([testContent], "status-test.txt", {
type: "text/plain",
});
const uploadResult = await client.storage.upload(testFile);
await client.storage.pin(uploadResult.cid, "status-test");
// Wait a bit for pin to propagate
await new Promise((resolve) => setTimeout(resolve, 1000));
const status = await client.storage.status(uploadResult.cid);
expect(status).toBeDefined();
expect(status.cid).toBe(uploadResult.cid);
expect(status.name).toBe("status-test");
expect(status.status).toBeDefined();
expect(typeof status.status).toBe("string");
expect(status.replication_factor).toBeGreaterThanOrEqual(0);
expect(Array.isArray(status.peers)).toBe(true);
});
it("should retrieve content by CID", async () => {
const client = await createTestClient();
const testContent = "Content to retrieve";
const testFile = new File([testContent], "retrieve-test.txt", {
type: "text/plain",
});
const uploadResult = await client.storage.upload(testFile);
const cid = uploadResult.cid;
// Get the content back
const stream = await client.storage.get(cid);
expect(stream).toBeDefined();
expect(stream instanceof ReadableStream).toBe(true);
// Read the stream
const reader = stream.getReader();
const chunks: Uint8Array[] = [];
let done = false;
while (!done) {
const { value, done: streamDone } = await reader.read();
done = streamDone;
if (value) {
chunks.push(value);
}
}
// Combine chunks
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
const combined = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of chunks) {
combined.set(chunk, offset);
offset += chunk.length;
}
const retrievedContent = new TextDecoder().decode(combined);
expect(retrievedContent).toBe(testContent);
});
it("should unpin a CID", async () => {
const client = await createTestClient();
// First upload and pin a file
const testContent = "File to unpin";
const testFile = new File([testContent], "unpin-test.txt", {
type: "text/plain",
});
const uploadResult = await client.storage.upload(testFile);
await client.storage.pin(uploadResult.cid, "unpin-test");
// Wait a bit
await new Promise((resolve) => setTimeout(resolve, 1000));
// Unpin it
await expect(client.storage.unpin(uploadResult.cid)).resolves.not.toThrow();
});
it("should handle upload errors gracefully", async () => {
const client = await createTestClient();
// Try to upload invalid data
const invalidFile = null as any;
await expect(client.storage.upload(invalidFile)).rejects.toThrow();
});
it("should handle status errors for non-existent CID", async () => {
const client = await createTestClient();
const fakeCID = "QmInvalidCID123456789";
await expect(client.storage.status(fakeCID)).rejects.toThrow();
});
it("should upload large content", async () => {
const client = await createTestClient();
// Create a larger file (100KB)
const largeContent = "x".repeat(100 * 1024);
const largeFile = new File([largeContent], "large.txt", {
type: "text/plain",
});
const result = await client.storage.upload(largeFile);
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(result.size).toBeGreaterThanOrEqual(100 * 1024);
});
it("should upload binary content", async () => {
const client = await createTestClient();
// Create binary data
const binaryData = new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a]); // PNG header
const blob = new Blob([binaryData], { type: "image/png" });
const result = await client.storage.upload(blob, "image.png");
expect(result).toBeDefined();
expect(result.cid).toBeDefined();
expect(result.name).toBe("image.png");
});
});

vitest.config.ts

@@ -1,4 +1,7 @@
import { defineConfig } from "vitest/config";
import dotenv from "dotenv";
dotenv.config();
export default defineConfig({
test: {