Mirror of https://github.com/DeBrosOfficial/network-ts-sdk.git
Synced 2025-12-11 01:58:49 +00:00
feat: integrate Olric distributed cache support
- Added Olric cache server integration, including configuration options for Olric servers and timeout settings.
- Implemented HTTP handlers for cache operations: health check, get, put, delete, and scan.
- Enhanced Makefile with commands to run the Olric server and manage its configuration.
- Updated README and setup scripts to include Olric installation and configuration instructions.
- Introduced tests for cache handlers to ensure proper functionality and error handling.
This commit is contained in:
parent e30e81d0c9
commit e233696166
src/core/http.ts
@@ -221,6 +221,78 @@ export class HttpClient {
    return this.request<T>("DELETE", path, options);
  }

  /**
   * Upload a file using multipart/form-data
   * This is a special method for file uploads that bypasses JSON serialization
   */
  async uploadFile<T = any>(
    path: string,
    formData: FormData,
    options?: {
      timeout?: number;
    }
  ): Promise<T> {
    const url = new URL(this.baseURL + path);
    const headers: Record<string, string> = {
      ...this.getAuthHeaders(path),
      // Don't set Content-Type - browser will set it with boundary
    };

    const controller = new AbortController();
    const requestTimeout = options?.timeout ?? this.timeout * 5; // 5x timeout for uploads
    const timeoutId = setTimeout(() => controller.abort(), requestTimeout);

    const fetchOptions: RequestInit = {
      method: "POST",
      headers,
      body: formData,
      signal: controller.signal,
    };

    try {
      return await this.requestWithRetry(url.toString(), fetchOptions);
    } finally {
      clearTimeout(timeoutId);
    }
  }

  /**
   * Get a binary response (returns Response object for streaming)
   */
  async getBinary(path: string): Promise<Response> {
    const url = new URL(this.baseURL + path);
    const headers: Record<string, string> = {
      ...this.getAuthHeaders(path),
    };

    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout * 5); // 5x timeout for downloads

    const fetchOptions: RequestInit = {
      method: "GET",
      headers,
      signal: controller.signal,
    };

    try {
      const response = await this.fetch(url.toString(), fetchOptions);
      if (!response.ok) {
        clearTimeout(timeoutId);
        const error = await response.json().catch(() => ({
          error: response.statusText,
        }));
        throw SDKError.fromResponse(response.status, error);
      }
      return response;
    } catch (error) {
      clearTimeout(timeoutId);
      if (error instanceof SDKError) {
        throw error;
      }
      throw error;
    }
  }

  getToken(): string | undefined {
    return this.getAuthToken();
  }
11  src/index.ts
@@ -4,6 +4,7 @@ import { DBClient } from "./db/client";
import { PubSubClient } from "./pubsub/client";
import { NetworkClient } from "./network/client";
import { CacheClient } from "./cache/client";
import { StorageClient } from "./storage/client";
import { WSClientConfig } from "./core/ws";
import {
  StorageAdapter,
@@ -25,6 +26,7 @@ export interface Client {
  pubsub: PubSubClient;
  network: NetworkClient;
  cache: CacheClient;
  storage: StorageClient;
}

export function createClient(config: ClientConfig): Client {
@@ -55,6 +57,7 @@ export function createClient(config: ClientConfig): Client {
  });
  const network = new NetworkClient(httpClient);
  const cache = new CacheClient(httpClient);
  const storage = new StorageClient(httpClient);

  return {
    auth,
@@ -62,6 +65,7 @@ export function createClient(config: ClientConfig): Client {
    pubsub,
    network,
    cache,
    storage,
  };
}
@@ -74,6 +78,7 @@ export { Repository } from "./db/repository";
export { PubSubClient, Subscription } from "./pubsub/client";
export { NetworkClient } from "./network/client";
export { CacheClient } from "./cache/client";
export { StorageClient } from "./storage/client";
export { SDKError } from "./errors";
export { MemoryStorage, LocalStorageAdapter } from "./auth/types";
export type { StorageAdapter, AuthConfig, WhoAmI } from "./auth/types";
@@ -101,3 +106,9 @@ export type {
  CacheScanResponse,
  CacheHealthResponse,
} from "./cache/client";
export type {
  StorageUploadResponse,
  StoragePinRequest,
  StoragePinResponse,
  StorageStatus,
} from "./storage/client";
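To put the wiring above in context, here is a minimal usage sketch of the extended client surface. It assumes `ClientConfig` is exported from the package entry point and that a configured gateway is available; the import path and config handling are illustrative, not taken from this commit:

```ts
import { createClient, ClientConfig } from "../src/index";

// Sketch only: constructing a ClientConfig depends on the SDK's config shape,
// which is defined elsewhere in the repository and not shown in this diff.
async function storageDemo(config: ClientConfig) {
  const client = createClient(config);

  // The new storage client sits alongside auth, db, pubsub, network and cache.
  const uploaded = await client.storage.upload(
    new Blob(["hello, ipfs"], { type: "text/plain" }),
    "hello.txt"
  );

  const status = await client.storage.status(uploaded.cid);
  console.log(uploaded.cid, status.status);
}
```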
170  src/storage/client.ts (new file)
@@ -0,0 +1,170 @@
import { HttpClient } from "../core/http";

export interface StorageUploadResponse {
  cid: string;
  name: string;
  size: number;
}

export interface StoragePinRequest {
  cid: string;
  name?: string;
}

export interface StoragePinResponse {
  cid: string;
  name: string;
}

export interface StorageStatus {
  cid: string;
  name: string;
  status: string; // "pinned", "pinning", "queued", "unpinned", "error"
  replication_min: number;
  replication_max: number;
  replication_factor: number;
  peers: string[];
  error?: string;
}

export class StorageClient {
  private httpClient: HttpClient;

  constructor(httpClient: HttpClient) {
    this.httpClient = httpClient;
  }

  /**
   * Upload content to IPFS and pin it.
   * Supports both File objects (browser) and Buffer/ReadableStream (Node.js).
   *
   * @param file - File to upload (File, Blob, or Buffer)
   * @param name - Optional filename
   * @returns Upload result with CID
   *
   * @example
   * ```ts
   * // Browser
   * const fileInput = document.querySelector('input[type="file"]');
   * const file = fileInput.files[0];
   * const result = await client.storage.upload(file, file.name);
   * console.log(result.cid);
   *
   * // Node.js
   * const fs = require('fs');
   * const fileBuffer = fs.readFileSync('image.jpg');
   * const result = await client.storage.upload(fileBuffer, 'image.jpg');
   * ```
   */
  async upload(
    file: File | Blob | ArrayBuffer | Uint8Array | ReadableStream<Uint8Array>,
    name?: string
  ): Promise<StorageUploadResponse> {
    // Create FormData for multipart upload
    const formData = new FormData();

    // Handle different input types
    if (file instanceof File) {
      formData.append("file", file);
    } else if (file instanceof Blob) {
      formData.append("file", file, name);
    } else if (file instanceof ArrayBuffer) {
      const blob = new Blob([file]);
      formData.append("file", blob, name);
    } else if (file instanceof Uint8Array) {
      // Convert Uint8Array to ArrayBuffer for Blob constructor
      const buffer = file.buffer.slice(
        file.byteOffset,
        file.byteOffset + file.byteLength
      ) as ArrayBuffer;
      const blob = new Blob([buffer], { type: "application/octet-stream" });
      formData.append("file", blob, name);
    } else if (file instanceof ReadableStream) {
      // For ReadableStream, we need to read it into a blob first
      // This is a limitation - in practice, pass File/Blob/Buffer
      const chunks: ArrayBuffer[] = [];
      const reader = file.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        const buffer = value.buffer.slice(
          value.byteOffset,
          value.byteOffset + value.byteLength
        ) as ArrayBuffer;
        chunks.push(buffer);
      }
      const blob = new Blob(chunks);
      formData.append("file", blob, name);
    } else {
      throw new Error(
        "Unsupported file type. Use File, Blob, ArrayBuffer, Uint8Array, or ReadableStream."
      );
    }

    return this.httpClient.uploadFile<StorageUploadResponse>(
      "/v1/storage/upload",
      formData,
      { timeout: 300000 } // 5 minute timeout for large files
    );
  }

  /**
   * Pin an existing CID
   *
   * @param cid - Content ID to pin
   * @param name - Optional name for the pin
   * @returns Pin result
   */
  async pin(cid: string, name?: string): Promise<StoragePinResponse> {
    return this.httpClient.post<StoragePinResponse>("/v1/storage/pin", {
      cid,
      name,
    });
  }

  /**
   * Get the pin status for a CID
   *
   * @param cid - Content ID to check
   * @returns Pin status information
   */
  async status(cid: string): Promise<StorageStatus> {
    return this.httpClient.get<StorageStatus>(`/v1/storage/status/${cid}`);
  }

  /**
   * Retrieve content from IPFS by CID
   *
   * @param cid - Content ID to retrieve
   * @returns ReadableStream of the content
   *
   * @example
   * ```ts
   * const stream = await client.storage.get(cid);
   * const reader = stream.getReader();
   * while (true) {
   *   const { done, value } = await reader.read();
   *   if (done) break;
   *   // Process chunk
   * }
   * ```
   */
  async get(cid: string): Promise<ReadableStream<Uint8Array>> {
    const response = await this.httpClient.getBinary(`/v1/storage/get/${cid}`);

    if (!response.body) {
      throw new Error("Response body is null");
    }

    return response.body;
  }

  /**
   * Unpin a CID
   *
   * @param cid - Content ID to unpin
   */
  async unpin(cid: string): Promise<void> {
    await this.httpClient.delete(`/v1/storage/unpin/${cid}`);
  }
}
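Complementing the streaming example in `get()`'s doc comment, here is a hedged Node.js sketch for writing a retrieved CID to disk. The relative import path is an assumption, and `Readable.fromWeb` requires Node 17+:

```ts
import { createWriteStream } from "node:fs";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import type { StorageClient } from "../src/storage/client"; // illustrative path

// Bridge the web ReadableStream returned by storage.get() into a Node stream
// and pipe it straight to a file on disk.
async function downloadToFile(storage: StorageClient, cid: string, outPath: string) {
  const body = await storage.get(cid);
  await pipeline(Readable.fromWeb(body as any), createWriteStream(outPath));
}
```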
218  tests/e2e/storage.test.ts (new file)
@@ -0,0 +1,218 @@
import { describe, it, expect, beforeAll } from "vitest";
import { createTestClient, skipIfNoGateway } from "./setup";

describe("Storage", () => {
  beforeAll(() => {
    if (skipIfNoGateway()) {
      console.log("Skipping storage tests");
    }
  });

  it("should upload a file", async () => {
    const client = await createTestClient();
    const testContent = "Hello, IPFS!";
    const testFile = new File([testContent], "test.txt", {
      type: "text/plain",
    });

    const result = await client.storage.upload(testFile);

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(typeof result.cid).toBe("string");
    expect(result.cid.length).toBeGreaterThan(0);
    expect(result.name).toBe("test.txt");
    expect(result.size).toBeGreaterThan(0);
  });

  it("should upload a Blob", async () => {
    const client = await createTestClient();
    const testContent = "Test blob content";
    const blob = new Blob([testContent], { type: "text/plain" });

    const result = await client.storage.upload(blob, "blob.txt");

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(typeof result.cid).toBe("string");
    expect(result.name).toBe("blob.txt");
  });

  it("should upload ArrayBuffer", async () => {
    const client = await createTestClient();
    const testContent = "Test array buffer";
    const buffer = new TextEncoder().encode(testContent).buffer;

    const result = await client.storage.upload(buffer, "buffer.bin");

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(typeof result.cid).toBe("string");
  });

  it("should upload Uint8Array", async () => {
    const client = await createTestClient();
    const testContent = "Test uint8array";
    const uint8Array = new TextEncoder().encode(testContent);

    const result = await client.storage.upload(uint8Array, "uint8.txt");

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(typeof result.cid).toBe("string");
  });

  it("should pin a CID", async () => {
    const client = await createTestClient();
    // First upload a file to get a CID
    const testContent = "File to pin";
    const testFile = new File([testContent], "pin-test.txt", {
      type: "text/plain",
    });

    const uploadResult = await client.storage.upload(testFile);
    const cid = uploadResult.cid;

    // Now pin it
    const pinResult = await client.storage.pin(cid, "pinned-file");

    expect(pinResult).toBeDefined();
    expect(pinResult.cid).toBe(cid);
    expect(pinResult.name).toBe("pinned-file");
  });

  it("should get pin status", async () => {
    const client = await createTestClient();
    // First upload and pin a file
    const testContent = "File for status check";
    const testFile = new File([testContent], "status-test.txt", {
      type: "text/plain",
    });

    const uploadResult = await client.storage.upload(testFile);
    await client.storage.pin(uploadResult.cid, "status-test");

    // Wait a bit for pin to propagate
    await new Promise((resolve) => setTimeout(resolve, 1000));

    const status = await client.storage.status(uploadResult.cid);

    expect(status).toBeDefined();
    expect(status.cid).toBe(uploadResult.cid);
    expect(status.name).toBe("status-test");
    expect(status.status).toBeDefined();
    expect(typeof status.status).toBe("string");
    expect(status.replication_factor).toBeGreaterThanOrEqual(0);
    expect(Array.isArray(status.peers)).toBe(true);
  });

  it("should retrieve content by CID", async () => {
    const client = await createTestClient();
    const testContent = "Content to retrieve";
    const testFile = new File([testContent], "retrieve-test.txt", {
      type: "text/plain",
    });

    const uploadResult = await client.storage.upload(testFile);
    const cid = uploadResult.cid;

    // Get the content back
    const stream = await client.storage.get(cid);

    expect(stream).toBeDefined();
    expect(stream instanceof ReadableStream).toBe(true);

    // Read the stream
    const reader = stream.getReader();
    const chunks: Uint8Array[] = [];
    let done = false;

    while (!done) {
      const { value, done: streamDone } = await reader.read();
      done = streamDone;
      if (value) {
        chunks.push(value);
      }
    }

    // Combine chunks
    const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const combined = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      combined.set(chunk, offset);
      offset += chunk.length;
    }

    const retrievedContent = new TextDecoder().decode(combined);
    expect(retrievedContent).toBe(testContent);
  });

  it("should unpin a CID", async () => {
    const client = await createTestClient();
    // First upload and pin a file
    const testContent = "File to unpin";
    const testFile = new File([testContent], "unpin-test.txt", {
      type: "text/plain",
    });

    const uploadResult = await client.storage.upload(testFile);
    await client.storage.pin(uploadResult.cid, "unpin-test");

    // Wait a bit
    await new Promise((resolve) => setTimeout(resolve, 1000));

    // Unpin it
    await expect(client.storage.unpin(uploadResult.cid)).resolves.not.toThrow();
  });

  it("should handle upload errors gracefully", async () => {
    const client = await createTestClient();
    // Try to upload invalid data
    const invalidFile = null as any;

    await expect(client.storage.upload(invalidFile)).rejects.toThrow();
  });

  it("should handle status errors for non-existent CID", async () => {
    const client = await createTestClient();
    const fakeCID = "QmInvalidCID123456789";

    await expect(client.storage.status(fakeCID)).rejects.toThrow();
  });

  it("should handle get errors for non-existent CID", async () => {
    const client = await createTestClient();
    const fakeCID = "QmInvalidCID123456789";

    await expect(client.storage.get(fakeCID)).rejects.toThrow();
  });

  it("should upload large content", async () => {
    const client = await createTestClient();
    // Create a larger file (100KB)
    const largeContent = "x".repeat(100 * 1024);
    const largeFile = new File([largeContent], "large.txt", {
      type: "text/plain",
    });

    const result = await client.storage.upload(largeFile);

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(result.size).toBeGreaterThanOrEqual(100 * 1024);
  });

  it("should upload binary content", async () => {
    const client = await createTestClient();
    // Create binary data
    const binaryData = new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a]); // PNG header
    const blob = new Blob([binaryData], { type: "image/png" });

    const result = await client.storage.upload(blob, "image.png");

    expect(result).toBeDefined();
    expect(result.cid).toBeDefined();
    expect(result.name).toBe("image.png");
  });
});