feat: Update Docker configurations and integrate IPFS and OrbitDB services
parent 64ed9e82a7
commit 08d110b7e6
@@ -29,7 +29,7 @@
     "test:blog-integration": "jest tests/real-integration/blog-scenario/tests --detectOpenHandles --forceExit",
     "test:blog-build": "cd tests/real-integration/blog-scenario && docker-compose -f docker/docker-compose.blog.yml build",
     "test:blog-clean": "cd tests/real-integration/blog-scenario && docker-compose -f docker/docker-compose.blog.yml down -v --remove-orphans",
-    "test:blog-runner": "ts-node tests/real-integration/blog-scenario/run-tests.ts"
+    "test:blog-runner": "pnpm exec ts-node tests/real-integration/blog-scenario/run-tests.ts"
   },
   "keywords": [
     "ipfs",
pnpm-lock.yaml (generated, 14 changes)
@@ -99,6 +99,9 @@ importers:
       '@types/node':
         specifier: ^22.13.10
         version: 22.13.16
+      '@types/node-fetch':
+        specifier: ^2.6.7
+        version: 2.6.12
       '@types/node-forge':
         specifier: ^1.3.11
         version: 1.3.11
@@ -129,6 +132,9 @@ importers:
       lint-staged:
         specifier: ^15.5.0
         version: 15.5.0
+      node-fetch:
+        specifier: ^2.7.0
+        version: 2.7.0
       prettier:
         specifier: ^3.5.3
         version: 3.5.3
@@ -1522,6 +1528,9 @@ packages:
   '@types/multicast-dns@7.2.4':
     resolution: {integrity: sha512-ib5K4cIDR4Ro5SR3Sx/LROkMDa0BHz0OPaCBL/OSPDsAXEGZ3/KQeS6poBKYVN7BfjXDL9lWNwzyHVgt/wkyCw==}

+  '@types/node-fetch@2.6.12':
+    resolution: {integrity: sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==}
+
   '@types/node-forge@1.3.11':
     resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==}

@@ -7048,6 +7057,11 @@ snapshots:
       '@types/dns-packet': 5.6.5
       '@types/node': 22.13.16

+  '@types/node-fetch@2.6.12':
+    dependencies:
+      '@types/node': 22.14.0
+      form-data: 4.0.2
+
   '@types/node-forge@1.3.11':
     dependencies:
       '@types/node': 22.13.16
src/framework/services/IPFSService.ts (new file)
@@ -0,0 +1,137 @@
import { createHelia } from 'helia';
import { createLibp2p } from 'libp2p';
import { tcp } from '@libp2p/tcp';
import { noise } from '@chainsafe/libp2p-noise';
import { yamux } from '@chainsafe/libp2p-yamux';
import { bootstrap } from '@libp2p/bootstrap';
import { mdns } from '@libp2p/mdns';
import { identify } from '@libp2p/identify';
import { gossipsub } from '@chainsafe/libp2p-gossipsub';
import fs from 'fs';
import path from 'path';

export interface IPFSConfig {
  swarmKeyFile?: string;
  bootstrap?: string[];
  ports?: {
    swarm?: number;
    api?: number;
    gateway?: number;
  };
}

export class IPFSService {
  private helia: any;
  private libp2p: any;
  private config: IPFSConfig;

  constructor(config: IPFSConfig = {}) {
    this.config = config;
  }

  async init(): Promise<void> {
    // Create libp2p instance
    const libp2pConfig: any = {
      addresses: {
        listen: [`/ip4/0.0.0.0/tcp/${this.config.ports?.swarm || 4001}`]
      },
      transports: [tcp()],
      connectionEncryption: [noise()],
      streamMuxers: [yamux()],
      services: {
        identify: identify(),
        pubsub: gossipsub({
          allowPublishToZeroTopicPeers: true
        })
      }
    };

    // Add peer discovery
    const peerDiscovery = [];

    // Add bootstrap peers if provided
    if (this.config.bootstrap && this.config.bootstrap.length > 0) {
      peerDiscovery.push(bootstrap({
        list: this.config.bootstrap
      }));
    }

    // Add mDNS for local discovery
    peerDiscovery.push(mdns({
      interval: 1000
    }));

    if (peerDiscovery.length > 0) {
      libp2pConfig.peerDiscovery = peerDiscovery;
    }

    this.libp2p = await createLibp2p(libp2pConfig);

    // Create Helia instance
    this.helia = await createHelia({
      libp2p: this.libp2p
    });

    console.log(`IPFS Service initialized with peer ID: ${this.libp2p.peerId}`);
  }

  async stop(): Promise<void> {
    if (this.helia) {
      await this.helia.stop();
    }
  }

  getHelia(): any {
    return this.helia;
  }

  getLibp2pInstance(): any {
    return this.libp2p;
  }

  async getConnectedPeers(): Promise<Map<string, any>> {
    if (!this.libp2p) {
      return new Map();
    }

    const peers = this.libp2p.getPeers();
    const peerMap = new Map();

    for (const peerId of peers) {
      peerMap.set(peerId.toString(), peerId);
    }

    return peerMap;
  }

  async pinOnNode(nodeId: string, cid: string): Promise<void> {
    if (this.helia && this.helia.pins) {
      await this.helia.pins.add(cid);
      console.log(`Pinned ${cid} on node ${nodeId}`);
    }
  }

  get pubsub() {
    if (!this.libp2p || !this.libp2p.services.pubsub) {
      return undefined;
    }

    return {
      publish: async (topic: string, data: string) => {
        const encoder = new TextEncoder();
        await this.libp2p.services.pubsub.publish(topic, encoder.encode(data));
      },
      subscribe: async (topic: string, handler: (message: any) => void) => {
        this.libp2p.services.pubsub.subscribe(topic);
        this.libp2p.services.pubsub.addEventListener('message', (event: any) => {
          if (event.detail.topic === topic) {
            handler(event.detail);
          }
        });
      },
      unsubscribe: async (topic: string) => {
        this.libp2p.services.pubsub.unsubscribe(topic);
      }
    };
  }
}
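For context, a minimal usage sketch of the new service (not part of the commit; the import path, swarm port, and topic name are illustrative assumptions):

```typescript
// Sketch: start the node, inspect peers, publish a pubsub message, shut down.
// Uses only the IPFSService API shown above; mDNS handles local peer discovery.
import { IPFSService } from '../../src/framework/services/IPFSService';

async function demo(): Promise<void> {
  // Swarm port 4011 is an illustrative value; bootstrap multiaddrs could be
  // passed via the `bootstrap` option instead of relying on mDNS alone.
  const ipfs = new IPFSService({ ports: { swarm: 4011 } });
  await ipfs.init();

  const peers = await ipfs.getConnectedPeers();
  console.log(`connected peers: ${peers.size}`);

  // The pubsub getter wraps gossipsub publish/subscribe on the libp2p node.
  await ipfs.pubsub?.subscribe('blog-events', (msg) => {
    console.log('received:', new TextDecoder().decode(msg.data));
  });
  await ipfs.pubsub?.publish('blog-events', JSON.stringify({ type: 'ping' }));

  await ipfs.stop();
}

demo().catch(console.error);
```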
src/framework/services/RealOrbitDBService.ts (new file)
@@ -0,0 +1,44 @@
import { createOrbitDB } from '@orbitdb/core';

export class OrbitDBService {
  private orbitdb: any;
  private ipfsService: any;

  constructor(ipfsService: any) {
    this.ipfsService = ipfsService;
  }

  async init(): Promise<void> {
    if (!this.ipfsService) {
      throw new Error('IPFS service is required for OrbitDB');
    }

    this.orbitdb = await createOrbitDB({
      ipfs: this.ipfsService.getHelia(),
      directory: './orbitdb'
    });

    console.log('OrbitDB Service initialized');
  }

  async stop(): Promise<void> {
    if (this.orbitdb) {
      await this.orbitdb.stop();
    }
  }

  async openDB(name: string, type: string): Promise<any> {
    if (!this.orbitdb) {
      throw new Error('OrbitDB not initialized');
    }

    return await this.orbitdb.open(name, {
      type,
      AccessController: this.orbitdb.AccessController
    });
  }

  getOrbitDB(): any {
    return this.orbitdb;
  }
}
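Taken together, the two services are meant to compose roughly as below. This is a sketch only, not code from the commit: the import paths, the database name 'blog-posts', and the 'documents' store type are illustrative, and the put/get calls assume the @orbitdb/core documents API keyed on `_id`.

```typescript
// Sketch: attach OrbitDB to the Helia node managed by IPFSService,
// open a document store, write and read one record, then shut everything down.
import { IPFSService } from '../../src/framework/services/IPFSService';
import { OrbitDBService } from '../../src/framework/services/RealOrbitDBService';

async function demo(): Promise<void> {
  const ipfs = new IPFSService({ ports: { swarm: 4001 } });
  await ipfs.init();

  // OrbitDBService reuses the Helia instance via ipfsService.getHelia().
  const orbit = new OrbitDBService(ipfs);
  await orbit.init();

  // 'blog-posts' / 'documents' are illustrative values.
  const posts = await orbit.openDB('blog-posts', 'documents');
  await posts.put({ _id: 'post-1', title: 'Hello', content: 'First post' });
  console.log(await posts.get('post-1'));

  await orbit.stop();
  await ipfs.stop();
}

demo().catch(console.error);
```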
@@ -68,23 +68,23 @@ blog-scenario/

 ```bash
 # Run complete integration tests
-npm run test:blog-real
+pnpm run test:blog-real

 # Or use the test runner for better control
-npm run test:blog-runner
+pnpm run test:blog-runner
 ```

 #### Option 2: Build and Run Manually

 ```bash
 # Build Docker images
-npm run test:blog-build
+pnpm run test:blog-build

 # Run tests
-npm run test:blog-real
+pnpm run test:blog-real

 # Clean up afterwards
-npm run test:blog-clean
+pnpm run test:blog-clean
 ```

 #### Option 3: Development Mode
@@ -95,7 +95,7 @@ cd tests/real-integration/blog-scenario
 docker-compose -f docker/docker-compose.blog.yml up blog-node-1 blog-node-2 blog-node-3

 # Run tests against running services
-npm run test:blog-integration
+pnpm run test:blog-integration
 ```

 ## Test Scenarios
@@ -275,7 +275,7 @@ lsof -i :4001
 lsof -i :4011-4013

 # Clean up existing containers
-npm run test:blog-clean
+pnpm run test:blog-clean
 ```

 #### Docker Build Failures
@@ -316,10 +316,10 @@ To run tests with additional debugging:

 ```bash
 # Set debug environment
-DEBUG=* npm run test:blog-real
+DEBUG=* pnpm run test:blog-real

 # Run with increased verbosity
-LOG_LEVEL=debug npm run test:blog-real
+LOG_LEVEL=debug pnpm run test:blog-real
 ```

 ## Development
|
|||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Copy package files
|
# Copy package files
|
||||||
COPY package*.json ./
|
COPY package*.json pnpm-lock.yaml ./
|
||||||
|
|
||||||
# Install dependencies
|
# Install pnpm
|
||||||
RUN npm ci --only=production && npm cache clean --force
|
RUN npm install -g pnpm
|
||||||
|
|
||||||
|
# Install dependencies (skip prepare script for Docker)
|
||||||
|
RUN pnpm install --prod --frozen-lockfile --ignore-scripts
|
||||||
|
|
||||||
# Copy source code
|
# Copy source code
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
# Build the application
|
# Build the application
|
||||||
RUN npm run build
|
RUN pnpm run build
|
||||||
|
|
||||||
# Create data directory
|
# Create data directory
|
||||||
RUN mkdir -p /data
|
RUN mkdir -p /data
|
||||||
|
@@ -8,16 +8,19 @@ RUN apk add --no-cache curl jq
 WORKDIR /app

 # Copy package files
-COPY package*.json ./
+COPY package*.json pnpm-lock.yaml ./

-# Install all dependencies (including dev dependencies for testing)
-RUN npm ci && npm cache clean --force
+# Install pnpm
+RUN npm install -g pnpm
+
+# Install all dependencies (including dev dependencies for testing, skip prepare script)
+RUN pnpm install --frozen-lockfile --ignore-scripts

 # Copy source code
 COPY . .

 # Build the application
-RUN npm run build
+RUN pnpm run build

 # Create results directory
 RUN mkdir -p /app/results
@@ -27,4 +30,4 @@ ENV NODE_ENV=test
 ENV TEST_SCENARIO=blog

 # Default command (can be overridden)
-CMD ["npm", "run", "test:blog-integration"]
+CMD ["pnpm", "run", "test:blog-integration"]
@@ -562,7 +562,7 @@ class BlogAPIServer {
     try {
       if (this.framework) {
         const ipfsService = this.framework.getIPFSService();
-        if (ipfsService) {
+        if (ipfsService && ipfsService.getConnectedPeers) {
           const peers = await ipfsService.getConnectedPeers();
           return peers.size;
         }
@@ -603,10 +603,10 @@ class BlogAPIServer {
   }

   private async initializeFramework(): Promise<void> {
-    // Import services - adjust paths based on your actual service locations
-    // Note: You'll need to implement these services or use existing ones
-    const IPFSService = (await import('../../../../src/framework/services/IPFSService')).IPFSService;
-    const OrbitDBService = (await import('../../../../src/framework/services/OrbitDBService')).OrbitDBService;
+    // Import services
+    const { IPFSService } = await import('../../../../src/framework/services/IPFSService');
+    const { OrbitDBService } = await import('../../../../src/framework/services/RealOrbitDBService');
+    const { FrameworkIPFSService, FrameworkOrbitDBService } = await import('../../../../src/framework/services/OrbitDBService');

     // Initialize IPFS service
     const ipfsService = new IPFSService({
@@ -625,6 +625,10 @@ class BlogAPIServer {
     await orbitDBService.init();
     console.log(`[${this.nodeId}] OrbitDB service initialized`);

+    // Wrap services for framework
+    const frameworkIPFS = new FrameworkIPFSService(ipfsService);
+    const frameworkOrbitDB = new FrameworkOrbitDBService(orbitDBService);
+
     // Initialize framework
     this.framework = new DebrosFramework({
       environment: 'test',
@@ -642,7 +646,7 @@ class BlogAPIServer {
       }
     });

-    await this.framework.initialize(orbitDBService, ipfsService);
+    await this.framework.initialize(frameworkOrbitDB, frameworkIPFS);
     console.log(`[${this.nodeId}] DebrosFramework initialized successfully`);
   }
 }
@@ -2,9 +2,16 @@

 echo "Configuring bootstrap IPFS node..."

-# Set swarm key for private network
-export IPFS_PATH=/root/.ipfs
-cp /data/swarm.key $IPFS_PATH/swarm.key
+# Set IPFS path
+export IPFS_PATH=/data/ipfs
+
+# Copy swarm key for private network
+if [ -f "/data/ipfs/swarm.key" ]; then
+  echo "Using existing swarm key"
+else
+  echo "Swarm key not found"
+  exit 1
+fi

 # Configure IPFS for private network
 ipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["*"]'
@ -14,15 +21,13 @@ ipfs config --json API.HTTPHeaders.Access-Control-Allow-Headers '["Authorization
|
|||||||
# Remove default bootstrap nodes (for private network)
|
# Remove default bootstrap nodes (for private network)
|
||||||
ipfs bootstrap rm --all
|
ipfs bootstrap rm --all
|
||||||
|
|
||||||
# Enable experimental features
|
|
||||||
ipfs config --json Experimental.Libp2pStreamMounting true
|
|
||||||
ipfs config --json Experimental.P2pHttpProxy true
|
|
||||||
|
|
||||||
# Configure addresses
|
# Configure addresses
|
||||||
ipfs config Addresses.API "/ip4/0.0.0.0/tcp/5001"
|
ipfs config Addresses.API "/ip4/0.0.0.0/tcp/5001"
|
||||||
ipfs config Addresses.Gateway "/ip4/0.0.0.0/tcp/8080"
|
ipfs config Addresses.Gateway "/ip4/0.0.0.0/tcp/8080"
|
||||||
ipfs config --json Addresses.Swarm '["/ip4/0.0.0.0/tcp/4001"]'
|
ipfs config --json Addresses.Swarm '["/ip4/0.0.0.0/tcp/4001"]'
|
||||||
|
|
||||||
# Start IPFS daemon
|
# Enable PubSub
|
||||||
|
ipfs config --json Pubsub.Enabled true
|
||||||
|
|
||||||
echo "Starting IPFS daemon..."
|
echo "Starting IPFS daemon..."
|
||||||
exec ipfs daemon --enable-gc --enable-pubsub-experiment
|
exec ipfs daemon --enable-gc
|
@@ -1,11 +1,10 @@
-version: '3.8'

 services:
   # Bootstrap node for peer discovery
   blog-bootstrap:
     build:
-      context: ../../../
-      dockerfile: tests/real-integration/blog-scenario/docker/Dockerfile.bootstrap
+      context: ../../../../
+      dockerfile: tests/real-integration/shared/infrastructure/docker/Dockerfile.bootstrap
     environment:
       - NODE_TYPE=bootstrap
       - NODE_ID=blog-bootstrap
@@ -18,7 +17,7 @@ services:
     ports:
       - "4001:4001"
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:5001/api/v0/id"]
+      test: ["CMD", "sh", "-c", "ipfs id >/dev/null 2>&1"]
       interval: 10s
       timeout: 5s
       retries: 5
@@ -26,7 +25,7 @@ services:
   # Blog API Node 1
   blog-node-1:
     build:
-      context: ../../../
+      context: ../../../../
       dockerfile: tests/real-integration/blog-scenario/docker/Dockerfile.blog-api
     depends_on:
       blog-bootstrap:
@@ -47,7 +46,7 @@ services:
     networks:
       - blog-network
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
+      test: ["CMD", "sh", "-c", "wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1"]
       interval: 15s
       timeout: 10s
       retries: 10
@@ -56,7 +55,7 @@ services:
   # Blog API Node 2
   blog-node-2:
     build:
-      context: ../../../
+      context: ../../../../
       dockerfile: tests/real-integration/blog-scenario/docker/Dockerfile.blog-api
     depends_on:
       blog-bootstrap:
@@ -77,7 +76,7 @@ services:
     networks:
       - blog-network
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
+      test: ["CMD", "sh", "-c", "wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1"]
      interval: 15s
       timeout: 10s
       retries: 10
@@ -86,7 +85,7 @@ services:
   # Blog API Node 3
   blog-node-3:
     build:
-      context: ../../../
+      context: ../../../../
       dockerfile: tests/real-integration/blog-scenario/docker/Dockerfile.blog-api
     depends_on:
       blog-bootstrap:
@@ -107,7 +106,7 @@ services:
     networks:
       - blog-network
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
+      test: ["CMD", "sh", "-c", "wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1"]
       interval: 15s
       timeout: 10s
       retries: 10
@@ -116,7 +115,7 @@ services:
   # Test Runner
   blog-test-runner:
     build:
-      context: ../../../
+      context: ../../../../
       dockerfile: tests/real-integration/blog-scenario/docker/Dockerfile.test-runner
     depends_on:
       blog-node-1:
@@ -131,11 +130,11 @@ services:
       - TEST_TIMEOUT=300000
       - NODE_ENV=test
     volumes:
-      - ./tests:/app/tests:ro
+      - ../tests:/app/tests:ro
       - test-results:/app/results
     networks:
       - blog-network
-    command: ["npm", "run", "test:blog-integration"]
+    command: ["pnpm", "run", "test:blog-integration"]

 volumes:
   bootstrap-data:
@@ -0,0 +1,17 @@
# Bootstrap node for IPFS peer discovery
FROM ipfs/kubo:v0.24.0

# Copy swarm key
COPY tests/real-integration/blog-scenario/docker/swarm.key /data/ipfs/swarm.key

# Copy configuration script
COPY tests/real-integration/blog-scenario/docker/bootstrap-config.sh /usr/local/bin/bootstrap-config.sh
USER root
RUN chmod +x /usr/local/bin/bootstrap-config.sh
USER ipfs

# Expose IPFS ports
EXPOSE 4001 5001 8080

# Start IPFS daemon with custom config
CMD ["/usr/local/bin/bootstrap-config.sh"]