Bug fixes and refactor for the new version

This commit is contained in:
anonpenguin 2025-06-19 06:43:54 +03:00
parent 3232cf2d3e
commit eea46b0144
64 changed files with 12833 additions and 4619 deletions

View File

@ -1,5 +1,5 @@
{ {
"*.{js,ts}": [ "src/**/*.{js,ts}": [
"prettier --write", "prettier --write",
"eslint --fix", "eslint --fix",
"npm run build" "npm run build"

View File

@ -7,7 +7,7 @@ export default [
// Base configuration for all files // Base configuration for all files
{ {
files: ['**/*.{ts}'], files: ['**/*.{ts}'],
ignores: ['dist/**', 'docs/**', 'src/components/bot/templates/**'], ignores: ['dist/**', 'docs/**', 'src/components/bot/templates/**', 'examples/**'],
languageOptions: { languageOptions: {
ecmaVersion: 'latest', ecmaVersion: 'latest',
sourceType: 'module', sourceType: 'module',

View File

@ -0,0 +1,661 @@
/**
* Comprehensive Examples for Automatic Features (Phase 5)
*
* This file demonstrates the automatic pinning and PubSub capabilities:
* - Smart pinning strategies based on usage patterns
* - Automatic event publishing for model changes
* - Real-time synchronization across nodes
* - Performance optimization through intelligent caching
* - Cross-node communication and coordination
*/
import { SocialPlatformFramework, User, Post, Comment } from './framework-integration';
import { PinningManager } from '../src/framework/pinning/PinningManager';
import { PubSubManager } from '../src/framework/pubsub/PubSubManager';
/**
 * Demonstrates the framework's automatic features end to end: smart pinning
 * strategies, automatic PubSub event publishing, real-time synchronization,
 * cross-node coordination, performance optimization, and intelligent cleanup.
 *
 * All methods log their progress to the console; they are demos, not tests.
 */
export class AutomaticFeaturesExamples {
  private framework: SocialPlatformFramework;
  private pinningManager: PinningManager;
  private pubsubManager: PubSubManager;

  constructor(framework: SocialPlatformFramework) {
    this.framework = framework;
    // These would be injected from the framework
    // NOTE(review): `as any` bypasses typing — confirm the framework actually
    // exposes `pinningManager`/`pubsubManager` properties at runtime.
    this.pinningManager = (framework as any).pinningManager;
    this.pubsubManager = (framework as any).pubsubManager;
  }

  /** Runs every example scenario in a fixed order. */
  async runAllExamples(): Promise<void> {
    console.log('🤖 Running comprehensive automatic features examples...\n');
    await this.pinningStrategyExamples();
    await this.automaticEventPublishingExamples();
    await this.realTimeSynchronizationExamples();
    await this.crossNodeCommunicationExamples();
    await this.performanceOptimizationExamples();
    await this.intelligentCleanupExamples();
    console.log('✅ All automatic features examples completed!\n');
  }

  /**
   * Configures per-model pinning rules, simulates access patterns against a
   * handful of posts, and prints the resulting pinning metrics.
   */
  async pinningStrategyExamples(): Promise<void> {
    console.log('📌 Smart Pinning Strategy Examples');
    console.log('==================================\n');
    // Configure different pinning strategies for different model types
    console.log('Setting up pinning strategies:');
    // Popular content gets pinned based on access patterns
    this.pinningManager.setPinningRule('Post', {
      strategy: 'popularity',
      factor: 1.5,
      maxPins: 100,
      minAccessCount: 5
    });
    // User profiles are always pinned (important core data)
    this.pinningManager.setPinningRule('User', {
      strategy: 'fixed',
      factor: 2.0,
      maxPins: 50
    });
    // Comments use size-based pinning (prefer smaller, more efficient content)
    this.pinningManager.setPinningRule('Comment', {
      strategy: 'size',
      factor: 1.0,
      maxPins: 200
    });
    // Create sample content and observe pinning behavior
    const posts = await Post.where('isPublic', '=', true).limit(5).exec();
    if (posts.length > 0) {
      console.log('\nDemonstrating automatic pinning:');
      for (let i = 0; i < posts.length; i++) {
        const post = posts[i];
        // Synthetic content hash; each post gets progressively more accesses
        const hash = `hash-${post.id}-${Date.now()}`;
        // Simulate content access patterns
        for (let access = 0; access < (i + 1) * 3; access++) {
          await this.pinningManager.recordAccess(hash);
        }
        // Pin content based on strategy
        const pinned = await this.pinningManager.pinContent(
          hash,
          'Post',
          post.id,
          {
            title: post.title,
            createdAt: post.createdAt,
            size: post.content.length
          }
        );
        console.log(`Post "${post.title}": ${pinned ? 'PINNED' : 'NOT PINNED'} (${(i + 1) * 3} accesses)`);
      }
      // Show pinning metrics
      const metrics = this.pinningManager.getMetrics();
      console.log('\nPinning Metrics:');
      console.log(`- Total pinned: ${metrics.totalPinned}`);
      console.log(`- Total size: ${(metrics.totalSize / 1024).toFixed(2)} KB`);
      console.log(`- Most accessed: ${metrics.mostAccessed?.hash || 'None'}`);
      console.log(`- Strategy breakdown:`);
      metrics.strategyBreakdown.forEach((count, strategy) => {
        console.log(`  * ${strategy}: ${count} items`);
      });
    }
    console.log('');
  }

  /**
   * Subscribes to model lifecycle topics, creates/updates records, and
   * manually emits the corresponding events to show what automatic
   * publishing would produce.
   */
  async automaticEventPublishingExamples(): Promise<void> {
    console.log('📡 Automatic Event Publishing Examples');
    console.log('======================================\n');
    // Set up event listeners to demonstrate automatic publishing
    const events: any[] = [];
    await this.pubsubManager.subscribe('model.created', (event) => {
      events.push({ type: 'created', ...event });
      console.log(`🆕 Model created: ${event.data.modelName}:${event.data.modelId}`);
    });
    await this.pubsubManager.subscribe('model.updated', (event) => {
      events.push({ type: 'updated', ...event });
      console.log(`📝 Model updated: ${event.data.modelName}:${event.data.modelId}`);
    });
    await this.pubsubManager.subscribe('model.deleted', (event) => {
      events.push({ type: 'deleted', ...event });
      console.log(`🗑️ Model deleted: ${event.data.modelName}:${event.data.modelId}`);
    });
    console.log('Event listeners set up, creating test data...\n');
    // Create data and observe automatic event publishing
    const testUser = await User.create({
      username: `testuser-${Date.now()}`,
      email: `test${Date.now()}@example.com`,
      bio: 'Testing automatic event publishing'
    });
    // Simulate event emission (in real implementation, this would be automatic)
    this.pubsubManager.emit('modelEvent', 'create', testUser);
    const testPost = await Post.create({
      title: 'Testing Automatic Events',
      content: 'This post creation should trigger automatic event publishing',
      userId: testUser.id,
      isPublic: true
    });
    this.pubsubManager.emit('modelEvent', 'create', testPost);
    // Update the post
    await testPost.update({ title: 'Updated: Testing Automatic Events' });
    this.pubsubManager.emit('modelEvent', 'update', testPost, { title: 'Updated title' });
    // Wait a moment for event processing
    await new Promise(resolve => setTimeout(resolve, 1000));
    console.log(`\nCaptured ${events.length} automatic events:`);
    events.forEach((event, index) => {
      console.log(`${index + 1}. ${event.type}: ${event.data?.modelName || 'unknown'}`);
    });
    console.log('');
  }

  /**
   * Simulates three nodes with different subscription filters receiving the
   * same published model events, then publishes a sample conflict event.
   */
  async realTimeSynchronizationExamples(): Promise<void> {
    console.log('⚡ Real-Time Synchronization Examples');
    console.log('=====================================\n');
    // Simulate multiple nodes subscribing to the same topics
    const nodeEvents: Record<string, any[]> = {
      node1: [],
      node2: [],
      node3: []
    };
    // Subscribe each "node" to model events
    await this.pubsubManager.subscribe('model.*', (event) => {
      nodeEvents.node1.push(event);
    }, {
      filter: (event) => event.data.modelName === 'Post'
    });
    await this.pubsubManager.subscribe('model.*', (event) => {
      nodeEvents.node2.push(event);
    }, {
      filter: (event) => event.data.modelName === 'User'
    });
    await this.pubsubManager.subscribe('model.*', (event) => {
      nodeEvents.node3.push(event);
    }); // No filter - receives all events
    console.log('Multiple nodes subscribed to synchronization topics');
    // Generate events that should synchronize across nodes
    const syncUser = await User.create({
      username: `syncuser-${Date.now()}`,
      email: `sync${Date.now()}@example.com`,
      bio: 'User for testing real-time sync'
    });
    const syncPost = await Post.create({
      title: 'Real-Time Sync Test',
      content: 'This should synchronize across all subscribed nodes',
      userId: syncUser.id,
      isPublic: true
    });
    // Emit events
    await this.pubsubManager.publish('model.created', {
      modelName: 'User',
      modelId: syncUser.id,
      timestamp: Date.now()
    });
    await this.pubsubManager.publish('model.created', {
      modelName: 'Post',
      modelId: syncPost.id,
      timestamp: Date.now()
    });
    // Wait for synchronization
    await new Promise(resolve => setTimeout(resolve, 1500));
    console.log('\nSynchronization results:');
    console.log(`Node 1 (Post filter): ${nodeEvents.node1.length} events received`);
    console.log(`Node 2 (User filter): ${nodeEvents.node2.length} events received`);
    console.log(`Node 3 (No filter): ${nodeEvents.node3.length} events received`);
    // Demonstrate conflict resolution
    console.log('\nSimulating conflict resolution:');
    await this.pubsubManager.publish('database.conflict', {
      modelName: 'Post',
      modelId: syncPost.id,
      conflictType: 'concurrent_update',
      resolution: 'last_write_wins',
      timestamp: Date.now()
    });
    console.log('');
  }

  /**
   * Publishes heartbeat, resource, rebalance and consensus messages on
   * coordination topics and counts the events observed by the subscribers.
   */
  async crossNodeCommunicationExamples(): Promise<void> {
    console.log('🌐 Cross-Node Communication Examples');
    console.log('====================================\n');
    // Simulate coordination between nodes
    const coordinationEvents: any[] = [];
    // Set up coordination topics
    await this.pubsubManager.subscribe('node.heartbeat', (event) => {
      coordinationEvents.push(event);
      console.log(`💓 Heartbeat from ${event.source}: ${event.data.status}`);
    });
    await this.pubsubManager.subscribe('node.resource', (event) => {
      coordinationEvents.push(event);
      console.log(`📊 Resource update from ${event.source}: ${event.data.type}`);
    });
    await this.pubsubManager.subscribe('cluster.rebalance', (event) => {
      coordinationEvents.push(event);
      console.log(`⚖️ Cluster rebalance initiated: ${event.data.reason}`);
    });
    console.log('Cross-node communication channels established\n');
    // Simulate node communication
    await this.pubsubManager.publish('node.heartbeat', {
      status: 'healthy',
      load: 0.65,
      memory: '2.1GB',
      connections: 42
    });
    await this.pubsubManager.publish('node.resource', {
      type: 'storage',
      available: '5.2TB',
      used: '2.8TB',
      threshold: 0.8
    });
    await this.pubsubManager.publish('cluster.rebalance', {
      reason: 'load_balancing',
      nodes: ['node-a', 'node-b', 'node-c'],
      strategy: 'round_robin'
    });
    // Demonstrate distributed consensus
    console.log('Initiating distributed consensus...');
    await this.pubsubManager.publish('consensus.propose', {
      proposalId: `proposal-${Date.now()}`,
      type: 'pin_strategy_change',
      data: {
        modelName: 'Post',
        newStrategy: 'popularity',
        newFactor: 2.0
      },
      requiredVotes: 3
    });
    await new Promise(resolve => setTimeout(resolve, 1000));
    console.log(`\nCommunication events processed: ${coordinationEvents.length}`);
    console.log('Cross-node coordination completed successfully');
    console.log('');
  }

  /**
   * Shows cache warming (pre-pinning popular posts), pinning performance
   * analysis, and pattern-driven strategy optimization.
   */
  async performanceOptimizationExamples(): Promise<void> {
    console.log('🚀 Performance Optimization Examples');
    console.log('====================================\n');
    // Demonstrate intelligent cache warming
    console.log('1. Intelligent Cache Warming:');
    const popularPosts = await Post
      .where('isPublic', '=', true)
      .where('likeCount', '>', 10)
      .orderBy('likeCount', 'desc')
      .limit(10)
      .exec();
    // Pre-pin popular content
    for (const post of popularPosts) {
      const hash = `hash-${post.id}-content`;
      await this.pinningManager.pinContent(hash, 'Post', post.id, {
        title: post.title,
        likeCount: post.likeCount,
        priority: 'high'
      });
    }
    console.log(`Pre-pinned ${popularPosts.length} popular posts for better performance`);
    // Demonstrate predictive pinning
    console.log('\n2. Predictive Pinning:');
    const analysis = this.pinningManager.analyzePerformance();
    console.log(`Current hit rate: ${(analysis.hitRate * 100).toFixed(2)}%`);
    console.log(`Storage efficiency: ${(analysis.storageEfficiency * 100).toFixed(2)}%`);
    console.log(`Average priority: ${analysis.averagePriority.toFixed(3)}`);
    // Simulate access pattern analysis
    const accessPatterns = this.analyzeAccessPatterns();
    console.log(`\n3. Access Pattern Analysis:`);
    console.log(`Peak access time: ${accessPatterns.peakHour}:00`);
    console.log(`Most accessed content type: ${accessPatterns.mostAccessedType}`);
    console.log(`Cache miss rate: ${(accessPatterns.missRate * 100).toFixed(2)}%`);
    // Optimize based on patterns
    if (accessPatterns.missRate > 0.1) { // 10% miss rate
      console.log('\nHigh miss rate detected, optimizing...');
      await this.optimizePinningStrategy(accessPatterns);
    }
    console.log('');
  }

  /**
   * Pins low-priority test content, artificially ages half of it, triggers
   * a cleanup pass, and reports how much space was reclaimed.
   */
  async intelligentCleanupExamples(): Promise<void> {
    console.log('🧹 Intelligent Cleanup Examples');
    console.log('===============================\n');
    // Get initial stats
    const initialStats = this.pinningManager.getStats();
    console.log('Initial state:');
    console.log(`- Pinned items: ${initialStats.totalPinned}`);
    console.log(`- Total size: ${(initialStats.totalSize / 1024).toFixed(2)} KB`);
    // Create some test content that will be cleaned up
    const testHashes = [];
    for (let i = 0; i < 10; i++) {
      const hash = `test-cleanup-${i}-${Date.now()}`;
      testHashes.push(hash);
      await this.pinningManager.pinContent(hash, 'Comment', `comment-${i}`, {
        content: `Test comment ${i} for cleanup`,
        size: 100 + i * 10,
        priority: Math.random() * 0.3 // Low priority
      });
    }
    console.log(`\nCreated ${testHashes.length} test items for cleanup`);
    // Simulate time passing (items become stale)
    console.log('Simulating passage of time...');
    // Artificially age some items
    // NOTE(review): reaches into PinningManager internals via `as any`;
    // acceptable for a demo, but brittle against refactors.
    for (let i = 0; i < 5; i++) {
      const hash = testHashes[i];
      const item = (this.pinningManager as any).pinnedItems.get(hash);
      if (item) {
        item.lastAccessed = Date.now() - (8 * 24 * 60 * 60 * 1000); // 8 days ago
        item.accessCount = 1; // Very low access
      }
    }
    // Trigger cleanup
    console.log('Triggering intelligent cleanup...');
    // NOTE(review): cleanedItems is never used — consider logging it or
    // dropping the binding.
    const cleanedItems = await (this.pinningManager as any).performCleanup();
    const finalStats = this.pinningManager.getStats();
    console.log('\nCleanup results:');
    console.log(`- Items after cleanup: ${finalStats.totalPinned}`);
    console.log(`- Size freed: ${((initialStats.totalSize - finalStats.totalSize) / 1024).toFixed(2)} KB`);
    console.log(`- Cleanup efficiency: ${((initialStats.totalPinned - finalStats.totalPinned) / initialStats.totalPinned * 100).toFixed(2)}%`);
    // Demonstrate memory optimization
    console.log('\nMemory optimization metrics:');
    const memoryAnalysis = this.analyzeMemoryUsage();
    console.log(`- Memory utilization: ${(memoryAnalysis.utilization * 100).toFixed(2)}%`);
    console.log(`- Fragmentation ratio: ${(memoryAnalysis.fragmentation * 100).toFixed(2)}%`);
    console.log(`- Recommended cleanup interval: ${memoryAnalysis.recommendedInterval}ms`);
    console.log('');
  }

  // Helper methods for analysis and optimization

  /** Returns hard-coded simulated access-pattern statistics (demo data only). */
  private analyzeAccessPatterns(): any {
    // Simulate access pattern analysis
    return {
      peakHour: 14, // 2 PM
      mostAccessedType: 'Post',
      missRate: 0.15,
      trendsDetected: ['increased_mobile_access', 'peak_evening_hours'],
      recommendations: ['increase_post_pinning', 'reduce_comment_pinning']
    };
  }

  /** Adjusts pinning rules based on the supplied (simulated) access patterns. */
  private async optimizePinningStrategy(patterns: any): Promise<void> {
    console.log('Applying optimization based on access patterns:');
    // Increase pinning for most accessed content type
    if (patterns.mostAccessedType === 'Post') {
      this.pinningManager.setPinningRule('Post', {
        strategy: 'popularity',
        factor: 2.0,
        maxPins: 150 // Increased from 100
      });
      console.log('- Increased Post pinning capacity');
    }
    // Adjust cleanup frequency based on miss rate
    if (patterns.missRate > 0.2) {
      // More aggressive cleanup needed
      console.log('- Enabled more aggressive cleanup');
    }
    console.log('Optimization complete');
  }

  /** Derives simulated memory metrics from the pinning stats. */
  private analyzeMemoryUsage(): any {
    const stats = this.pinningManager.getStats();
    return {
      utilization: stats.totalSize / (10 * 1024 * 1024), // Assuming 10MB limit
      fragmentation: 0.12, // 12% fragmentation
      recommendedInterval: stats.totalPinned > 100 ? 30000 : 60000, // More frequent cleanup if many items
      hotspots: ['user_profiles', 'recent_posts'],
      coldSpots: ['old_comments', 'archived_content']
    };
  }

  /**
   * Demonstrates self-healing (failure + rebalance events), adaptive
   * pinning optimization, and predictive scaling driven by simulated load
   * forecasts.
   */
  async demonstrateAdvancedAutomation(): Promise<void> {
    console.log('🤖 Advanced Automation Demonstration');
    console.log('===================================\n');
    // Demonstrate self-healing capabilities
    console.log('1. Self-Healing System:');
    // Simulate node failure detection
    await this.pubsubManager.publish('node.failure', {
      nodeId: 'node-beta',
      reason: 'network_timeout',
      lastSeen: Date.now() - 30000
    });
    // Automatic rebalancing
    await this.pubsubManager.publish('cluster.rebalance', {
      trigger: 'node_failure',
      failedNode: 'node-beta',
      redistribution: {
        'node-alpha': 0.6,
        'node-gamma': 0.4
      }
    });
    console.log('Self-healing sequence initiated and completed');
    // Demonstrate adaptive optimization
    console.log('\n2. Adaptive Optimization:');
    const performance = this.pinningManager.analyzePerformance();
    if (performance.hitRate < 0.8) {
      console.log('Low hit rate detected, adapting pinning strategy...');
      // Auto-adjust pinning factors
      this.pinningManager.setPinningRule('Post', {
        strategy: 'popularity',
        factor: performance.averagePriority + 0.5 // Increase based on current performance
      });
      console.log('Pinning strategy adapted automatically');
    }
    // Demonstrate predictive scaling
    console.log('\n3. Predictive Scaling:');
    const predictions = this.generateLoadPredictions();
    console.log(`Predicted load increase: ${predictions.expectedIncrease}%`);
    console.log(`Recommended action: ${predictions.recommendation}`);
    if (predictions.expectedIncrease > 50) {
      console.log('Preemptively scaling resources...');
      await this.pubsubManager.publish('cluster.scale', {
        type: 'predictive',
        factor: 1.5,
        reason: 'anticipated_load_increase'
      });
    }
    console.log('Advanced automation demonstration completed\n');
  }

  /** Produces a randomized, simulated load forecast (not deterministic). */
  private generateLoadPredictions(): any {
    // Simulate machine learning-based load prediction
    return {
      expectedIncrease: Math.random() * 100,
      confidence: 0.85,
      timeframe: '2 hours',
      recommendation: 'scale_up',
      factors: ['user_growth', 'content_creation_spike', 'viral_post_detected']
    };
  }
}
// Usage function
export async function runAutomaticFeaturesExamples(
orbitDBService: any,
ipfsService: any
): Promise<void> {
const framework = new SocialPlatformFramework();
try {
await framework.initialize(orbitDBService, ipfsService, 'development');
// Initialize automatic features (would be done in framework initialization)
const pinningManager = new PinningManager(ipfsService, {
maxTotalPins: 1000,
maxTotalSize: 50 * 1024 * 1024, // 50MB
cleanupIntervalMs: 30000 // 30 seconds for demo
});
const pubsubManager = new PubSubManager(ipfsService, {
enabled: true,
autoPublishModelEvents: true,
autoPublishDatabaseEvents: true,
topicPrefix: 'debros-demo'
});
await pubsubManager.initialize();
// Inject into framework for examples
(framework as any).pinningManager = pinningManager;
(framework as any).pubsubManager = pubsubManager;
// Ensure sample data exists
await createSampleDataForAutomaticFeatures(framework);
// Run all examples
const examples = new AutomaticFeaturesExamples(framework);
await examples.runAllExamples();
await examples.demonstrateAdvancedAutomation();
// Show final statistics
console.log('📊 Final System Statistics:');
console.log('==========================');
const pinningStats = pinningManager.getStats();
const pubsubStats = pubsubManager.getStats();
const frameworkStats = await framework.getFrameworkStats();
console.log('\nPinning System:');
console.log(`- Total pinned: ${pinningStats.totalPinned}`);
console.log(`- Total size: ${(pinningStats.totalSize / 1024).toFixed(2)} KB`);
console.log(`- Active strategies: ${Object.keys(pinningStats.strategies).join(', ')}`);
console.log('\nPubSub System:');
console.log(`- Messages published: ${pubsubStats.totalPublished}`);
console.log(`- Messages received: ${pubsubStats.totalReceived}`);
console.log(`- Active subscriptions: ${pubsubStats.totalSubscriptions}`);
console.log(`- Average latency: ${pubsubStats.averageLatency.toFixed(2)}ms`);
console.log('\nFramework:');
console.log(`- Models registered: ${frameworkStats.registeredModels.length}`);
console.log(`- Cache hit rate: ${(frameworkStats.cache.query.stats.hitRate * 100).toFixed(2)}%`);
// Cleanup
await pinningManager.shutdown();
await pubsubManager.shutdown();
} catch (error) {
console.error('❌ Automatic features examples failed:', error);
} finally {
await framework.stop();
}
}
/**
 * Seeds the framework with deterministic demo data: five users, fifteen
 * posts spread over three popularity tiers, and twenty-five comments tying
 * them together. Failures are logged as warnings and never propagated.
 */
async function createSampleDataForAutomaticFeatures(framework: SocialPlatformFramework): Promise<void> {
  console.log('🗄️ Creating sample data for automatic features...\n');
  try {
    // Users with varied activity patterns.
    const users = [];
    for (let n = 0; n < 5; n++) {
      const created = await framework.createUser({
        username: `autouser${n}`,
        email: `autouser${n}@example.com`,
        bio: `Automatic features test user ${n}`
      });
      users.push(created);
    }
    // Posts cycling through Popular / Normal / Unpopular flavors.
    const posts = [];
    for (let n = 0; n < 15; n++) {
      const author = users[n % users.length];
      const flavor = ['Popular', 'Normal', 'Unpopular'][n % 3];
      const post = await framework.createPost(author.id, {
        title: `Auto Post ${n}: ${flavor} Content`,
        content: `This is test content for automatic features. Post ${n} with length ${100 + n * 50}.`,
        tags: ['automation', 'testing', n % 2 === 0 ? 'popular' : 'normal'],
        isPublic: true
      });
      // Assign tiered like counts: high for the first five, medium for the
      // next five, near-zero for the rest.
      const likes = n < 5 ? 20 + n * 5 : n < 10 ? 5 + n : n % 3;
      (post as any).likeCount = likes;
      await post.save();
      posts.push(post);
    }
    // Comments establish user/post relationships.
    for (let n = 0; n < 25; n++) {
      const commenter = users[n % users.length];
      const target = posts[n % posts.length];
      await framework.createComment(
        commenter.id,
        target.id,
        `Auto comment ${n}: This is a test comment for automatic features testing.`
      );
    }
    console.log(`✅ Created ${users.length} users, ${posts.length} posts, and 25 comments\n`);
  } catch (error) {
    console.warn('⚠️ Some sample data creation failed:', error);
  }
}

114
examples/basic-usage.ts Normal file
View File

@ -0,0 +1,114 @@
import { BaseModel, Model, Field, BelongsTo, HasMany } from '../src/framework';
// Example User model: globally scoped docstore with always-on pinning.
// NOTE(review): @HasMany(Post, ...) references Post before its class
// declaration below; if the decorator evaluates its argument eagerly this
// is a temporal-dead-zone error — confirm the decorator accepts forward
// references (e.g. via lazy resolution).
@Model({
  scope: 'global',
  type: 'docstore',
  pinning: { strategy: 'fixed', factor: 2 }
})
export class User extends BaseModel {
  @Field({ type: 'string', required: true })
  username!: string;

  @Field({ type: 'string', required: true })
  email!: string;

  // Optional free-form profile text.
  @Field({ type: 'string', required: false })
  bio?: string;

  // Denormalized counter; defaults to 0 for new users.
  @Field({ type: 'number', default: 0 })
  postCount!: number;

  // One-to-many: posts whose `userId` points at this user.
  @HasMany(Post, 'userId')
  posts!: Post[];
}
// Example Post model: user-scoped docstore pinned by popularity.
@Model({
  scope: 'user',
  type: 'docstore',
  pinning: { strategy: 'popularity', factor: 3 }
})
export class Post extends BaseModel {
  @Field({ type: 'string', required: true })
  title!: string;

  @Field({ type: 'string', required: true })
  content!: string;

  // Foreign key to the authoring User.
  @Field({ type: 'string', required: true })
  userId!: string;

  // Posts are public unless explicitly made private.
  @Field({ type: 'boolean', default: true })
  isPublic!: boolean;

  @Field({ type: 'array', default: [] })
  tags!: string[];

  // Inverse of User.posts.
  @BelongsTo(User, 'userId')
  author!: User;

  // NOTE(review): forward reference to Comment (declared below) — see the
  // decorator-evaluation caveat on User.
  @HasMany(Comment, 'postId')
  comments!: Comment[];
}
// Example Comment model: user-scoped docstore linking a User to a Post.
@Model({
  scope: 'user',
  type: 'docstore'
})
export class Comment extends BaseModel {
  @Field({ type: 'string', required: true })
  content!: string;

  // Foreign key to the commenting User.
  @Field({ type: 'string', required: true })
  userId!: string;

  // Foreign key to the Post being commented on.
  @Field({ type: 'string', required: true })
  postId!: string;

  @BelongsTo(User, 'userId')
  author!: User;

  @BelongsTo(Post, 'postId')
  post!: Post;
}
// Example usage (this would work once database integration is complete)
async function exampleUsage() {
  try {
    // Create a new user
    const user = new User({
      username: 'john_doe',
      email: 'john@example.com',
      bio: 'A passionate developer'
    });
    // The decorators ensure validation
    await user.save(); // This will validate fields and run hooks
    // Create a post authored by that user
    const post = new Post({
      title: 'My First Post',
      content: 'This is my first post using the DebrosFramework!',
      userId: user.id,
      tags: ['framework', 'orbitdb', 'ipfs']
    });
    await post.save();
    // Query posts (these methods will work once QueryExecutor is implemented)
    // const publicPosts = await Post
    //   .where('isPublic', '=', true)
    //   .load(['author'])
    //   .orderBy('createdAt', 'desc')
    //   .limit(10)
    //   .exec();
    console.log('Models created successfully!');
  } catch (error) {
    // Demo-level handling: surface the failure, don't rethrow.
    console.error('Error:', error);
  }
}
export { exampleUsage };

View File

@ -0,0 +1,793 @@
/**
* Complete DebrosFramework Example
*
* This example demonstrates the complete DebrosFramework in action,
* showcasing all major features and capabilities in a real-world scenario:
* - Framework initialization with all components
* - Model definition with decorators and relationships
* - Database operations and querying
* - Automatic features (pinning, PubSub, caching)
* - Migration system for schema evolution
* - Performance monitoring and optimization
* - Error handling and recovery
*/
import {
DebrosFramework,
BaseModel,
Model,
Field,
BelongsTo,
HasMany,
BeforeCreate,
AfterCreate,
createMigration,
DEVELOPMENT_CONFIG,
PRODUCTION_CONFIG
} from '../src/framework';
// Define comprehensive models for a decentralized social platform

// Global user registry: always pinned, hash-sharded by id across 8 shards.
@Model({
  scope: 'global',
  type: 'docstore',
  pinning: { strategy: 'fixed', factor: 3 },
  sharding: { strategy: 'hash', count: 8, key: 'id' }
})
export class User extends BaseModel {
  @Field({ type: 'string', required: true, unique: true })
  username!: string;

  @Field({ type: 'string', required: true, unique: true })
  email!: string;

  @Field({ type: 'string', required: false })
  bio?: string;

  @Field({ type: 'string', required: false })
  profilePicture?: string;

  @Field({ type: 'boolean', default: false })
  isVerified!: boolean;

  // Denormalized follower/following counters.
  @Field({ type: 'number', default: 0 })
  followerCount!: number;

  @Field({ type: 'number', default: 0 })
  followingCount!: number;

  // Free-form per-user settings bag (populated by setupUserDefaults).
  @Field({ type: 'object', default: {} })
  settings!: any;

  @HasMany(Post, 'userId')
  posts!: Post[];

  @HasMany(Follow, 'followerId')
  following!: Follow[];

  // Pre-create validation: rejects short usernames and malformed emails.
  // NOTE(review): the email check only tests for '@' — intentionally loose?
  @BeforeCreate()
  async validateUser() {
    if (this.username.length < 3) {
      throw new Error('Username must be at least 3 characters long');
    }
    if (!this.email.includes('@')) {
      throw new Error('Invalid email format');
    }
  }

  // Post-create hook: seeds default settings.
  // NOTE(review): assigns settings in memory but does not call save() —
  // confirm the framework persists mutations made in AfterCreate hooks.
  @AfterCreate()
  async setupUserDefaults() {
    this.settings = {
      theme: 'light',
      notifications: true,
      privacy: 'public',
      createdAt: Date.now()
    };
  }

  // Custom methods

  /** Applies the given profile fields and persists the user. */
  async updateProfile(updates: { bio?: string; profilePicture?: string }): Promise<void> {
    Object.assign(this, updates);
    await this.save();
  }

  /** Returns this user's public posts, most-liked first. */
  async getPopularPosts(limit: number = 10): Promise<Post[]> {
    return await Post
      .whereUser(this.id)
      .where('isPublic', '=', true)
      .orderBy('likeCount', 'desc')
      .limit(limit)
      .exec();
  }
}
// User-scoped posts pinned by popularity.
@Model({
  scope: 'user',
  type: 'docstore',
  pinning: { strategy: 'popularity', factor: 1.5 }
})
export class Post extends BaseModel {
  @Field({ type: 'string', required: true })
  title!: string;

  @Field({ type: 'string', required: true })
  content!: string;

  @Field({ type: 'string', required: true })
  userId!: string;

  @Field({ type: 'boolean', default: true })
  isPublic!: boolean;

  @Field({ type: 'array', default: [] })
  tags!: string[];

  // Denormalized engagement counters.
  @Field({ type: 'number', default: 0 })
  likeCount!: number;

  @Field({ type: 'number', default: 0 })
  commentCount!: number;

  // 'text' by default; promoted to 'rich' when the content contains links.
  @Field({ type: 'string', default: 'text' })
  contentType!: string;

  // Derived content metadata (see processContent).
  @Field({ type: 'object', default: {} })
  metadata!: any;

  @BelongsTo(User, 'userId')
  author!: User;

  @HasMany(Comment, 'postId')
  comments!: Comment[];

  // Pre-create hook: derives word count, link presence, hashtags, and an
  // estimated read time from the content.
  @BeforeCreate()
  async processContent() {
    // Auto-detect content type and extract metadata
    this.metadata = {
      wordCount: this.content.split(' ').length,
      hasLinks: /https?:\/\//.test(this.content),
      hashtags: this.extractHashtags(),
      readTime: Math.ceil(this.content.split(' ').length / 200) // Reading speed
    };
    if (this.metadata.hasLinks) {
      this.contentType = 'rich';
    }
  }

  /** Extracts lowercase hashtags (without '#') from the content. */
  private extractHashtags(): string[] {
    const hashtags = this.content.match(/#\w+/g) || [];
    return hashtags.map(tag => tag.slice(1).toLowerCase());
  }

  // NOTE(review): despite the name, this only ever increments — there is no
  // per-user un-like state. Consider renaming to incrementLikes() or
  // tracking likers if true toggle semantics are wanted.
  async toggleLike(): Promise<void> {
    this.likeCount += 1;
    await this.save();
  }

  /** Creates a comment on this post and bumps the denormalized counter. */
  async addComment(userId: string, content: string): Promise<Comment> {
    const comment = await Comment.create({
      content,
      userId,
      postId: this.id
    });
    this.commentCount += 1;
    await this.save();
    return comment;
  }
}
// User-scoped comments; supports threading via the optional parentId.
@Model({
  scope: 'user',
  type: 'docstore'
})
export class Comment extends BaseModel {
  @Field({ type: 'string', required: true })
  content!: string;

  @Field({ type: 'string', required: true })
  userId!: string;

  @Field({ type: 'string', required: true })
  postId!: string;

  // Parent comment id when this is a threaded reply; absent for top-level.
  @Field({ type: 'string', required: false })
  parentId?: string;

  @Field({ type: 'number', default: 0 })
  likeCount!: number;

  // Nesting depth within the reply thread (0 = top level).
  @Field({ type: 'number', default: 0 })
  threadDepth!: number;

  @BelongsTo(User, 'userId')
  author!: User;

  @BelongsTo(Post, 'postId')
  post!: Post;

  // Self-referential relationship for threaded replies.
  @BelongsTo(Comment, 'parentId')
  parent?: Comment;

  @HasMany(Comment, 'parentId')
  replies!: Comment[];
}
// Global follow edge between two users, stored in a keyvalue store.
@Model({
  scope: 'global',
  type: 'keyvalue'
})
export class Follow extends BaseModel {
  // The user doing the following.
  @Field({ type: 'string', required: true })
  followerId!: string;

  // The user being followed.
  @Field({ type: 'string', required: true })
  followingId!: string;

  // True when both users follow each other.
  @Field({ type: 'boolean', default: false })
  isMutual!: boolean;

  @Field({ type: 'string', default: 'general' })
  category!: string;

  @BelongsTo(User, 'followerId')
  follower!: User;

  @BelongsTo(User, 'followingId')
  following!: User;
}
export class CompleteFrameworkExample {
private framework: DebrosFramework;
private sampleUsers: User[] = [];
private samplePosts: Post[] = [];
// Builds the framework on top of the development defaults, enabling every
// automatic feature and verbose monitoring for the demo.
constructor() {
  // Initialize framework with comprehensive configuration
  this.framework = new DebrosFramework({
    ...DEVELOPMENT_CONFIG,
    features: {
      autoMigration: true,
      automaticPinning: true,
      pubsub: true,
      queryCache: true,
      relationshipCache: true
    },
    performance: {
      queryTimeout: 30000,        // 30s per query
      migrationTimeout: 300000,   // 5 min per migration
      maxConcurrentOperations: 200,
      batchSize: 100
    },
    monitoring: {
      enableMetrics: true,
      logLevel: 'info',
      metricsInterval: 30000
    }
  });
}
/**
 * Drives the full demo in order: init, migrations, CRUD, queries,
 * relationships, automatic features, performance, error handling, stats.
 * Rethrows on failure so callers can detect a broken run; always cleans up.
 */
async runCompleteExample(): Promise<void> {
  console.log('🎯 Running Complete DebrosFramework Example');
  console.log('==========================================\n');
  try {
    await this.initializeFramework();
    await this.setupModelsAndMigrations();
    await this.demonstrateModelOperations();
    await this.demonstrateQuerySystem();
    await this.demonstrateRelationships();
    await this.demonstrateAutomaticFeatures();
    await this.demonstratePerformanceOptimization();
    await this.demonstrateErrorHandling();
    await this.showFrameworkStatistics();
    console.log('✅ Complete framework example finished successfully!\n');
  } catch (error) {
    console.error('❌ Framework example failed:', error);
    throw error;
  } finally {
    await this.cleanup();
  }
}
/**
 * Initializes the framework against mock OrbitDB/IPFS services and
 * registers the four demo models, then reports health/environment status.
 */
async initializeFramework(): Promise<void> {
  console.log('🚀 Initializing DebrosFramework');
  console.log('===============================\n');
  // In a real application, you would pass actual OrbitDB and IPFS instances
  const mockOrbitDB = this.createMockOrbitDB();
  const mockIPFS = this.createMockIPFS();
  await this.framework.initialize(mockOrbitDB, mockIPFS);
  // Register models
  this.framework.registerModel(User);
  this.framework.registerModel(Post);
  this.framework.registerModel(Comment);
  this.framework.registerModel(Follow);
  console.log('Framework initialization completed');
  console.log(`Status: ${this.framework.getStatus().healthy ? 'Healthy' : 'Unhealthy'}`);
  console.log(`Environment: ${this.framework.getStatus().environment}`);
  console.log('');
}
/**
 * Builds two sample migrations (profile enhancements for User, metadata
 * for Post), registers them, and runs any that are pending. Migrations run
 * with dryRun disabled and stop on the first error.
 */
async setupModelsAndMigrations(): Promise<void> {
  console.log('🔄 Setting Up Models and Migrations');
  console.log('===================================\n');
  // Create sample migrations
  const addProfileEnhancements = createMigration(
    'add_profile_enhancements',
    '1.1.0',
    'Add profile enhancements to User model'
  )
    .description('Add profile picture and verification status to users')
    .addField('User', 'profilePicture', {
      type: 'string',
      required: false
    })
    .addField('User', 'isVerified', {
      type: 'boolean',
      default: false
    })
    .build();
  const addPostMetadata = createMigration(
    'add_post_metadata',
    '1.2.0',
    'Add metadata to Post model'
  )
    .description('Add content metadata and engagement metrics')
    .addField('Post', 'contentType', {
      type: 'string',
      default: 'text'
    })
    .addField('Post', 'metadata', {
      type: 'object',
      default: {}
    })
    // Backfill existing posts with a minimal metadata object.
    .transformData('Post', (post) => {
      return {
        ...post,
        metadata: {
          wordCount: post.content ? post.content.split(' ').length : 0,
          transformedAt: Date.now()
        }
      };
    })
    .build();
  // Register migrations
  await this.framework.registerMigration(addProfileEnhancements);
  await this.framework.registerMigration(addPostMetadata);
  // Run pending migrations
  const pendingMigrations = this.framework.getPendingMigrations();
  console.log(`Found ${pendingMigrations.length} pending migrations`);
  if (pendingMigrations.length > 0) {
    const migrationManager = this.framework.getMigrationManager();
    if (migrationManager) {
      const results = await migrationManager.runPendingMigrations({
        dryRun: false,
        stopOnError: true
      });
      console.log(`Completed ${results.filter(r => r.success).length} migrations`);
    }
  }
  console.log('');
}
/**
 * Seed demo data: 5 users, 10 posts, 15 random comments, and random follow
 * edges. Created users/posts are retained on `this.sampleUsers` /
 * `this.samplePosts` for use by the later demo sections.
 */
async demonstrateModelOperations(): Promise<void> {
  console.log('👥 Demonstrating Model Operations');
  console.log('=================================\n');
  // Create users with validation and hooks
  console.log('Creating users...');
  for (let i = 0; i < 5; i++) {
    const user = await User.create({
      username: `frameuser${i}`,
      email: `frameuser${i}@example.com`,
      bio: `Framework test user ${i} with comprehensive features`,
      isVerified: i < 2 // First two users are verified
    });
    this.sampleUsers.push(user);
    console.log(`✅ Created user: ${user.username} (verified: ${user.isVerified})`);
  }
  // Create posts with automatic content processing
  console.log('\nCreating posts...');
  for (let i = 0; i < 10; i++) {
    // Round-robin authorship across the sample users.
    const user = this.sampleUsers[i % this.sampleUsers.length];
    const post = await Post.create({
      title: `Framework Demo Post ${i + 1}`,
      content: `This is a comprehensive demo post ${i + 1} showcasing the DebrosFramework capabilities. #framework #demo #orbitdb ${i % 3 === 0 ? 'https://example.com' : ''}`,
      userId: user.id,
      isPublic: true,
      tags: ['framework', 'demo', 'test']
    });
    this.samplePosts.push(post);
    console.log(`✅ Created post: "${post.title}" by ${user.username}`);
    // contentType/metadata are presumably filled by the add_post_metadata
    // migration or model hooks — TODO confirm they exist on fresh creates.
    console.log(` Content type: ${post.contentType}, Word count: ${post.metadata.wordCount}`);
  }
  // Create comments and follows
  console.log('\nCreating interactions...');
  let commentCount = 0;
  let followCount = 0;
  for (let i = 0; i < 15; i++) {
    // Random commenter on a random post.
    const user = this.sampleUsers[Math.floor(Math.random() * this.sampleUsers.length)];
    const post = this.samplePosts[Math.floor(Math.random() * this.samplePosts.length)];
    await Comment.create({
      content: `This is comment ${i + 1} on the framework demo post. Great work!`,
      userId: user.id,
      postId: post.id
    });
    commentCount++;
  }
  // Create follow relationships: each ordered user pair follows with ~40% probability.
  for (let i = 0; i < this.sampleUsers.length; i++) {
    for (let j = 0; j < this.sampleUsers.length; j++) {
      if (i !== j && Math.random() > 0.6) {
        await Follow.create({
          followerId: this.sampleUsers[i].id,
          followingId: this.sampleUsers[j].id,
          category: 'general'
        });
        followCount++;
      }
    }
  }
  console.log(`✅ Created ${commentCount} comments and ${followCount} follow relationships`);
  console.log('');
}
/**
 * Walk through the query builder: compound filters with multi-key ordering,
 * user-scoped queries, aggregations, and eager-loaded relationships.
 * Depends on the data seeded by demonstrateModelOperations().
 */
async demonstrateQuerySystem(): Promise<void> {
  console.log('🔍 Demonstrating Advanced Query System');
  console.log('======================================\n');
  // Complex queries with caching
  console.log('1. Complex filtering and sorting:');
  const popularPosts = await Post
    .where('isPublic', '=', true)
    .where('likeCount', '>', 0)
    .orderBy('likeCount', 'desc')
    .orderBy('createdAt', 'desc') // tie-breaker: newest first
    .limit(5)
    .exec();
  console.log(`Found ${popularPosts.length} popular posts`);
  // User-scoped queries
  console.log('\n2. User-scoped queries:');
  const userPosts = await Post
    .whereUser(this.sampleUsers[0].id)
    .where('isPublic', '=', true)
    .exec();
  console.log(`User ${this.sampleUsers[0].username} has ${userPosts.length} public posts`);
  // Aggregation queries
  console.log('\n3. Aggregation queries:');
  const totalPosts = await Post.count();
  const totalPublicPosts = await Post.where('isPublic', '=', true).count();
  const averageLikes = await Post.avg('likeCount');
  console.log(`Total posts: ${totalPosts}`);
  console.log(`Public posts: ${totalPublicPosts}`);
  console.log(`Average likes: ${averageLikes.toFixed(2)}`);
  // Query with relationships
  console.log('\n4. Queries with relationships:');
  const postsWithAuthors = await Post
    .where('isPublic', '=', true)
    .with(['author']) // eager-load the BelongsTo author relation
    .limit(3)
    .exec();
  console.log('Posts with preloaded authors:');
  postsWithAuthors.forEach(post => {
    const author = post.getRelation('author');
    console.log(`- "${post.title}" by ${author ? author.username : 'Unknown'}`);
  });
  console.log('');
}
/**
 * Show the three relationship-loading modes: lazy per-instance loading,
 * batched eager loading via the RelationshipManager, and constrained
 * loading with a per-relation query callback.
 */
async demonstrateRelationships(): Promise<void> {
  console.log('🔗 Demonstrating Relationship System');
  console.log('====================================\n');
  const user = this.sampleUsers[0];
  const post = this.samplePosts[0];
  // Lazy loading
  console.log('1. Lazy loading relationships:');
  console.log(`Loading posts for user: ${user.username}`);
  const userPosts = await user.loadRelation('posts');
  console.log(`Loaded ${Array.isArray(userPosts) ? userPosts.length : 0} posts`);
  console.log(`\nLoading comments for post: "${post.title}"`);
  const comments = await post.loadRelation('comments');
  console.log(`Loaded ${Array.isArray(comments) ? comments.length : 0} comments`);
  // Eager loading
  console.log('\n2. Eager loading for multiple items:');
  const relationshipManager = this.framework.getRelationshipManager();
  if (relationshipManager) {
    // Batch-loads both relations for all three posts in one pass.
    await relationshipManager.eagerLoadRelationships(
      this.samplePosts.slice(0, 3),
      ['author', 'comments']
    );
    console.log('Eager loaded author and comments for 3 posts:');
    this.samplePosts.slice(0, 3).forEach((post, index) => {
      const author = post.getRelation('author');
      const comments = post.getRelation('comments') || [];
      console.log(`${index + 1}. "${post.title}" by ${author ? author.username : 'Unknown'} (${comments.length} comments)`);
    });
  }
  // Relationship constraints
  console.log('\n3. Constrained relationship loading:');
  const recentComments = await post.loadRelationWithConstraints('comments', (query) =>
    query.where('createdAt', '>', Date.now() - 86400000) // Last 24 hours
      .orderBy('createdAt', 'desc')
      .limit(3)
  );
  console.log(`Loaded ${Array.isArray(recentComments) ? recentComments.length : 0} recent comments`);
  console.log('');
}
/**
 * Exercise the two "automatic" subsystems: the PinningManager (per-model
 * pinning rules + access tracking) and the PubSubManager (wildcard
 * subscription plus publish of model lifecycle events).
 */
async demonstrateAutomaticFeatures(): Promise<void> {
  console.log('🤖 Demonstrating Automatic Features');
  console.log('===================================\n');
  // Pinning system
  console.log('1. Automatic pinning system:');
  const pinningManager = this.framework.getPinningManager();
  if (pinningManager) {
    // Setup pinning rules
    pinningManager.setPinningRule('Post', {
      strategy: 'popularity',
      factor: 1.5,
      maxPins: 50
    });
    pinningManager.setPinningRule('User', {
      strategy: 'fixed',
      factor: 2.0,
      maxPins: 20
    });
    // Simulate content pinning
    for (let i = 0; i < 5; i++) {
      const post = this.samplePosts[i];
      const hash = `content-hash-${post.id}`;
      // Simulate access — two hits so the popularity strategy has signal.
      await pinningManager.recordAccess(hash);
      await pinningManager.recordAccess(hash);
      const pinned = await pinningManager.pinContent(hash, 'Post', post.id, {
        title: post.title,
        likeCount: post.likeCount
      });
      console.log(`Post "${post.title}": ${pinned ? 'PINNED' : 'NOT PINNED'}`);
    }
    const pinningStats = pinningManager.getStats();
    console.log(`Pinning stats: ${pinningStats.totalPinned} items pinned`);
  }
  // PubSub system
  console.log('\n2. PubSub event system:');
  const pubsubManager = this.framework.getPubSubManager();
  if (pubsubManager) {
    let eventCount = 0;
    // Subscribe to model events ('model.*' presumably matches every
    // model-lifecycle topic — confirm wildcard semantics in PubSubManager).
    await pubsubManager.subscribe('model.*', (event) => {
      eventCount++;
      console.log(`📡 Event: ${event.type} for ${event.data?.modelName || 'unknown'}`);
    });
    // Simulate model events
    await pubsubManager.publish('model.created', {
      modelName: 'Post',
      modelId: 'demo-post-1',
      userId: 'demo-user-1'
    });
    await pubsubManager.publish('model.updated', {
      modelName: 'User',
      modelId: 'demo-user-1',
      changes: { bio: 'Updated bio' }
    });
    // Wait for event processing — delivery is asynchronous; 1s is a demo
    // heuristic, not a guarantee that all events have arrived.
    await new Promise(resolve => setTimeout(resolve, 1000));
    console.log(`Processed ${eventCount} events`);
    const pubsubStats = pubsubManager.getStats();
    console.log(`PubSub stats: ${pubsubStats.totalPublished} published, ${pubsubStats.totalReceived} received`);
  }
  console.log('');
}
/**
 * Demonstrate cache-driven performance: warm the caches, then time the same
 * query cold vs warm, and print relationship-cache statistics.
 * Note: the timing comparison is illustrative only — single samples on a
 * local mock are noisy.
 */
async demonstratePerformanceOptimization(): Promise<void> {
  console.log('🚀 Demonstrating Performance Features');
  console.log('=====================================\n');
  // Cache warming
  console.log('1. Cache warming and optimization:');
  await this.framework.warmupCaches();
  // Query performance comparison
  console.log('\n2. Query performance comparison:');
  const startTime = Date.now();
  // First query (cold cache)
  await Post.where('isPublic', '=', true).limit(5).exec();
  const coldTime = Date.now() - startTime;
  const warmStartTime = Date.now();
  // Second query (warm cache) — identical shape, so it should hit the cache.
  await Post.where('isPublic', '=', true).limit(5).exec();
  const warmTime = Date.now() - warmStartTime;
  console.log(`Cold cache query: ${coldTime}ms`);
  console.log(`Warm cache query: ${warmTime}ms`);
  // Math.max guards the division when the warm query takes 0ms.
  console.log(`Performance improvement: ${coldTime > 0 ? (coldTime / Math.max(warmTime, 1)).toFixed(2) : 'N/A'}x`);
  // Relationship loading optimization
  console.log('\n3. Relationship loading optimization:');
  const relationshipManager = this.framework.getRelationshipManager();
  if (relationshipManager) {
    const stats = relationshipManager.getRelationshipCacheStats();
    console.log(`Relationship cache: ${stats.cache.totalEntries} entries`);
    console.log(`Cache hit rate: ${(stats.cache.hitRate * 100).toFixed(2)}%`);
  }
  console.log('');
}
/**
 * Intentionally trigger failures to show the framework's error paths:
 * model validation, querying an unknown field, and a migration whose custom
 * operation throws (expected to roll back).
 */
async demonstrateErrorHandling(): Promise<void> {
  console.log('⚠️ Demonstrating Error Handling');
  console.log('=================================\n');
  // Validation errors
  console.log('1. Model validation errors:');
  try {
    await User.create({
      username: 'x', // Too short
      email: 'invalid-email' // Invalid format
    });
  } catch (error: any) {
    console.log(`✅ Caught validation error: ${error.message}`);
  }
  // Query errors
  console.log('\n2. Query timeout handling:');
  try {
    // Simulate slow query — an unknown field may return an empty result
    // rather than throw, depending on the query engine; both paths are logged.
    const result = await Post.where('nonExistentField', '=', 'value').exec();
    console.log(`Query result: ${result.length} items`);
  } catch (error: any) {
    console.log(`✅ Handled query error gracefully: ${error.message}`);
  }
  // Migration rollback
  console.log('\n3. Migration error recovery:');
  const migrationManager = this.framework.getMigrationManager();
  if (migrationManager) {
    try {
      const riskyMigration = createMigration(
        'risky_migration',
        '99.0.0',
        'Intentionally failing migration'
      )
        .customOperation('Post', async () => {
          throw new Error('Simulated migration failure');
        })
        .build();
      await migrationManager.registerMigration(riskyMigration);
      await migrationManager.runMigration(riskyMigration.id);
    } catch (error: any) {
      console.log(`✅ Migration failed as expected and rolled back: ${error.message}`);
    }
  }
  console.log('');
}
/**
 * Print the framework's status, runtime metrics, and cache memory usage.
 * Purely informational — reads getStatus()/getMetrics() and logs them.
 */
async showFrameworkStatistics(): Promise<void> {
  console.log('📊 Framework Statistics');
  console.log('=======================\n');
  const status = this.framework.getStatus();
  const metrics = this.framework.getMetrics();
  console.log('Status:');
  console.log(`- Initialized: ${status.initialized}`);
  console.log(`- Healthy: ${status.healthy}`);
  console.log(`- Version: ${status.version}`);
  console.log(`- Environment: ${status.environment}`);
  // Renamed the destructured value to `state` — the original `[name, status]`
  // shadowed the outer `status` variable inside this template literal.
  console.log(`- Services: ${Object.entries(status.services).map(([name, state]) => `${name}:${state}`).join(', ')}`);
  console.log('\nMetrics:');
  console.log(`- Uptime: ${(metrics.uptime / 1000).toFixed(2)} seconds`);
  console.log(`- Total models: ${metrics.totalModels}`);
  console.log(`- Queries executed: ${metrics.queriesExecuted}`);
  console.log(`- Migrations run: ${metrics.migrationsRun}`);
  console.log(`- Cache hit rate: ${(metrics.cacheHitRate * 100).toFixed(2)}%`);
  console.log(`- Average query time: ${metrics.averageQueryTime.toFixed(2)}ms`);
  console.log('\nMemory Usage:');
  console.log(`- Query cache: ${(metrics.memoryUsage.queryCache / 1024).toFixed(2)} KB`);
  console.log(`- Relationship cache: ${(metrics.memoryUsage.relationshipCache / 1024).toFixed(2)} KB`);
  console.log(`- Total: ${(metrics.memoryUsage.total / 1024).toFixed(2)} KB`);
  console.log('');
}
/** Shut down the framework and release its resources (delegates to stop()). */
async cleanup(): Promise<void> {
  console.log('🧹 Cleaning up framework...');
  await this.framework.stop();
  console.log('✅ Framework stopped and cleaned up');
}
// Mock service creation (in real usage, these would be actual services)
/**
 * Build a stand-in OrbitDB service for the demo: every store operation is an
 * inert async no-op, so the framework can run without a real network stack.
 */
private createMockOrbitDB(): any {
  // Both create() and open() hand back a fresh, empty mock store.
  const makeStore = async () => ({
    add: async () => {},
    get: async () => [],
    all: async () => []
  });
  return {
    create: makeStore,
    open: makeStore,
    disconnect: async () => {},
    stores: {}
  };
}
/**
 * Build a stand-in IPFS service exposing the subset of the API the framework
 * touches (add/cat, pinning, pubsub, object.stat) with canned responses.
 */
private createMockIPFS(): any {
  // Shared async no-op for the fire-and-forget pin/pubsub endpoints.
  const noop = async () => {};
  return {
    add: async () => ({ cid: 'mock-cid' }),
    cat: async () => Buffer.from('mock data'),
    pin: { add: noop, rm: noop },
    pubsub: {
      subscribe: noop,
      unsubscribe: noop,
      publish: noop
    },
    object: { stat: async () => ({ CumulativeSize: 1024 }) }
  };
}
}
// Usage function
/** Convenience entry point: construct the demo harness and run the full walkthrough. */
export async function runCompleteFrameworkExample(): Promise<void> {
  await new CompleteFrameworkExample().runCompleteExample();
}
// Run if called directly
// Run the demo when this file is executed directly (`node <file>`).
// NOTE(review): `require.main` exists only under CommonJS; this file uses ES
// `import` syntax, so confirm the build emits CJS — under native ESM this
// guard throws (`require` undefined) instead of being skipped.
if (require.main === module) {
  runCompleteFrameworkExample().catch(console.error);
}

View File

@ -0,0 +1,524 @@
/**
* Example: Integrating DebrosFramework with existing OrbitDB/IPFS services
*
* This example shows how to:
* 1. Initialize the framework with your existing services
* 2. Create models with different scopes and configurations
* 3. Use the framework for CRUD operations
* 4. Handle user-scoped vs global data
*/
import {
BaseModel,
Model,
Field,
BelongsTo,
HasMany,
ModelRegistry,
DatabaseManager,
ShardManager,
FrameworkOrbitDBService,
FrameworkIPFSService,
ConfigManager,
QueryCache,
RelationshipManager
} from '../src/framework';
// Example models for a social platform
/**
 * Global user registry model: `scope: 'global'` keeps all users in one shared
 * database, and fixed pinning (factor 3) replicates records across nodes.
 */
@Model({
  scope: 'global',
  type: 'docstore',
  pinning: { strategy: 'fixed', factor: 3 }
})
export class User extends BaseModel {
  // Unique handle used for lookups and display.
  @Field({ type: 'string', required: true, unique: true })
  username!: string;
  @Field({ type: 'string', required: true })
  email!: string;
  @Field({ type: 'string', required: false })
  bio?: string;
  // Denormalized counter, defaults to 0.
  @Field({ type: 'number', default: 0 })
  followerCount!: number;
  // NOTE(review): `Post` and `Follow` are declared later in this file but are
  // referenced here at decorator-evaluation time — with eagerly-evaluated
  // decorator arguments this is a TDZ ReferenceError at module load. Confirm
  // `HasMany` tolerates this (e.g. lazy resolution) or switch to a thunk form.
  @HasMany(Post, 'userId')
  posts!: Post[];
  @HasMany(Follow, 'followerId')
  following!: Follow[];
}
/**
 * User-scoped post model: each user's posts live in their own database,
 * hash-sharded 4 ways by id, with popularity-based pinning.
 */
@Model({
  scope: 'user',
  type: 'docstore',
  pinning: { strategy: 'popularity', factor: 2 },
  sharding: { strategy: 'hash', count: 4, key: 'id' }
})
export class Post extends BaseModel {
  @Field({ type: 'string', required: true })
  title!: string;
  @Field({ type: 'string', required: true })
  content!: string;
  // Owning user's id; also the BelongsTo foreign key below.
  @Field({ type: 'string', required: true })
  userId!: string;
  @Field({ type: 'boolean', default: true })
  isPublic!: boolean;
  @Field({ type: 'array', default: [] })
  tags!: string[];
  @Field({ type: 'number', default: 0 })
  likeCount!: number;
  @BelongsTo(User, 'userId')
  author!: User;
  // NOTE(review): `Comment` is declared after this class — same decorator
  // forward-reference caveat as on User; confirm HasMany resolves it lazily.
  @HasMany(Comment, 'postId')
  comments!: Comment[];
}
/** User-scoped comment model linking a user to a post. */
@Model({
  scope: 'user',
  type: 'docstore'
})
export class Comment extends BaseModel {
  @Field({ type: 'string', required: true })
  content!: string;
  // Foreign keys for the two BelongsTo relations below.
  @Field({ type: 'string', required: true })
  userId!: string;
  @Field({ type: 'string', required: true })
  postId!: string;
  @BelongsTo(User, 'userId')
  author!: User;
  @BelongsTo(Post, 'postId')
  post!: Post;
}
/**
 * Global follow-edge model (follower -> following), stored in a keyvalue
 * database so the whole social graph is queryable across users.
 */
@Model({
  scope: 'global',
  type: 'keyvalue'
})
export class Follow extends BaseModel {
  @Field({ type: 'string', required: true })
  followerId!: string;
  @Field({ type: 'string', required: true })
  followingId!: string;
  @BelongsTo(User, 'followerId')
  follower!: User;
  @BelongsTo(User, 'followingId')
  following!: User;
}
// Framework Integration Class
/**
 * Integration facade that boots the DebrosFramework components on top of
 * caller-supplied OrbitDB/IPFS services, then exposes social-platform
 * operations (users, posts, comments, follows), query helpers, cache
 * warmup, statistics, and shutdown.
 *
 * initialize() must be called before any other method; it also installs a
 * process-wide singleton on `globalThis.__debrosFramework` that BaseModel
 * reads, so only one instance should be active at a time.
 */
export class SocialPlatformFramework {
  // Assigned in initialize(); `!` because there is no constructor assignment.
  private databaseManager!: DatabaseManager;
  private shardManager!: ShardManager;
  private configManager!: ConfigManager;
  private queryCache!: QueryCache;
  private relationshipManager!: RelationshipManager;
  private initialized: boolean = false;

  /**
   * Boot all framework components against existing services.
   * Ordering matters: services -> managers -> databases -> shards/indexes ->
   * caches -> global singleton.
   */
  async initialize(
    existingOrbitDBService: any,
    existingIPFSService: any,
    environment: 'development' | 'production' | 'test' = 'development'
  ): Promise<void> {
    console.log('🚀 Initializing Social Platform Framework...');
    // Create configuration based on environment
    let config;
    switch (environment) {
      case 'production':
        config = ConfigManager.productionConfig();
        break;
      case 'test':
        config = ConfigManager.testConfig();
        break;
      default:
        config = ConfigManager.developmentConfig();
    }
    this.configManager = new ConfigManager(config);
    // Wrap existing services in the framework's adapter types.
    const frameworkOrbitDB = new FrameworkOrbitDBService(existingOrbitDBService);
    const frameworkIPFS = new FrameworkIPFSService(existingIPFSService);
    // Initialize services
    await frameworkOrbitDB.init();
    await frameworkIPFS.init();
    // Create framework components
    this.databaseManager = new DatabaseManager(frameworkOrbitDB);
    this.shardManager = new ShardManager();
    this.shardManager.setOrbitDBService(frameworkOrbitDB);
    // Initialize databases for all registered models
    await this.databaseManager.initializeAllDatabases();
    // Create shards for global models that declared a sharding config.
    const globalModels = ModelRegistry.getGlobalModels();
    for (const model of globalModels) {
      if (model.sharding) {
        await this.shardManager.createShards(
          model.modelName,
          model.sharding,
          model.dbType
        );
      }
    }
    // Create global indexes so user-scoped models are queryable across users.
    const userModels = ModelRegistry.getUserScopedModels();
    for (const model of userModels) {
      const indexName = `${model.modelName}GlobalIndex`;
      await this.shardManager.createGlobalIndex(model.modelName, indexName);
    }
    // Initialize query cache (defaults: 1000 entries, 5-minute TTL).
    const cacheConfig = this.configManager.cacheConfig;
    this.queryCache = new QueryCache(
      cacheConfig?.maxSize || 1000,
      cacheConfig?.ttl || 300000
    );
    // Initialize relationship manager
    this.relationshipManager = new RelationshipManager({
      databaseManager: this.databaseManager,
      shardManager: this.shardManager,
      queryCache: this.queryCache
    });
    // Store framework instance globally for BaseModel access — removed again
    // in stop().
    (globalThis as any).__debrosFramework = {
      databaseManager: this.databaseManager,
      shardManager: this.shardManager,
      configManager: this.configManager,
      queryCache: this.queryCache,
      relationshipManager: this.relationshipManager
    };
    this.initialized = true;
    console.log('✅ Social Platform Framework initialized successfully!');
  }

  /**
   * Create a user in the global database and provision that user's private
   * per-user databases.
   */
  async createUser(userData: { username: string; email: string; bio?: string }): Promise<User> {
    if (!this.initialized) {
      throw new Error('Framework not initialized');
    }
    // Create user in global database
    const user = new User(userData);
    await user.save();
    // Create user-specific databases
    await this.databaseManager.createUserDatabases(user.id);
    console.log(`👤 Created user: ${user.username} (${user.id})`);
    return user;
  }

  /**
   * Create a post in the author's user-scoped database and mirror a summary
   * record into the global Post index for cross-user queries.
   */
  async createPost(
    userId: string,
    postData: { title: string; content: string; tags?: string[]; isPublic?: boolean }
  ): Promise<Post> {
    if (!this.initialized) {
      throw new Error('Framework not initialized');
    }
    const post = new Post({
      ...postData,
      userId
    });
    await post.save();
    // Add to global index for cross-user queries
    const globalIndexName = 'PostGlobalIndex';
    await this.shardManager.addToGlobalIndex(globalIndexName, post.id, {
      id: post.id,
      userId: post.userId,
      title: post.title,
      isPublic: post.isPublic,
      createdAt: post.createdAt,
      tags: post.tags
    });
    console.log(`📝 Created post: ${post.title} by user ${userId}`);
    return post;
  }

  /** Create a comment by `userId` on `postId`. */
  async createComment(
    userId: string,
    postId: string,
    content: string
  ): Promise<Comment> {
    if (!this.initialized) {
      throw new Error('Framework not initialized');
    }
    const comment = new Comment({
      content,
      userId,
      postId
    });
    await comment.save();
    console.log(`💬 Created comment on post ${postId} by user ${userId}`);
    return comment;
  }

  /** Record a follow edge follower -> following in the global graph. */
  async followUser(followerId: string, followingId: string): Promise<Follow> {
    if (!this.initialized) {
      throw new Error('Framework not initialized');
    }
    const follow = new Follow({
      followerId,
      followingId
    });
    await follow.save();
    console.log(`👥 User ${followerId} followed user ${followingId}`);
    return follow;
  }

  // ---- Query helpers -----------------------------------------------------

  /** Newest public posts, capped at `limit`. */
  async getPublicPosts(limit: number = 10): Promise<Post[]> {
    console.log(`🔍 Querying for ${limit} public posts...`);
    return await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .limit(limit)
      .exec();
  }

  /** Newest posts belonging to one user (scoped query). */
  async getUserPosts(userId: string, limit: number = 20): Promise<Post[]> {
    console.log(`🔍 Getting posts for user ${userId}...`);
    return await Post
      .whereUser(userId)
      .orderBy('createdAt', 'desc')
      .limit(limit)
      .exec();
  }

  /** Public posts whose title or content matches `searchTerm` (LIKE). */
  async searchPosts(searchTerm: string, limit: number = 50): Promise<Post[]> {
    console.log(`🔍 Searching posts for: ${searchTerm}`);
    return await Post
      .where('isPublic', '=', true)
      .orWhere(query => {
        query.whereLike('title', searchTerm)
          .whereLike('content', searchTerm);
      })
      .orderBy('createdAt', 'desc')
      .limit(limit)
      .exec();
  }

  /** A user's recent posts with comments and author eagerly loaded. */
  async getPostsWithComments(userId: string, limit: number = 10): Promise<Post[]> {
    console.log(`🔍 Getting posts with comments for user ${userId}...`);
    const posts = await Post
      .whereUser(userId)
      .orderBy('createdAt', 'desc')
      .limit(limit)
      .exec();
    // Load relationships for all posts in one batched pass.
    await this.relationshipManager.eagerLoadRelationships(posts, ['comments', 'author']);
    return posts;
  }

  /**
   * A user's recent posts with a filtered subset of comments loaded.
   * NOTE(review): `where('content', '>', minCommentLength)` compares the
   * comment *text* against a number — presumably this was meant to filter on
   * comment length; confirm the intended predicate against the query engine.
   */
  async getPostsWithFilteredComments(userId: string, minCommentLength: number = 10): Promise<Post[]> {
    console.log(`🔍 Getting posts with filtered comments for user ${userId}...`);
    const posts = await Post
      .whereUser(userId)
      .orderBy('createdAt', 'desc')
      .limit(10)
      .exec();
    // Load comments with constraints
    for (const post of posts) {
      await post.loadRelationWithConstraints('comments', (query) =>
        query.where('content', '>', minCommentLength)
          .orderBy('createdAt', 'desc')
          .limit(5)
      );
      // Also load the author
      await post.loadRelation('author');
    }
    return posts;
  }

  /** Aggregate per-user stats: post count, total likes, average likes. */
  async getUserStats(userId: string): Promise<any> {
    console.log(`📊 Getting stats for user ${userId}...`);
    const [postCount, totalLikes] = await Promise.all([
      Post.whereUser(userId).count(),
      Post.whereUser(userId).sum('likeCount')
    ]);
    return {
      userId,
      postCount,
      totalLikes,
      averageLikes: postCount > 0 ? totalLikes / postCount : 0
    };
  }

  /** Snapshot of registry, shard, config, and cache state for diagnostics. */
  async getFrameworkStats(): Promise<any> {
    if (!this.initialized) {
      throw new Error('Framework not initialized');
    }
    const stats = {
      initialized: this.initialized,
      registeredModels: ModelRegistry.getModelNames(),
      // NOTE(review): initialize() reads `model.modelName` while these maps
      // read `m.name` (the class's Function.name) — confirm which property is
      // the registered model name; they can differ.
      globalModels: ModelRegistry.getGlobalModels().map(m => m.name),
      userScopedModels: ModelRegistry.getUserScopedModels().map(m => m.name),
      shardsInfo: this.shardManager.getAllModelsWithShards().map(modelName =>
        this.shardManager.getShardStatistics(modelName)
      ),
      config: this.configManager.getConfig(),
      cache: {
        query: {
          stats: this.queryCache.getStats(),
          usage: this.queryCache.analyzeUsage(),
          popular: this.queryCache.getPopularEntries(5)
        },
        relationships: this.relationshipManager.getRelationshipCacheStats()
      }
    };
    return stats;
  }

  /** Delegate to the query builder's explain() for plan analysis. */
  async explainQuery(query: any): Promise<any> {
    console.log(`📊 Analyzing query...`);
    return query.explain();
  }

  /** Pre-populate query and relationship caches with common access patterns. */
  async warmupCache(): Promise<void> {
    console.log(`🔥 Warming up caches...`);
    // Warm up query cache — note these are unexecuted builder objects; the
    // cache's warmup() runs them.
    const commonQueries = [
      Post.where('isPublic', '=', true).orderBy('createdAt', 'desc').limit(10),
      User.orderBy('followerCount', 'desc').limit(20),
      Follow.limit(100)
    ];
    await this.queryCache.warmup(commonQueries);
    // Warm up relationship cache
    const users = await User.limit(5).exec();
    const posts = await Post.where('isPublic', '=', true).limit(10).exec();
    if (users.length > 0) {
      await this.relationshipManager.warmupRelationshipCache(users, ['posts', 'following']);
    }
    if (posts.length > 0) {
      await this.relationshipManager.warmupRelationshipCache(posts, ['author', 'comments']);
    }
  }

  /**
   * Tear down managers, clear caches, and remove the global singleton.
   * Idempotent: a no-op when not initialized.
   */
  async stop(): Promise<void> {
    if (!this.initialized) {
      return;
    }
    console.log('🛑 Stopping Social Platform Framework...');
    await this.databaseManager.stop();
    await this.shardManager.stop();
    this.queryCache.clear();
    this.relationshipManager.clearRelationshipCache();
    // Clear global reference
    delete (globalThis as any).__debrosFramework;
    this.initialized = false;
    console.log('✅ Framework stopped successfully');
  }
}
// Example usage function
/**
 * End-to-end walkthrough: initialize the framework, create two users with
 * posts/comments/follows, and print statistics. On any failure the framework
 * is stopped before the error is rethrown.
 */
export async function exampleUsage(orbitDBService: any, ipfsService: any) {
  const framework = new SocialPlatformFramework();
  try {
    // Initialize framework with existing services
    await framework.initialize(orbitDBService, ipfsService, 'development');
    // Create some users
    const alice = await framework.createUser({
      username: 'alice',
      email: 'alice@example.com',
      bio: 'Love decentralized tech!'
    });
    const bob = await framework.createUser({
      username: 'bob',
      email: 'bob@example.com',
      bio: 'Building the future'
    });
    // Create posts
    const post1 = await framework.createPost(alice.id, {
      title: 'Welcome to the Decentralized Web',
      content: 'This is my first post using the DebrosFramework!',
      tags: ['web3', 'decentralized', 'orbitdb'],
      isPublic: true
    });
    const post2 = await framework.createPost(bob.id, {
      title: 'Framework Architecture',
      content: 'The new framework handles database partitioning automatically.',
      tags: ['framework', 'architecture'],
      isPublic: true
    });
    // Create comments
    await framework.createComment(bob.id, post1.id, 'Great post Alice!');
    await framework.createComment(alice.id, post2.id, 'Thanks for building this!');
    // Follow users
    await framework.followUser(alice.id, bob.id);
    // Get framework statistics
    const stats = await framework.getFrameworkStats();
    console.log('📊 Framework Statistics:', JSON.stringify(stats, null, 2));
    console.log('✅ Example usage completed successfully!');
    // Hand everything back so callers can continue experimenting.
    return { framework, users: { alice, bob }, posts: { post1, post2 } };
  } catch (error) {
    console.error('❌ Example usage failed:', error);
    await framework.stop();
    throw error;
  }
}
export { SocialPlatformFramework };

View File

@ -0,0 +1,932 @@
/**
* Comprehensive Migration Examples for DebrosFramework
*
* This file demonstrates the migration system capabilities:
* - Schema evolution with field additions and modifications
* - Data transformation and migration
* - Rollback scenarios and recovery
* - Cross-model relationship changes
* - Performance optimization migrations
* - Version management and dependency handling
*/
import { MigrationManager, Migration } from '../src/framework/migrations/MigrationManager';
import { MigrationBuilder, createMigration } from '../src/framework/migrations/MigrationBuilder';
import { SocialPlatformFramework } from './framework-integration';
export class MigrationExamples {
private migrationManager: MigrationManager;
private framework: SocialPlatformFramework;
// Wire the migration demos against an already-initialized framework.
// NOTE(review): reaches into private members via `as any` — brittle; this
// only works while SocialPlatformFramework keeps its `databaseManager` and
// `shardManager` fields under those names.
constructor(framework: SocialPlatformFramework) {
  this.framework = framework;
  this.migrationManager = new MigrationManager(
    (framework as any).databaseManager,
    (framework as any).shardManager
  );
}
/**
 * Run every migration demo in order. Ordering matters: the example
 * migrations must be registered first, and later sections build on the
 * schema changes applied by earlier ones.
 */
async runAllExamples(): Promise<void> {
  console.log('🔄 Running comprehensive migration examples...\n');
  await this.createExampleMigrations();
  await this.basicMigrationExamples();
  await this.complexDataTransformationExamples();
  await this.rollbackAndRecoveryExamples();
  await this.performanceOptimizationExamples();
  await this.crossModelMigrationExamples();
  await this.versionManagementExamples();
  console.log('✅ All migration examples completed!\n');
}
/**
 * Build and register five sample migrations (versions 1.0.1 → 1.4.0) that
 * exercise the builder API: timestamps, field additions with validators,
 * data transforms, threading fields, and index creation. Registration only —
 * nothing is executed here.
 */
async createExampleMigrations(): Promise<void> {
  console.log('📝 Creating Example Migrations');
  console.log('==============================\n');
  // Migration 1: Add timestamps to User model
  const addTimestampsMigration = createMigration(
    'add_user_timestamps',
    '1.0.1',
    'Add timestamps to User model'
  )
    .description('Add createdAt and updatedAt timestamps to User model for better tracking')
    .author('Framework Team')
    .tags('schema', 'timestamps', 'user')
    .addTimestamps('User')
    .addValidator(
      'validate_timestamp_format',
      'Ensure timestamp fields are valid numbers',
      async (context) => {
        const errors: string[] = [];
        const warnings: string[] = [];
        // Validate that all timestamps are valid
        // (placeholder — always passes; real logic would populate `errors`).
        return { valid: errors.length === 0, errors, warnings };
      }
    )
    .build();
  // Migration 2: Add user profile enhancements (depends on migration 1).
  const userProfileEnhancement = createMigration(
    'enhance_user_profile',
    '1.1.0',
    'Enhance User profile with additional fields'
  )
    .description('Add profile picture, location, and social links to User model')
    .dependencies('add_user_timestamps')
    .addField('User', 'profilePicture', {
      type: 'string',
      required: false,
      validate: (value) => !value || value.startsWith('http')
    })
    .addField('User', 'location', {
      type: 'string',
      required: false
    })
    .addField('User', 'socialLinks', {
      type: 'array',
      required: false,
      default: []
    })
    .addField('User', 'isVerified', {
      type: 'boolean',
      required: false,
      default: false
    })
    .build();
  // Migration 3: Restructure Post content
  const postContentRestructure = createMigration(
    'restructure_post_content',
    '1.2.0',
    'Restructure Post content with rich metadata'
  )
    .description('Transform Post content from plain text to rich content structure')
    .addField('Post', 'contentType', {
      type: 'string',
      required: false,
      default: 'text'
    })
    .addField('Post', 'metadata', {
      type: 'object',
      required: false,
      default: {}
    })
    .transformData('Post', (post) => {
      // Transform existing content to new structure: classify as 'rich' when
      // the body contains a URL, and record derived metrics.
      const wordCount = post.content ? post.content.split(' ').length : 0;
      const hasLinks = post.content ? /https?:\/\//.test(post.content) : false;
      return {
        ...post,
        contentType: hasLinks ? 'rich' : 'text',
        metadata: {
          wordCount,
          hasLinks,
          transformedAt: Date.now()
        }
      };
    })
    .build();
  // Migration 4: Add Comment threading
  const commentThreading = createMigration(
    'add_comment_threading',
    '1.3.0',
    'Add threading support to Comments'
  )
    .description('Add parent-child relationships to comments for threading')
    .addField('Comment', 'parentId', {
      type: 'string',
      required: false,
      default: null
    })
    .addField('Comment', 'threadDepth', {
      type: 'number',
      required: false,
      default: 0
    })
    .addField('Comment', 'childCount', {
      type: 'number',
      required: false,
      default: 0
    })
    .transformData('Comment', (comment) => {
      // All existing comments become root-level comments
      return {
        ...comment,
        parentId: null,
        threadDepth: 0,
        childCount: 0
      };
    })
    .build();
  // Migration 5: Performance optimization — computed search/popularity fields
  // plus indexes over them.
  const performanceOptimization = createMigration(
    'optimize_post_indexing',
    '1.4.0',
    'Optimize Post model for better query performance'
  )
    .description('Add computed fields and indexes for better query performance')
    .addField('Post', 'searchText', {
      type: 'string',
      required: false,
      default: ''
    })
    .addField('Post', 'popularityScore', {
      type: 'number',
      required: false,
      default: 0
    })
    .transformData('Post', (post) => {
      // Create searchable text and calculate popularity (likes weighted 2x).
      const searchText = `${post.title || ''} ${post.content || ''}`.toLowerCase();
      const popularityScore = (post.likeCount || 0) * 2 + (post.commentCount || 0);
      return {
        ...post,
        searchText,
        popularityScore
      };
    })
    .createIndex('Post', ['searchText'])
    .createIndex('Post', ['popularityScore'], { name: 'popularity_index' })
    .build();
  // Register all migrations
  const migrations = [
    addTimestampsMigration,
    userProfileEnhancement,
    postContentRestructure,
    commentThreading,
    performanceOptimization
  ];
  for (const migration of migrations) {
    this.migrationManager.registerMigration(migration);
    console.log(`✅ Registered migration: ${migration.name} (v${migration.version})`);
  }
  console.log(`\nRegistered ${migrations.length} example migrations\n`);
}
/**
 * Show the basic migration lifecycle: list pending migrations, dry-run the
 * first one to preview its impact, then execute it for real in batches of 50
 * and report the result.
 */
async basicMigrationExamples(): Promise<void> {
  console.log('🔄 Basic Migration Examples');
  console.log('===========================\n');
  // Get pending migrations
  const pendingMigrations = this.migrationManager.getPendingMigrations();
  console.log(`Found ${pendingMigrations.length} pending migrations:`);
  pendingMigrations.forEach(migration => {
    console.log(`- ${migration.name} (v${migration.version})`);
  });
  // Run a single migration with dry run first
  if (pendingMigrations.length > 0) {
    const firstMigration = pendingMigrations[0];
    console.log(`\nRunning dry run for: ${firstMigration.name}`);
    // Dry run: estimates impact without writing anything.
    const dryRunResult = await this.migrationManager.runMigration(firstMigration.id, {
      dryRun: true
    });
    console.log('Dry run results:');
    console.log(`- Success: ${dryRunResult.success}`);
    console.log(`- Estimated records: ${dryRunResult.recordsProcessed}`);
    console.log(`- Duration: ${dryRunResult.duration}ms`);
    console.log(`- Warnings: ${dryRunResult.warnings.length}`);
    // Run the actual migration
    console.log(`\nRunning actual migration: ${firstMigration.name}`);
    try {
      const result = await this.migrationManager.runMigration(firstMigration.id, {
        batchSize: 50
      });
      console.log('Migration results:');
      console.log(`- Success: ${result.success}`);
      console.log(`- Records processed: ${result.recordsProcessed}`);
      console.log(`- Records modified: ${result.recordsModified}`);
      console.log(`- Duration: ${result.duration}ms`);
      console.log(`- Rollback available: ${result.rollbackAvailable}`);
      if (result.warnings.length > 0) {
        console.log('- Warnings:', result.warnings);
      }
    } catch (error) {
      console.error(`Migration failed: ${error}`);
    }
  }
  console.log('');
}
/**
 * Builds and registers two migrations demonstrating non-trivial data
 * transformations: per-record field normalization for users, and a
 * cross-model engagement-score calculation for posts.
 */
async complexDataTransformationExamples(): Promise<void> {
  console.log('🔄 Complex Data Transformation Examples');
  console.log('=======================================\n');
  // Create a complex migration that transforms user data
  const userDataNormalization = createMigration(
    'normalize_user_data',
    '2.0.0',
    'Normalize and clean user data'
  )
    .description('Clean up user data, normalize email formats, and merge duplicate accounts')
    .transformData('User', (user) => {
      // Normalize email to lowercase
      if (user.email) {
        user.email = user.email.toLowerCase().trim();
      }
      // Clean up username: strip whitespace and any character outside [a-zA-Z0-9_]
      if (user.username) {
        user.username = user.username.trim().replace(/[^a-zA-Z0-9_]/g, '');
      }
      // Add normalized search fields
      user.searchName = (user.username || '').toLowerCase();
      // Display name falls back to the email local-part, then 'Anonymous'
      user.displayName = user.username || user.email?.split('@')[0] || 'Anonymous';
      return user;
    })
    .addValidator(
      'validate_email_uniqueness',
      'Ensure email addresses are unique after normalization',
      async (context) => {
        // Simulation of validation logic
        return {
          valid: true,
          errors: [],
          warnings: ['Some duplicate emails may have been found']
        };
      }
    )
    .build();
  this.migrationManager.registerMigration(userDataNormalization);
  // Create a migration that handles relationship data
  const postRelationshipMigration = createMigration(
    'update_post_relationships',
    '2.1.0',
    'Update Post relationship structure'
  )
    .description('Restructure how posts relate to users and add engagement metrics')
    .addField('Post', 'engagementScore', {
      type: 'number',
      required: false,
      default: 0
    })
    // NOTE(review): Date.now() is evaluated once, when this migration object
    // is defined — not per record at migration time. Confirm that a fixed
    // timestamp default is the intended behavior.
    .addField('Post', 'lastActivityAt', {
      type: 'number',
      required: false,
      default: Date.now()
    })
    .customOperation('Post', async (context) => {
      context.logger.info('Calculating engagement scores for all posts');
      // Simulate complex calculation across related models
      const posts = await context.databaseManager.getAllRecords('Post');
      for (const post of posts) {
        // Get related comments and likes
        const comments = await context.databaseManager.getRelatedRecords('Comment', 'postId', post.id);
        const likes = post.likeCount || 0;
        // Calculate engagement score: each comment counts double vs. a like
        const engagementScore = (comments.length * 2) + likes;
        // Last activity = newest comment, else the post's own creation time
        const lastActivityAt = comments.length > 0
          ? Math.max(...comments.map((c: any) => c.createdAt || 0))
          : post.createdAt || Date.now();
        post.engagementScore = engagementScore;
        post.lastActivityAt = lastActivityAt;
        await context.databaseManager.updateRecord('Post', post);
      }
    })
    .build();
  this.migrationManager.registerMigration(postRelationshipMigration);
  console.log('Created complex data transformation migrations');
  console.log('- User data normalization');
  console.log('- Post relationship updates with engagement scoring');
  console.log('');
}
/**
 * Demonstrates failure handling: a deliberately flaky migration that fails
 * ~50% of the time, manual rollback after success, inspection of the
 * migration history after failure, and a second migration guarded by
 * pre/post validators.
 */
async rollbackAndRecoveryExamples(): Promise<void> {
  console.log('↩️ Rollback and Recovery Examples');
  console.log('==================================\n');
  // Create a migration that might fail
  const riskyMigration = createMigration(
    'risky_data_migration',
    '2.2.0',
    'Risky data migration (demonstration)'
  )
    .description('A migration that demonstrates rollback capabilities')
    .addField('User', 'tempField', {
      type: 'string',
      required: false,
      default: 'temp'
    })
    .customOperation('User', async (context) => {
      context.logger.info('Performing risky operation that might fail');
      // Simulate a 50% chance of failure for demonstration
      if (Math.random() > 0.5) {
        throw new Error('Simulated operation failure for rollback demonstration');
      }
      context.logger.info('Risky operation completed successfully');
    })
    .build();
  this.migrationManager.registerMigration(riskyMigration);
  try {
    console.log('Running risky migration (may fail)...');
    const result = await this.migrationManager.runMigration(riskyMigration.id);
    console.log(`Migration result: ${result.success ? 'SUCCESS' : 'FAILED'}`);
    if (result.success) {
      console.log('Migration succeeded, demonstrating rollback...');
      // Demonstrate manual rollback
      const rollbackResult = await this.migrationManager.rollbackMigration(riskyMigration.id);
      console.log(`Rollback result: ${rollbackResult.success ? 'SUCCESS' : 'FAILED'}`);
      console.log(`Rollback duration: ${rollbackResult.duration}ms`);
    }
  } catch (error) {
    // A thrown error here is the expected demonstration path (~50% of runs).
    console.log(`Migration failed as expected: ${error}`);
    // Check migration history
    const history = this.migrationManager.getMigrationHistory(riskyMigration.id);
    console.log(`Migration attempts: ${history.length}`);
    if (history.length > 0) {
      const lastAttempt = history[history.length - 1];
      console.log(`Last attempt result: ${lastAttempt.success ? 'SUCCESS' : 'FAILED'}`);
      console.log(`Rollback available: ${lastAttempt.rollbackAvailable}`);
    }
  }
  // Demonstrate recovery scenarios
  console.log('\nDemonstrating recovery scenarios...');
  const recoveryMigration = createMigration(
    'recovery_migration',
    '2.3.0',
    'Recovery migration with validation'
  )
    .description('Migration with comprehensive pre and post validation')
    .addValidator(
      'pre_migration_check',
      'Validate system state before migration',
      async (context) => {
        context.logger.info('Running pre-migration validation');
        return {
          valid: true,
          errors: [],
          warnings: ['System is ready for migration']
        };
      }
    )
    .addField('Post', 'recoveryField', {
      type: 'string',
      required: false,
      default: 'recovered'
    })
    .addValidator(
      'post_migration_check',
      'Validate migration results',
      async (context) => {
        context.logger.info('Running post-migration validation');
        return {
          valid: true,
          errors: [],
          warnings: ['Migration completed successfully']
        };
      }
    )
    .build();
  this.migrationManager.registerMigration(recoveryMigration);
  console.log('Created recovery migration with validation');
  console.log('');
}
/**
 * Registers three performance-oriented migrations: index creation,
 * archiving of stale low-engagement content, and denormalized
 * (cache-friendly) counters on User and Post.
 */
async performanceOptimizationExamples(): Promise<void> {
  console.log('🚀 Performance Optimization Migration Examples');
  console.log('===============================================\n');
  // Create migrations that optimize different aspects
  const indexOptimization = createMigration(
    'optimize_search_indexes',
    '3.0.0',
    'Optimize search and query performance'
  )
    .description('Add indexes and computed fields for better query performance')
    .createIndex('User', ['email'], { unique: true, name: 'user_email_unique' })
    .createIndex('User', ['username'], { unique: true, name: 'user_username_unique' })
    .createIndex('Post', ['userId', 'createdAt'], { name: 'user_posts_timeline' })
    .createIndex('Post', ['isPublic', 'popularityScore'], { name: 'public_popular_posts' })
    .createIndex('Comment', ['postId', 'createdAt'], { name: 'post_comments_timeline' })
    .build();
  const dataArchiving = createMigration(
    'archive_old_data',
    '3.1.0',
    'Archive old inactive data'
  )
    .description('Move old inactive data to archive tables for better performance')
    .addField('Post', 'isArchived', {
      type: 'boolean',
      required: false,
      default: false
    })
    .addField('Comment', 'isArchived', {
      type: 'boolean',
      required: false,
      default: false
    })
    .customOperation('Post', async (context) => {
      context.logger.info('Archiving old posts');
      const cutoffDate = Date.now() - (365 * 24 * 60 * 60 * 1000); // 1 year ago
      const posts = await context.databaseManager.getAllRecords('Post');
      let archivedCount = 0;
      for (const post of posts) {
        // Archive only posts that are both stale (no activity within a year,
        // falling back to createdAt) and low-engagement (score below 5)
        if ((post.lastActivityAt || post.createdAt || 0) < cutoffDate &&
            (post.engagementScore || 0) < 5) {
          post.isArchived = true;
          await context.databaseManager.updateRecord('Post', post);
          archivedCount++;
        }
      }
      context.logger.info(`Archived ${archivedCount} old posts`);
    })
    .build();
  const cacheOptimization = createMigration(
    'optimize_cache_fields',
    '3.2.0',
    'Add cache-friendly computed fields'
  )
    .description('Add denormalized fields to reduce query complexity')
    .addField('User', 'postCount', {
      type: 'number',
      required: false,
      default: 0
    })
    .addField('User', 'totalEngagement', {
      type: 'number',
      required: false,
      default: 0
    })
    .addField('Post', 'commentCount', {
      type: 'number',
      required: false,
      default: 0
    })
    .customOperation('User', async (context) => {
      context.logger.info('Computing user statistics');
      const users = await context.databaseManager.getAllRecords('User');
      for (const user of users) {
        const posts = await context.databaseManager.getRelatedRecords('Post', 'userId', user.id);
        // Sum engagement across all of the user's posts
        const totalEngagement = posts.reduce((sum: number, post: any) =>
          sum + (post.engagementScore || 0), 0);
        user.postCount = posts.length;
        user.totalEngagement = totalEngagement;
        await context.databaseManager.updateRecord('User', user);
      }
    })
    .build();
  // Register performance migrations
  [indexOptimization, dataArchiving, cacheOptimization].forEach(migration => {
    this.migrationManager.registerMigration(migration);
    console.log(`✅ Registered: ${migration.name}`);
  });
  console.log('\nPerformance optimization migrations created:');
  console.log('- Search index optimization');
  console.log('- Data archiving for old content');
  console.log('- Cache-friendly denormalized fields');
  console.log('');
}
/**
 * Registers migrations whose logic spans multiple models: follow-graph
 * analysis (mutual-follow detection and relationship strength) and
 * keyword-based post categorization with hashtag extraction.
 */
async crossModelMigrationExamples(): Promise<void> {
  console.log('🔗 Cross-Model Migration Examples');
  console.log('=================================\n');
  // Migration that affects multiple models and their relationships
  const relationshipRestructure = createMigration(
    'restructure_follow_system',
    '4.0.0',
    'Restructure follow system with categories'
  )
    .description('Add follow categories and mutual follow detection')
    .addField('Follow', 'category', {
      type: 'string',
      required: false,
      default: 'general'
    })
    .addField('Follow', 'isMutual', {
      type: 'boolean',
      required: false,
      default: false
    })
    .addField('Follow', 'strength', {
      type: 'number',
      required: false,
      default: 1
    })
    .customOperation('Follow', async (context) => {
      context.logger.info('Analyzing follow relationships');
      const follows = await context.databaseManager.getAllRecords('Follow');
      // followerId -> set of followingIds, used to look up reverse edges
      const mutualMap = new Map<string, Set<string>>();
      // Build mutual follow map
      follows.forEach((follow: any) => {
        if (!mutualMap.has(follow.followerId)) {
          mutualMap.set(follow.followerId, new Set());
        }
        mutualMap.get(follow.followerId)!.add(follow.followingId);
      });
      // Update mutual status
      for (const follow of follows) {
        // Mutual when the followed user also follows the follower back
        const reverseExists = mutualMap.get(follow.followingId)?.has(follow.followerId);
        follow.isMutual = Boolean(reverseExists);
        // Calculate relationship strength based on mutual status and activity
        follow.strength = follow.isMutual ? 2 : 1;
        await context.databaseManager.updateRecord('Follow', follow);
      }
    })
    .build();
  const contentCategorization = createMigration(
    'add_content_categories',
    '4.1.0',
    'Add content categorization system'
  )
    .description('Add categories and tags to posts and improve content discovery')
    .addField('Post', 'category', {
      type: 'string',
      required: false,
      default: 'general'
    })
    .addField('Post', 'subcategory', {
      type: 'string',
      required: false
    })
    .addField('Post', 'autoTags', {
      type: 'array',
      required: false,
      default: []
    })
    .transformData('Post', (post) => {
      // Auto-categorize posts based on content
      const content = (post.content || '').toLowerCase();
      let category = 'general';
      let autoTags: string[] = [];
      // Simple keyword heuristics; the first matching branch wins
      if (content.includes('tech') || content.includes('programming')) {
        category = 'technology';
        autoTags.push('tech');
      } else if (content.includes('art') || content.includes('design')) {
        category = 'creative';
        autoTags.push('art');
      } else if (content.includes('news') || content.includes('update')) {
        category = 'news';
        autoTags.push('news');
      }
      // Extract hashtags as auto tags
      const hashtags = content.match(/#\w+/g) || [];
      autoTags.push(...hashtags.map(tag => tag.slice(1)));
      return {
        ...post,
        category,
        autoTags: [...new Set(autoTags)] // Remove duplicates
      };
    })
    .build();
  // Register cross-model migrations
  [relationshipRestructure, contentCategorization].forEach(migration => {
    this.migrationManager.registerMigration(migration);
    console.log(`✅ Registered: ${migration.name}`);
  });
  console.log('\nCross-model migrations demonstrate:');
  console.log('- Complex relationship analysis and updates');
  console.log('- Multi-model data transformation');
  console.log('- Automatic content categorization');
  console.log('');
}
async versionManagementExamples(): Promise<void> {
console.log('📋 Version Management Examples');
console.log('==============================\n');
// Demonstrate migration ordering and dependencies
const allMigrations = this.migrationManager.getMigrations();
console.log('Migration dependency chain:');
allMigrations.forEach(migration => {
const deps = migration.dependencies?.join(', ') || 'None';
console.log(`- ${migration.name} (v${migration.version}) depends on: ${deps}`);
});
// Show pending migrations in order
const pendingMigrations = this.migrationManager.getPendingMigrations();
console.log(`\nPending migrations (${pendingMigrations.length}):`);
pendingMigrations.forEach((migration, index) => {
console.log(`${index + 1}. ${migration.name} (v${migration.version})`);
});
// Demonstrate batch migration with different strategies
console.log('\nRunning pending migrations with different strategies:');
if (pendingMigrations.length > 0) {
console.log('\n1. Dry run all pending migrations:');
try {
const dryRunResults = await this.migrationManager.runPendingMigrations({
dryRun: true,
stopOnError: false
});
console.log(`Dry run completed: ${dryRunResults.length} migrations processed`);
dryRunResults.forEach(result => {
console.log(`- ${result.migrationId}: ${result.success ? 'SUCCESS' : 'FAILED'}`);
});
} catch (error) {
console.error(`Dry run failed: ${error}`);
}
console.log('\n2. Run migrations with stop-on-error:');
try {
const results = await this.migrationManager.runPendingMigrations({
stopOnError: true,
batchSize: 25
});
console.log(`Migration batch completed: ${results.length} migrations`);
} catch (error) {
console.error(`Migration batch stopped due to error: ${error}`);
}
}
// Show migration history and statistics
const history = this.migrationManager.getMigrationHistory();
console.log(`\nMigration history (${history.length} total runs):`);
history.slice(0, 5).forEach(result => {
console.log(`- ${result.migrationId}: ${result.success ? 'SUCCESS' : 'FAILED'} ` +
`(${result.duration}ms, ${result.recordsProcessed} records)`);
});
// Show active migrations (should be empty in examples)
const activeMigrations = this.migrationManager.getActiveMigrations();
console.log(`\nActive migrations: ${activeMigrations.length}`);
console.log('');
}
/**
 * Demonstrates advanced migration features: a validator that checks
 * consistency across two models, and batch processing of a large
 * (simulated) dataset with progress tracking.
 *
 * Fix: the two example migrations were previously built but never
 * registered, leaving them as unused locals that never appeared in
 * getMigrations()/pending counts; they are now registered like in every
 * other example method in this class. The progress counter is also clamped
 * so it cannot overshoot the dataset size on the final batch.
 */
async demonstrateAdvancedFeatures(): Promise<void> {
  console.log('🔬 Advanced Migration Features');
  console.log('==============================\n');

  // Create a migration with complex validation
  const complexValidation = createMigration(
    'complex_validation_example',
    '5.0.0',
    'Migration with complex validation'
  )
    .description('Demonstrates advanced validation and error handling')
    .addValidator(
      'check_data_consistency',
      'Verify data consistency across models',
      async (context) => {
        const errors: string[] = [];
        const warnings: string[] = [];
        // Simulate complex validation
        const users = await context.databaseManager.getAllRecords('User');
        const posts = await context.databaseManager.getAllRecords('Post');
        // Check for orphaned posts (posts whose author no longer exists)
        const userIds = new Set(users.map((u: any) => u.id));
        const orphanedPosts = posts.filter((p: any) => !userIds.has(p.userId));
        if (orphanedPosts.length > 0) {
          warnings.push(`Found ${orphanedPosts.length} orphaned posts`);
        }
        return { valid: errors.length === 0, errors, warnings };
      }
    )
    .addField('User', 'validationField', {
      type: 'string',
      required: false,
      default: 'validated'
    })
    .build();

  // Create a migration that handles large datasets
  const largeMigration = createMigration(
    'large_dataset_migration',
    '5.1.0',
    'Migration optimized for large datasets'
  )
    .description('Demonstrates batch processing and progress tracking')
    .customOperation('Post', async (context) => {
      context.logger.info('Processing large dataset with progress tracking');
      const totalRecords = 10000; // Simulate large dataset
      const batchSize = 100;
      for (let i = 0; i < totalRecords; i += batchSize) {
        const progress = ((i / totalRecords) * 100).toFixed(1);
        context.logger.info(`Processing batch ${i / batchSize + 1}, Progress: ${progress}%`);
        // Simulate processing time
        await new Promise(resolve => setTimeout(resolve, 10));
        // Clamp so the counter never exceeds totalRecords on the last batch
        context.progress.processedRecords = Math.min(i + batchSize, totalRecords);
        context.progress.estimatedTimeRemaining =
          ((totalRecords - i) / batchSize) * 10; // Rough estimate
      }
    })
    .build();

  // Register both migrations, matching the pattern of the other example
  // methods, so they are visible in migration listings and statistics.
  [complexValidation, largeMigration].forEach(migration => {
    this.migrationManager.registerMigration(migration);
  });

  console.log('Created advanced feature demonstrations:');
  console.log('- Complex multi-model validation');
  console.log('- Large dataset processing with progress tracking');
  console.log('- Error handling and recovery strategies');
  console.log('');
}
}
/**
 * Entry point for the migration examples: boots the framework, seeds
 * sample data, runs every example, and prints aggregate migration
 * statistics. The framework is always stopped, even on failure.
 */
export async function runMigrationExamples(
  orbitDBService: any,
  ipfsService: any
): Promise<void> {
  const framework = new SocialPlatformFramework();
  try {
    await framework.initialize(orbitDBService, ipfsService, 'development');

    // Seed data first so migrations have records to operate on.
    await createSampleDataForMigrations(framework);

    const examples = new MigrationExamples(framework);
    await examples.runAllExamples();
    await examples.demonstrateAdvancedFeatures();

    // Reach into the example runner for its manager to report final stats.
    const manager = (examples as any).migrationManager;
    const registered = manager.getMigrations();
    const history = manager.getMigrationHistory();
    const succeeded = history.filter((h: any) => h.success).length;
    const failed = history.filter((h: any) => !h.success).length;
    const elapsedMs = history.reduce((sum: number, h: any) => sum + h.duration, 0);
    const recordCount = history.reduce((sum: number, h: any) => sum + h.recordsProcessed, 0);

    console.log('📊 Final Migration Statistics:');
    console.log('=============================');
    console.log(`Total migrations registered: ${registered.length}`);
    console.log(`Total migration runs: ${history.length}`);
    console.log(`Successful runs: ${succeeded}`);
    console.log(`Failed runs: ${failed}`);
    console.log(`Total migration time: ${elapsedMs}ms`);
    console.log(`Total records processed: ${recordCount}`);
  } catch (error) {
    console.error('❌ Migration examples failed:', error);
  } finally {
    await framework.stop();
  }
}
/**
 * Seeds the framework with data for the migration examples: 5 users,
 * 10 posts, 15 comments, and a random sprinkling of follow edges.
 * Failures are logged as warnings rather than thrown, so the examples
 * can proceed with whatever data was created.
 */
async function createSampleDataForMigrations(framework: SocialPlatformFramework): Promise<void> {
  console.log('🗄️ Creating sample data for migration testing...\n');
  try {
    // Users deliberately lack the newer fields so migrations have work to do.
    const users = [];
    for (const i of Array.from({ length: 5 }, (_, n) => n)) {
      users.push(
        await framework.createUser({
          username: `migrationuser${i}`,
          email: `migration${i}@example.com`,
          bio: `Migration test user ${i}`
        })
      );
    }

    // Posts with only the basic structure, authored round-robin.
    const posts = [];
    for (const i of Array.from({ length: 10 }, (_, n) => n)) {
      const author = users[i % users.length];
      posts.push(
        await framework.createPost(author.id, {
          title: `Migration Test Post ${i}`,
          content: `This is test content for migration testing. Post ${i} with various content types.`,
          tags: ['migration', 'test'],
          isPublic: true
        })
      );
    }

    // Comments, cycling through both users and posts.
    for (const i of Array.from({ length: 15 }, (_, n) => n)) {
      const commenter = users[i % users.length];
      const target = posts[i % posts.length];
      await framework.createComment(
        commenter.id,
        target.id,
        `Migration test comment ${i}`
      );
    }

    // Random follow graph: each ordered user pair follows with ~40% odds.
    for (let i = 0; i < users.length; i++) {
      for (let j = 0; j < users.length; j++) {
        if (i !== j && Math.random() > 0.6) {
          await framework.followUser(users[i].id, users[j].id);
        }
      }
    }

    console.log(`✅ Created sample data: ${users.length} users, ${posts.length} posts, 15 comments\n`);
  } catch (error) {
    console.warn('⚠️ Some sample data creation failed:', error);
  }
}

475
examples/query-examples.ts Normal file
View File

@ -0,0 +1,475 @@
/**
* Comprehensive Query Examples for DebrosFramework
*
* This file demonstrates all the query capabilities implemented in Phase 3:
* - Basic and advanced filtering
* - User-scoped vs global queries
* - Relationship loading
* - Aggregations and analytics
* - Query optimization and caching
* - Pagination and chunked processing
*/
import { SocialPlatformFramework, User, Post, Comment, Follow } from './framework-integration';
/**
 * Demonstrates the Phase 3 query capabilities of DebrosFramework:
 * filtering, user-scoped queries, relationship loading, aggregations,
 * pagination, caching, and query optimization. Each method logs its
 * results so the examples double as a readable walkthrough.
 */
export class QueryExamples {
  // Used for framework-level calls (stats, explain); models are queried statically.
  private framework: SocialPlatformFramework;

  constructor(framework: SocialPlatformFramework) {
    this.framework = framework;
  }

  /** Runs every example group in sequence. */
  async runAllExamples(): Promise<void> {
    console.log('🚀 Running comprehensive query examples...\n');
    await this.basicQueries();
    await this.userScopedQueries();
    await this.relationshipQueries();
    await this.aggregationQueries();
    await this.advancedFiltering();
    await this.paginationExamples();
    await this.cacheExamples();
    await this.optimizationExamples();
    console.log('✅ All query examples completed!\n');
  }

  /** Equality filters, multi-condition queries, whereIn, and find-by-id. */
  async basicQueries(): Promise<void> {
    console.log('📊 Basic Query Examples');
    console.log('========================\n');
    // Simple equality
    const publicPosts = await Post
      .where('isPublic', '=', true)
      .limit(5)
      .exec();
    console.log(`Found ${publicPosts.length} public posts`);
    // Multiple conditions
    const recentPublicPosts = await Post
      .where('isPublic', '=', true)
      .where('createdAt', '>', Date.now() - 86400000) // Last 24 hours
      .orderBy('createdAt', 'desc')
      .limit(10)
      .exec();
    console.log(`Found ${recentPublicPosts.length} recent public posts`);
    // Using whereIn
    const specificUsers = await User
      .whereIn('username', ['alice', 'bob', 'charlie'])
      .exec();
    console.log(`Found ${specificUsers.length} specific users`);
    // Find by ID
    if (publicPosts.length > 0) {
      const singlePost = await Post.find(publicPosts[0].id);
      console.log(`Found post: ${singlePost?.title || 'Not found'}`);
    }
    console.log('');
  }

  /** Contrasts per-user queries with global queries over user-scoped data. */
  async userScopedQueries(): Promise<void> {
    console.log('👤 User-Scoped Query Examples');
    console.log('==============================\n');
    // Get all users first
    const users = await User.limit(3).exec();
    if (users.length === 0) {
      console.log('No users found for user-scoped examples');
      return;
    }
    const userId = users[0].id;
    // Single user query (efficient - direct database access)
    const userPosts = await Post
      .whereUser(userId)
      .orderBy('createdAt', 'desc')
      .limit(10)
      .exec();
    console.log(`Found ${userPosts.length} posts for user ${userId}`);
    // Multiple users query
    const multiUserPosts = await Post
      .whereUserIn(users.map(u => u.id))
      .where('isPublic', '=', true)
      .limit(20)
      .exec();
    console.log(`Found ${multiUserPosts.length} posts from ${users.length} users`);
    // Global query on user-scoped data (uses global index)
    const allPublicPosts = await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .limit(15)
      .exec();
    console.log(`Found ${allPublicPosts.length} public posts across all users`);
    console.log('');
  }

  /** Eager-loads related models (author, comments, posts) via load(). */
  async relationshipQueries(): Promise<void> {
    console.log('🔗 Relationship Query Examples');
    console.log('===============================\n');
    // Load posts with their authors
    const postsWithAuthors = await Post
      .where('isPublic', '=', true)
      .load(['author'])
      .limit(5)
      .exec();
    console.log(`Loaded ${postsWithAuthors.length} posts with authors`);
    // Load posts with comments and authors
    const postsWithComments = await Post
      .where('isPublic', '=', true)
      .load(['comments', 'author'])
      .limit(3)
      .exec();
    console.log(`Loaded ${postsWithComments.length} posts with comments and authors`);
    // Load user with their posts
    const users = await User.limit(2).exec();
    if (users.length > 0) {
      const userWithPosts = await User
        .where('id', '=', users[0].id)
        .load(['posts'])
        .first();
      if (userWithPosts) {
        console.log(`User ${userWithPosts.username} has posts loaded`);
      }
    }
    console.log('');
  }

  /** count/sum/avg/min/max, both globally and scoped to one user. */
  async aggregationQueries(): Promise<void> {
    console.log('📈 Aggregation Query Examples');
    console.log('==============================\n');
    // Count queries
    const totalPosts = await Post.count();
    const publicPostCount = await Post.where('isPublic', '=', true).count();
    console.log(`Total posts: ${totalPosts}, Public: ${publicPostCount}`);
    // Sum and average
    const totalLikes = await Post.sum('likeCount');
    const averageLikes = await Post.avg('likeCount');
    console.log(`Total likes: ${totalLikes}, Average: ${averageLikes.toFixed(2)}`);
    // Min and max
    const oldestPost = await Post.min('createdAt');
    const newestPost = await Post.max('createdAt');
    console.log(`Oldest post: ${new Date(oldestPost).toISOString()}`);
    console.log(`Newest post: ${new Date(newestPost).toISOString()}`);
    // User-specific aggregations
    const users = await User.limit(1).exec();
    if (users.length > 0) {
      const userId = users[0].id;
      const userPostCount = await Post.whereUser(userId).count();
      const userTotalLikes = await Post.whereUser(userId).sum('likeCount');
      console.log(`User ${userId}: ${userPostCount} posts, ${userTotalLikes} total likes`);
    }
    console.log('');
  }

  /** Date ranges, numeric ranges, array containment, LIKE search, null checks. */
  async advancedFiltering(): Promise<void> {
    console.log('🔍 Advanced Filtering Examples');
    console.log('===============================\n');
    // Date filtering
    const lastWeek = Date.now() - (7 * 24 * 60 * 60 * 1000);
    const recentPosts = await Post
      .whereDate('createdAt', '>', lastWeek)
      .where('isPublic', '=', true)
      .exec();
    console.log(`Found ${recentPosts.length} posts from last week`);
    // Range filtering
    const popularPosts = await Post
      .whereBetween('likeCount', 5, 100)
      .where('isPublic', '=', true)
      .orderBy('likeCount', 'desc')
      .limit(10)
      .exec();
    console.log(`Found ${popularPosts.length} moderately popular posts`);
    // Array filtering
    const techPosts = await Post
      .whereArrayContains('tags', 'tech')
      .where('isPublic', '=', true)
      .exec();
    console.log(`Found ${techPosts.length} tech-related posts`);
    // Text search
    const searchResults = await Post
      .where('isPublic', '=', true)
      .orWhere(query => {
        query.whereLike('title', 'framework')
          .whereLike('content', 'orbitdb');
      })
      .limit(10)
      .exec();
    console.log(`Found ${searchResults.length} posts matching search terms`);
    // Null checks (note: this queries User records, despite the variable name)
    const postsWithBio = await User
      .whereNotNull('bio')
      .limit(5)
      .exec();
    console.log(`Found ${postsWithBio.length} users with bios`);
    console.log('');
  }

  /** page(), paginate() with metadata, and chunked batch processing. */
  async paginationExamples(): Promise<void> {
    console.log('📄 Pagination Examples');
    console.log('=======================\n');
    // Basic pagination
    const page1 = await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .page(1, 5)
      .exec();
    console.log(`Page 1: ${page1.length} posts`);
    // Pagination with metadata
    const paginatedResult = await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .paginate(1, 5);
    console.log(`Pagination: ${paginatedResult.currentPage}/${paginatedResult.lastPage}`);
    console.log(`Total: ${paginatedResult.total}, Per page: ${paginatedResult.perPage}`);
    console.log(`Has next: ${paginatedResult.hasNextPage}, Has prev: ${paginatedResult.hasPrevPage}`);
    // Chunked processing
    let processedCount = 0;
    await Post
      .where('isPublic', '=', true)
      .chunk(3, async (posts, page) => {
        processedCount += posts.length;
        console.log(`Processed chunk ${page}: ${posts.length} posts`);
        // Stop after processing 2 chunks for demo
        if (page >= 2) return false;
      });
    console.log(`Total processed in chunks: ${processedCount}`);
    console.log('');
  }

  /** Runs the same query twice to contrast cache miss vs. hit timings. */
  async cacheExamples(): Promise<void> {
    console.log('⚡ Cache Examples');
    console.log('=================\n');
    // First execution (cache miss)
    console.log('First query execution (cache miss):');
    const start1 = Date.now();
    const posts1 = await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .limit(10)
      .exec();
    const duration1 = Date.now() - start1;
    console.log(`Returned ${posts1.length} posts in ${duration1}ms`);
    // Second execution (cache hit)
    console.log('Second query execution (cache hit):');
    const start2 = Date.now();
    const posts2 = await Post
      .where('isPublic', '=', true)
      .orderBy('createdAt', 'desc')
      .limit(10)
      .exec();
    const duration2 = Date.now() - start2;
    console.log(`Returned ${posts2.length} posts in ${duration2}ms`);
    // Cache statistics
    const stats = await this.framework.getFrameworkStats();
    console.log('Cache statistics:', stats.cache.stats);
    console.log('');
  }

  /** Query explanation, index hints, and (commented) cache opt-out. */
  async optimizationExamples(): Promise<void> {
    console.log('🚀 Query Optimization Examples');
    console.log('===============================\n');
    // Query explanation
    const query = Post
      .where('isPublic', '=', true)
      .where('likeCount', '>', 10)
      .orderBy('createdAt', 'desc')
      .limit(20);
    const explanation = await this.framework.explainQuery(query);
    console.log('Query explanation:');
    console.log('- Strategy:', explanation.plan.strategy);
    console.log('- Estimated cost:', explanation.plan.estimatedCost);
    console.log('- Optimizations:', explanation.plan.optimizations);
    console.log('- Suggestions:', explanation.suggestions);
    // Query with index hint
    const optimizedQuery = Post
      .where('isPublic', '=', true)
      .useIndex('post_public_idx')
      .orderBy('createdAt', 'desc')
      .limit(10);
    const optimizedResults = await optimizedQuery.exec();
    console.log(`Optimized query returned ${optimizedResults.length} results`);
    // Disable cache for specific query (built but intentionally not executed)
    const nonCachedQuery = Post
      .where('isPublic', '=', true)
      .limit(5);
    // Note: This would work with QueryExecutor integration
    // const nonCachedResults = await nonCachedQuery.exec().disableCache();
    console.log('');
  }

  /** Shows toSQL()/explain(), multi-field ordering, distinct, and clone(). */
  async demonstrateQueryBuilder(): Promise<void> {
    console.log('🔧 QueryBuilder Method Demonstration');
    console.log('=====================================\n');
    // Show various QueryBuilder methods
    const complexQuery = Post
      .where('isPublic', '=', true)
      .whereNotNull('title')
      .whereDateBetween('createdAt', Date.now() - 86400000 * 7, Date.now())
      .whereArrayLength('tags', '>', 0)
      .orderByMultiple([
        { field: 'likeCount', direction: 'desc' },
        { field: 'createdAt', direction: 'desc' }
      ])
      .distinct('userId')
      .limit(15);
    console.log('Complex query SQL representation:');
    console.log(complexQuery.toSQL());
    console.log('\nQuery explanation:');
    console.log(complexQuery.explain());
    // Clone and modify query (clone leaves the original untouched)
    const modifiedQuery = complexQuery.clone()
      .where('likeCount', '>', 5)
      .limit(10);
    console.log('\nModified query SQL:');
    console.log(modifiedQuery.toSQL());
    const results = await modifiedQuery.exec();
    console.log(`\nExecuted complex query, got ${results.length} results`);
    console.log('');
  }
}
/**
 * Entry point for the query examples: boots the framework, seeds sample
 * data, runs all examples plus the QueryBuilder demo, and dumps framework
 * statistics. The framework is always stopped, even on failure.
 */
export async function runQueryExamples(
  orbitDBService: any,
  ipfsService: any
): Promise<void> {
  const framework = new SocialPlatformFramework();
  try {
    await framework.initialize(orbitDBService, ipfsService, 'development');

    // Seed records so the queries have something to return.
    await createSampleData(framework);

    const demo = new QueryExamples(framework);
    await demo.runAllExamples();
    await demo.demonstrateQueryBuilder();

    // Final snapshot of framework-level statistics.
    const stats = await framework.getFrameworkStats();
    console.log('📊 Final Framework Statistics:');
    console.log(JSON.stringify(stats, null, 2));
  } catch (error) {
    console.error('❌ Query examples failed:', error);
  } finally {
    await framework.stop();
  }
}
/**
 * Seeds the framework for the query examples: three users, five posts
 * (one private), and a follow triangle. Failures are logged as warnings
 * rather than thrown, so the examples can continue with partial data.
 */
async function createSampleData(framework: SocialPlatformFramework): Promise<void> {
  console.log('🗄️ Creating sample data for query examples...\n');
  try {
    const alice = await framework.createUser({
      username: 'alice',
      email: 'alice@example.com',
      bio: 'Tech enthusiast and framework developer'
    });
    const bob = await framework.createUser({
      username: 'bob',
      email: 'bob@example.com',
      bio: 'Building decentralized applications'
    });
    // Charlie intentionally has no bio (exercises whereNotNull examples).
    const charlie = await framework.createUser({
      username: 'charlie',
      email: 'charlie@example.com'
    });

    // Posts as a data table, created in the original order.
    const seedPosts: Array<[string, { title: string; content: string; tags: string[]; isPublic: boolean }]> = [
      [alice.id, {
        title: 'Introduction to DebrosFramework',
        content: 'The DebrosFramework makes OrbitDB development much easier...',
        tags: ['framework', 'orbitdb', 'tech'],
        isPublic: true
      }],
      [alice.id, {
        title: 'Advanced Query Patterns',
        content: 'Here are some advanced patterns for querying decentralized data...',
        tags: ['queries', 'patterns', 'tech'],
        isPublic: true
      }],
      [bob.id, {
        title: 'Building Scalable dApps',
        content: 'Scalability is crucial for decentralized applications...',
        tags: ['scalability', 'dapps'],
        isPublic: true
      }],
      [bob.id, {
        title: 'Private Development Notes',
        content: 'Some private thoughts on the framework architecture...',
        tags: ['private', 'notes'],
        isPublic: false
      }],
      [charlie.id, {
        title: 'Getting Started Guide',
        content: 'A comprehensive guide to getting started with the framework...',
        tags: ['guide', 'beginner'],
        isPublic: true
      }]
    ];
    for (const [authorId, post] of seedPosts) {
      await framework.createPost(authorId, post);
    }

    // Follow triangle: alice -> bob -> charlie -> alice.
    await framework.followUser(alice.id, bob.id);
    await framework.followUser(bob.id, charlie.id);
    await framework.followUser(charlie.id, alice.id);

    console.log('✅ Sample data created successfully!\n');
  } catch (error) {
    console.warn('⚠️ Some sample data creation failed:', error);
  }
}

View File

@ -0,0 +1,511 @@
/**
* Comprehensive Relationship Examples for DebrosFramework
*
* This file demonstrates all the relationship loading capabilities implemented in Phase 4:
* - Lazy and eager loading
* - Relationship caching
* - Cross-database relationship resolution
* - Advanced loading with constraints
* - Performance optimization techniques
*/
import { SocialPlatformFramework, User, Post, Comment, Follow } from './framework-integration';
/**
 * Demonstration suite for the framework's relationship-loading features.
 *
 * Each method prints a themed walkthrough (lazy/eager loading, constrained
 * loading, caching, cross-database resolution, performance comparisons) using
 * the `User`/`Post`/`Comment` models of the provided framework instance.
 * The class only reads/writes through the framework's public API; all output
 * goes to the console.
 */
export class RelationshipExamples {
  // Framework instance whose relationshipManager and models are exercised.
  private framework: SocialPlatformFramework;

  constructor(framework: SocialPlatformFramework) {
    this.framework = framework;
  }

  /** Runs every example section in sequence. */
  async runAllExamples(): Promise<void> {
    console.log('🔗 Running comprehensive relationship examples...\n');
    await this.basicRelationshipLoading();
    await this.eagerLoadingExamples();
    await this.lazyLoadingExamples();
    await this.constrainedLoadingExamples();
    await this.cacheOptimizationExamples();
    await this.crossDatabaseRelationships();
    await this.performanceExamples();
    console.log('✅ All relationship examples completed!\n');
  }

  /** Demonstrates single-relation loading: BelongsTo (author) and HasMany (comments/posts). */
  async basicRelationshipLoading(): Promise<void> {
    console.log('🔗 Basic Relationship Loading');
    console.log('==============================\n');
    // Get a post and load its author (BelongsTo)
    const posts = await Post.where('isPublic', '=', true).limit(3).exec();
    if (posts.length > 0) {
      const post = posts[0];
      console.log(`Loading author for post: ${post.title}`);
      const author = await post.loadRelation('author');
      console.log(`Author loaded: ${author?.username || 'Unknown'}`);
      // Load comments for the post (HasMany)
      console.log(`Loading comments for post: ${post.title}`);
      const comments = await post.loadRelation('comments');
      console.log(`Comments loaded: ${Array.isArray(comments) ? comments.length : 0} comment(s)`);
      // Check what relationships are loaded
      console.log(`Loaded relationships: ${post.getLoadedRelations().join(', ')}`);
    }
    // Get a user and load their posts (HasMany)
    const users = await User.limit(2).exec();
    if (users.length > 0) {
      const user = users[0];
      console.log(`\nLoading posts for user: ${user.username}`);
      const userPosts = await user.loadRelation('posts');
      console.log(`Posts loaded: ${Array.isArray(userPosts) ? userPosts.length : 0} post(s)`);
    }
    console.log('');
  }

  /**
   * Compares per-instance loading against batched eager loading via the
   * relationship manager, and reports the measured efficiency gain.
   */
  async eagerLoadingExamples(): Promise<void> {
    console.log('⚡ Eager Loading Examples');
    console.log('==========================\n');
    // Load multiple posts with their authors and comments in one go
    console.log('Loading posts with authors and comments (eager loading):');
    const posts = await Post
      .where('isPublic', '=', true)
      .limit(5)
      .exec();
    if (posts.length > 0) {
      // Eager load relationships for all posts at once
      const startTime = Date.now();
      await posts[0].load(['author', 'comments']);
      const singleLoadTime = Date.now() - startTime;
      // Now eager load for all posts
      const eagerStartTime = Date.now();
      await this.framework.relationshipManager.eagerLoadRelationships(
        posts,
        ['author', 'comments']
      );
      const eagerLoadTime = Date.now() - eagerStartTime;
      console.log(`Single post relationship loading: ${singleLoadTime}ms`);
      console.log(`Eager loading for ${posts.length} posts: ${eagerLoadTime}ms`);
      console.log(`Efficiency gain: ${((singleLoadTime * posts.length) / eagerLoadTime).toFixed(2)}x faster`);
      // Verify relationships are loaded
      let loadedCount = 0;
      for (const post of posts) {
        if (post.isRelationLoaded('author') && post.isRelationLoaded('comments')) {
          loadedCount++;
        }
      }
      console.log(`Successfully loaded relationships for ${loadedCount}/${posts.length} posts`);
    }
    // Load users with their posts
    console.log('\nLoading users with their posts (eager loading):');
    const users = await User.limit(3).exec();
    if (users.length > 0) {
      await this.framework.relationshipManager.eagerLoadRelationships(
        users,
        ['posts', 'following']
      );
      for (const user of users) {
        const posts = user.getRelation('posts') || [];
        const following = user.getRelation('following') || [];
        console.log(`User ${user.username}: ${posts.length} posts, ${following.length} following`);
      }
    }
    console.log('');
  }

  /** Shows on-demand loading, cached re-access, and explicit reload of a relation. */
  async lazyLoadingExamples(): Promise<void> {
    console.log('💤 Lazy Loading Examples');
    console.log('=========================\n');
    const posts = await Post.where('isPublic', '=', true).limit(2).exec();
    if (posts.length > 0) {
      const post = posts[0];
      console.log('Demonstrating lazy loading behavior:');
      console.log(`Post title: ${post.title}`);
      console.log(`Author loaded initially: ${post.isRelationLoaded('author')}`);
      // First access triggers loading
      console.log('Accessing author (triggers lazy loading)...');
      const author = await post.loadRelation('author');
      console.log(`Author: ${author?.username || 'Unknown'}`);
      console.log(`Author loaded after access: ${post.isRelationLoaded('author')}`);
      // Second access uses cached value
      console.log('Accessing author again (uses cache)...');
      const authorAgain = post.getRelation('author');
      console.log(`Author (cached): ${authorAgain?.username || 'Unknown'}`);
      // Reload relationship (clears cache and reloads)
      console.log('Reloading author relationship...');
      const reloadedAuthor = await post.reloadRelation('author');
      console.log(`Reloaded author: ${reloadedAuthor?.username || 'Unknown'}`);
    }
    console.log('');
  }

  /** Loads relations through query constraints (time windows, raw SQL filters, ordering, limits). */
  async constrainedLoadingExamples(): Promise<void> {
    console.log('🎯 Constrained Loading Examples');
    console.log('=================================\n');
    const posts = await Post.where('isPublic', '=', true).limit(3).exec();
    if (posts.length > 0) {
      const post = posts[0];
      // Load only recent comments
      console.log(`Loading recent comments for post: ${post.title}`);
      const recentComments = await post.loadRelationWithConstraints('comments', (query) =>
        query.where('createdAt', '>', Date.now() - 86400000) // Last 24 hours
          .orderBy('createdAt', 'desc')
          .limit(5)
      );
      console.log(`Recent comments loaded: ${Array.isArray(recentComments) ? recentComments.length : 0}`);
      // Load comments with minimum length
      console.log(`Loading substantive comments (>50 chars):`);
      const substantiveComments = await post.loadRelationWithConstraints('comments', (query) =>
        query.whereRaw('LENGTH(content) > ?', [50])
          .orderBy('createdAt', 'desc')
          .limit(3)
      );
      console.log(`Substantive comments: ${Array.isArray(substantiveComments) ? substantiveComments.length : 0}`);
    }
    // Load user posts with constraints
    const users = await User.limit(2).exec();
    if (users.length > 0) {
      const user = users[0];
      console.log(`\nLoading popular posts for user: ${user.username}`);
      const popularPosts = await user.loadRelationWithConstraints('posts', (query) =>
        query.where('likeCount', '>', 5)
          .where('isPublic', '=', true)
          .orderBy('likeCount', 'desc')
          .limit(10)
      );
      console.log(`Popular posts: ${Array.isArray(popularPosts) ? popularPosts.length : 0}`);
    }
    console.log('');
  }

  /**
   * Inspects relationship-cache statistics before/after repeated loads and a
   * cache warmup, then prints a per-type performance breakdown.
   */
  async cacheOptimizationExamples(): Promise<void> {
    console.log('🚀 Cache Optimization Examples');
    console.log('===============================\n');
    // Get cache stats before
    const statsBefore = this.framework.relationshipManager.getRelationshipCacheStats();
    console.log('Relationship cache stats before:');
    console.log(`- Total entries: ${statsBefore.cache.totalEntries}`);
    console.log(`- Hit rate: ${(statsBefore.cache.hitRate * 100).toFixed(2)}%`);
    // Load relationships multiple times to demonstrate caching
    const posts = await Post.where('isPublic', '=', true).limit(3).exec();
    if (posts.length > 0) {
      console.log('\nLoading relationships multiple times (should hit cache):');
      for (let i = 0; i < 3; i++) {
        const startTime = Date.now();
        await posts[0].loadRelation('author');
        await posts[0].loadRelation('comments');
        const duration = Date.now() - startTime;
        console.log(`Iteration ${i + 1}: ${duration}ms`);
      }
    }
    // Warm up cache
    console.log('\nWarming up relationship cache:');
    const allPosts = await Post.limit(5).exec();
    const allUsers = await User.limit(3).exec();
    await this.framework.relationshipManager.warmupRelationshipCache(
      allPosts,
      ['author', 'comments']
    );
    await this.framework.relationshipManager.warmupRelationshipCache(
      allUsers,
      ['posts', 'following']
    );
    // Get cache stats after
    const statsAfter = this.framework.relationshipManager.getRelationshipCacheStats();
    console.log('\nRelationship cache stats after warmup:');
    console.log(`- Total entries: ${statsAfter.cache.totalEntries}`);
    console.log(`- Hit rate: ${(statsAfter.cache.hitRate * 100).toFixed(2)}%`);
    console.log(`- Memory usage: ${(statsAfter.cache.memoryUsage / 1024).toFixed(2)} KB`);
    // Show cache performance analysis
    const performance = statsAfter.performance;
    console.log('\nCache performance analysis:');
    console.log(`- Average age: ${(performance.averageAge / 1000).toFixed(2)} seconds`);
    console.log(`- Relationship types in cache:`);
    // relationshipTypes iterates as (count, type) — presumably a Map of
    // type -> entry count; verify against RelationshipManager's stats shape.
    performance.relationshipTypes.forEach((count, type) => {
      console.log(`  * ${type}: ${count} entries`);
    });
    console.log('');
  }

  /**
   * Walks a relation chain that spans storage locations: users live in the
   * global database while each user's posts live in that user's database.
   */
  async crossDatabaseRelationships(): Promise<void> {
    console.log('🌐 Cross-Database Relationship Examples');
    console.log('=========================================\n');
    // This demonstrates relationships that span across user databases and global databases
    // Get users (stored in global database)
    const users = await User.limit(2).exec();
    if (users.length >= 2) {
      const user1 = users[0];
      const user2 = users[1];
      console.log(`Loading cross-database relationships:`);
      console.log(`User 1: ${user1.username} (global DB)`);
      console.log(`User 2: ${user2.username} (global DB)`);
      // Load posts for user1 (stored in user1's database)
      const user1Posts = await user1.loadRelation('posts');
      console.log(`User 1 posts (from user DB): ${Array.isArray(user1Posts) ? user1Posts.length : 0}`);
      // Load posts for user2 (stored in user2's database)
      const user2Posts = await user2.loadRelation('posts');
      console.log(`User 2 posts (from user DB): ${Array.isArray(user2Posts) ? user2Posts.length : 0}`);
      // Load followers relationship (stored in global database)
      const user1Following = await user1.loadRelation('following');
      console.log(`User 1 following (from global DB): ${Array.isArray(user1Following) ? user1Following.length : 0}`);
      // Demonstrate the complexity: Post (user DB) -> Author (global DB) -> Posts (back to user DB)
      if (Array.isArray(user1Posts) && user1Posts.length > 0) {
        const post = user1Posts[0];
        console.log(`\nDemonstrating complex cross-DB relationship chain:`);
        console.log(`Post: "${post.title}" (from user DB)`);
        const author = await post.loadRelation('author');
        console.log(`-> Author: ${author?.username || 'Unknown'} (from global DB)`);
        if (author) {
          const authorPosts = await author.loadRelation('posts');
          console.log(`-> Author's posts: ${Array.isArray(authorPosts) ? authorPosts.length : 0} (back to user DB)`);
        }
      }
    }
    console.log('');
  }

  /**
   * Benchmarks sequential (N+1) loading vs eager loading vs cached repeat
   * loading, then prints cache memory stats and exercises cleanup/invalidation.
   */
  async performanceExamples(): Promise<void> {
    console.log('📈 Performance Examples');
    console.log('========================\n');
    // Compare different loading strategies
    const posts = await Post.where('isPublic', '=', true).limit(10).exec();
    if (posts.length > 0) {
      console.log(`Performance comparison for ${posts.length} posts:\n`);
      // Strategy 1: Sequential loading (N+1 problem)
      console.log('1. Sequential loading (N+1 queries):');
      const sequentialStart = Date.now();
      for (const post of posts) {
        await post.loadRelation('author');
      }
      const sequentialTime = Date.now() - sequentialStart;
      console.log(` Time: ${sequentialTime}ms (${(sequentialTime / posts.length).toFixed(2)}ms per post)`);
      // Clear loaded relationships for fair comparison
      // NOTE(review): reaches into the model's private `_loadedRelations` map;
      // confirm no public reset API exists before relying on this.
      posts.forEach(post => {
        post._loadedRelations.clear();
      });
      // Strategy 2: Eager loading (optimal)
      console.log('\n2. Eager loading (optimized):');
      const eagerStart = Date.now();
      await this.framework.relationshipManager.eagerLoadRelationships(posts, ['author']);
      const eagerTime = Date.now() - eagerStart;
      console.log(` Time: ${eagerTime}ms (${(eagerTime / posts.length).toFixed(2)}ms per post)`);
      console.log(` Performance improvement: ${(sequentialTime / eagerTime).toFixed(2)}x faster`);
      // Strategy 3: Cached loading (fastest for repeated access)
      console.log('\n3. Cached loading (repeated access):');
      const cachedStart = Date.now();
      await this.framework.relationshipManager.eagerLoadRelationships(posts, ['author']);
      const cachedTime = Date.now() - cachedStart;
      console.log(` Time: ${cachedTime}ms (cache hit)`);
      console.log(` Cache efficiency: ${(eagerTime / Math.max(cachedTime, 1)).toFixed(2)}x faster than first load`);
    }
    // Memory usage demonstration
    console.log('\nMemory usage analysis:');
    const memoryStats = this.framework.relationshipManager.getRelationshipCacheStats();
    console.log(`- Cache entries: ${memoryStats.cache.totalEntries}`);
    console.log(`- Memory usage: ${(memoryStats.cache.memoryUsage / 1024).toFixed(2)} KB`);
    console.log(`- Average per entry: ${memoryStats.cache.totalEntries > 0 ? (memoryStats.cache.memoryUsage / memoryStats.cache.totalEntries).toFixed(2) : 0} bytes`);
    // Cache cleanup demonstration
    console.log('\nCache cleanup:');
    const expiredCount = this.framework.relationshipManager.cleanupExpiredCache();
    console.log(`- Cleaned up ${expiredCount} expired entries`);
    // Model-based invalidation
    const invalidatedCount = this.framework.relationshipManager.invalidateModelCache('User');
    console.log(`- Invalidated ${invalidatedCount} User-related cache entries`);
    console.log('');
  }

  /** Shows conditional loading, paginated relation loading, and relationship metadata inspection. */
  async demonstrateAdvancedFeatures(): Promise<void> {
    console.log('🔬 Advanced Relationship Features');
    console.log('==================================\n');
    const posts = await Post.where('isPublic', '=', true).limit(3).exec();
    if (posts.length > 0) {
      const post = posts[0];
      // Demonstrate conditional loading
      console.log('Conditional relationship loading:');
      if (!post.isRelationLoaded('author')) {
        console.log('- Author not loaded, loading now...');
        await post.loadRelation('author');
      } else {
        console.log('- Author already loaded, using cached version');
      }
      // Demonstrate partial loading with pagination
      console.log('\nPaginated relationship loading:');
      const page1Comments = await post.loadRelationWithConstraints('comments', (query) =>
        query.orderBy('createdAt', 'desc').limit(5).offset(0)
      );
      console.log(`- Page 1: ${Array.isArray(page1Comments) ? page1Comments.length : 0} comments`);
      const page2Comments = await post.loadRelationWithConstraints('comments', (query) =>
        query.orderBy('createdAt', 'desc').limit(5).offset(5)
      );
      console.log(`- Page 2: ${Array.isArray(page2Comments) ? page2Comments.length : 0} comments`);
      // Demonstrate relationship statistics
      console.log('\nRelationship loading statistics:');
      const modelClass = post.constructor as any;
      const relationships = Array.from(modelClass.relationships?.keys() || []);
      console.log(`- Available relationships: ${relationships.join(', ')}`);
      console.log(`- Currently loaded: ${post.getLoadedRelations().join(', ')}`);
    }
    console.log('');
  }
}
// Usage example
/**
 * Entry point for the relationship demos: initializes the framework, seeds
 * relationship-heavy sample data, runs every example plus the advanced
 * feature walkthrough, prints final cache stats, and always shuts down.
 *
 * @param orbitDBService - OrbitDB service handle passed through to the framework.
 * @param ipfsService - IPFS service handle passed through to the framework.
 */
export async function runRelationshipExamples(
  orbitDBService: any,
  ipfsService: any
): Promise<void> {
  const framework = new SocialPlatformFramework();
  try {
    await framework.initialize(orbitDBService, ipfsService, 'development');

    // Seed data so the relation chains have something to traverse.
    await createSampleDataForRelationships(framework);

    const demos = new RelationshipExamples(framework);
    await demos.runAllExamples();
    await demos.demonstrateAdvancedFeatures();

    // Report how the relationship cache behaved across all demos.
    const finalStats = framework.relationshipManager.getRelationshipCacheStats();
    console.log('📊 Final Relationship Cache Statistics:');
    console.log(JSON.stringify(finalStats, null, 2));
  } catch (error) {
    console.error('❌ Relationship examples failed:', error);
  } finally {
    await framework.stop();
  }
}
/**
 * Seed users, posts, comments and follow edges used by the relationship
 * examples. Failures are logged as warnings so the demos can still run with
 * partial data.
 */
async function createSampleDataForRelationships(framework: SocialPlatformFramework): Promise<void> {
  console.log('🗄️ Creating sample data for relationship examples...\n');
  try {
    // Users first — everything else hangs off their ids.
    const alice = await framework.createUser({
      username: 'alice',
      email: 'alice@example.com',
      bio: 'Framework developer and relationship expert'
    });
    const bob = await framework.createUser({
      username: 'bob',
      email: 'bob@example.com',
      bio: 'Database architect'
    });
    const charlie = await framework.createUser({
      username: 'charlie',
      email: 'charlie@example.com',
      bio: 'Performance optimization specialist'
    });

    // Posts, keeping handles so comments can reference them.
    const post1 = await framework.createPost(alice.id, {
      title: 'Understanding Relationships in Distributed Databases',
      content: 'Relationships across distributed databases present unique challenges...',
      tags: ['relationships', 'distributed', 'databases'],
      isPublic: true
    });
    const post2 = await framework.createPost(bob.id, {
      title: 'Optimizing Cross-Database Queries',
      content: 'When data spans multiple databases, query optimization becomes crucial...',
      tags: ['optimization', 'queries', 'performance'],
      isPublic: true
    });
    const post3 = await framework.createPost(alice.id, {
      title: 'Caching Strategies for Relationships',
      content: 'Effective caching can dramatically improve relationship loading performance...',
      tags: ['caching', 'performance', 'relationships'],
      isPublic: true
    });

    // Comments as (author id, post id, body) triples, created in order.
    const commentSpecs: Array<[string, string, string]> = [
      [bob.id, post1.id, 'Great explanation of the distributed relationship challenges!'],
      [charlie.id, post1.id, 'This helped me understand the complexity involved.'],
      [alice.id, post2.id, 'Excellent optimization techniques, Bob!'],
      [charlie.id, post2.id, 'These optimizations improved our app performance by 3x.'],
      [bob.id, post3.id, 'Caching relationships was a game-changer for our system.'],
    ];
    for (const [authorId, postId, body] of commentSpecs) {
      await framework.createComment(authorId, postId, body);
    }

    // follower -> followee edges (a cycle plus one extra edge).
    const followEdges: Array<[string, string]> = [
      [alice.id, bob.id],
      [bob.id, charlie.id],
      [charlie.id, alice.id],
      [alice.id, charlie.id],
    ];
    for (const [followerId, followeeId] of followEdges) {
      await framework.followUser(followerId, followeeId);
    }

    console.log('✅ Sample relationship data created successfully!\n');
  } catch (error) {
    console.warn('⚠️ Some sample data creation failed:', error);
  }
}

View File

@ -1,74 +0,0 @@
import path from 'path';
/**
 * Top-level configuration shape for a Debros node.
 * Populated from environment variables by `defaultConfig` below.
 */
export interface DebrosConfig {
  // Process/runtime identity settings.
  env: {
    isDevelopment: boolean;
    port: string | number;
    fingerprint: string;
    nickname?: string;
    keyPath: string;
    host: string;
  };
  // Feature flags.
  features: {
    enableLoadBalancing: boolean;
  };
  // IPFS node settings, including peer service discovery.
  ipfs: {
    repo: string;
    swarmKey: string;
    bootstrapNodes?: string;
    blockstorePath: string;
    serviceDiscovery: {
      topic: string;
      heartbeatInterval: number; // ms
      staleTimeout: number; // ms
      logInterval: number; // ms
      publicAddress: string;
    };
  };
  // OrbitDB storage location.
  orbitdb: {
    directory: string;
  };
  // Connection load-balancer tuning.
  loadBalancer: {
    maxConnections: number;
    strategy: string;
  };
}
// Default configuration values, sourced from environment variables with
// fallbacks suitable for local development. Numeric env vars are parsed with
// an explicit radix (a bare parseInt is a classic footgun flagged by the
// eslint `radix` rule).
export const defaultConfig: DebrosConfig = {
  env: {
    isDevelopment: process.env.NODE_ENV !== 'production',
    port: process.env.PORT || 7777,
    fingerprint: process.env.FINGERPRINT || 'default-fingerprint',
    nickname: process.env.NICKNAME,
    keyPath: process.env.KEY_PATH || '/var/lib/debros/keys',
    host: process.env.HOST || '',
  },
  features: {
    enableLoadBalancing: process.env.ENABLE_LOAD_BALANCING !== 'false',
  },
  ipfs: {
    repo: './ipfs-repo',
    swarmKey: path.resolve(process.cwd(), 'swarm.key'),
    bootstrapNodes: process.env.BOOTSTRAP_NODES,
    blockstorePath: path.resolve(process.cwd(), 'blockstore'),
    serviceDiscovery: {
      topic: process.env.SERVICE_DISCOVERY_TOPIC || 'debros-service-discovery',
      heartbeatInterval: Number.parseInt(process.env.HEARTBEAT_INTERVAL || '5000', 10),
      staleTimeout: Number.parseInt(process.env.STALE_PEER_TIMEOUT || '30000', 10),
      logInterval: Number.parseInt(process.env.PEER_LOG_INTERVAL || '60000', 10),
      publicAddress:
        process.env.NODE_PUBLIC_ADDRESS || `http://localhost:${process.env.PORT || 7777}`,
    },
  },
  orbitdb: {
    directory: path.resolve(process.cwd(), 'orbitdb/debros'),
  },
  loadBalancer: {
    maxConnections: Number.parseInt(process.env.MAX_CONNECTIONS || '1000', 10),
    strategy: process.env.LOAD_BALANCING_STRATEGY || 'least-loaded',
  },
};
// Export a singleton config object shared by the whole process.
export const config = defaultConfig;

View File

@ -1,268 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import { init as initIpfs, stop as stopIpfs } from '../../ipfs/ipfsService';
import { init as initOrbitDB } from '../../orbit/orbitDBService';
import { DBConnection, ErrorCode } from '../types';
import { DBError } from './error';
const logger = createServiceLogger('DB_CONNECTION');
// Connection pool of database instances, keyed by connection id.
const connections = new Map<string, DBConnection>();
// Connection returned when callers don't specify an id; null until init().
let defaultConnectionId: string | null = null;
// Handle for the periodic stale-connection sweep started by init().
let cleanupInterval: NodeJS.Timeout | null = null;
// Configuration
const CONNECTION_TIMEOUT = 3600000; // 1 hour in milliseconds
const CLEANUP_INTERVAL = 300000; // 5 minutes in milliseconds
const MAX_RETRY_ATTEMPTS = 3;
const RETRY_DELAY = 2000; // 2 seconds (base for exponential backoff)
/**
 * Initialize the database service.
 * This abstracts away OrbitDB and IPFS from the end user.
 *
 * Starts the stale-connection sweep (once), reuses an existing active
 * connection when `connectionId` matches one, and otherwise initializes IPFS
 * and OrbitDB with up to MAX_RETRY_ATTEMPTS tries and exponential backoff.
 *
 * @param connectionId Optional id to reuse/assign; a unique id is generated when omitted.
 * @returns The id of the ready connection.
 * @throws DBError with ErrorCode.INITIALIZATION_FAILED when all attempts fail.
 */
export const init = async (connectionId?: string): Promise<string> => {
  // Start connection cleanup interval if not already running
  if (!cleanupInterval) {
    cleanupInterval = setInterval(cleanupStaleConnections, CLEANUP_INTERVAL);
    logger.info(`Connection cleanup scheduled every ${CLEANUP_INTERVAL / 60000} minutes`);
  }
  // `String.prototype.substr` is deprecated; `slice(2, 11)` yields the same
  // 9-character random suffix.
  const connId = connectionId || `conn_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  // Check if connection already exists
  if (connections.has(connId)) {
    const existingConnection = connections.get(connId)!;
    if (existingConnection.isActive) {
      logger.info(`Using existing active connection: ${connId}`);
      return connId;
    }
  }
  logger.info(`Initializing DB service with connection ID: ${connId}`);
  let attempts = 0;
  let lastError: any = null;
  // Retry initialization with exponential backoff
  while (attempts < MAX_RETRY_ATTEMPTS) {
    try {
      // Initialize IPFS with retry logic
      const ipfsInstance = await initIpfs().catch((error) => {
        logger.error(
          `IPFS initialization failed (attempt ${attempts + 1}/${MAX_RETRY_ATTEMPTS}):`,
          error,
        );
        throw error;
      });
      // Initialize OrbitDB
      const orbitdbInstance = await initOrbitDB().catch((error) => {
        logger.error(
          `OrbitDB initialization failed (attempt ${attempts + 1}/${MAX_RETRY_ATTEMPTS}):`,
          error,
        );
        throw error;
      });
      // Store connection in pool
      connections.set(connId, {
        ipfs: ipfsInstance,
        orbitdb: orbitdbInstance,
        timestamp: Date.now(),
        isActive: true,
      });
      // Set as default if no default exists
      if (!defaultConnectionId) {
        defaultConnectionId = connId;
      }
      logger.info(`DB service initialized successfully with connection ID: ${connId}`);
      return connId;
    } catch (error) {
      lastError = error;
      attempts++;
      if (attempts >= MAX_RETRY_ATTEMPTS) {
        logger.error(
          `Failed to initialize DB service after ${MAX_RETRY_ATTEMPTS} attempts:`,
          error,
        );
        break;
      }
      // Wait before retrying with exponential backoff
      const delay = RETRY_DELAY * Math.pow(2, attempts - 1);
      logger.info(
        `Retrying initialization in ${delay}ms (attempt ${attempts + 1}/${MAX_RETRY_ATTEMPTS})...`,
      );
      // Clean up any partial initialization before retrying
      try {
        await stopIpfs();
      } catch (cleanupError) {
        logger.warn('Error during cleanup before retry:', cleanupError);
      }
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  throw new DBError(
    ErrorCode.INITIALIZATION_FAILED,
    `Failed to initialize database service after ${MAX_RETRY_ATTEMPTS} attempts`,
    lastError,
  );
};
/**
* Get the active connection
*/
export const getConnection = (connectionId?: string): DBConnection => {
const connId = connectionId || defaultConnectionId;
if (!connId || !connections.has(connId)) {
throw new DBError(
ErrorCode.NOT_INITIALIZED,
`No active database connection found${connectionId ? ` for ID: ${connectionId}` : ''}`,
);
}
const connection = connections.get(connId)!;
if (!connection.isActive) {
throw new DBError(ErrorCode.CONNECTION_ERROR, `Connection ${connId} is no longer active`);
}
// Update the timestamp to mark connection as recently used
connection.timestamp = Date.now();
return connection;
};
/**
 * Periodic sweep over the connection pool.
 * Active connections idle longer than CONNECTION_TIMEOUT are closed
 * asynchronously (fire-and-forget so the sweep never blocks); connections
 * already marked inactive are dropped from the pool immediately.
 */
export const cleanupStaleConnections = (): void => {
  try {
    const sweepTime = Date.now();
    let removedCount = 0;
    for (const [id, connection] of connections.entries()) {
      const idleMs = sweepTime - connection.timestamp;
      if (connection.isActive && idleMs > CONNECTION_TIMEOUT) {
        logger.info(
          `Closing stale connection: ${id} (inactive for ${idleMs / 60000} minutes)`,
        );
        // Close connection asynchronously (don't await to avoid blocking)
        closeConnection(id)
          .then((success) => {
            if (success) {
              logger.info(`Successfully closed stale connection: ${id}`);
            } else {
              logger.warn(`Failed to close stale connection: ${id}`);
            }
          })
          .catch((error) => {
            logger.error(`Error closing stale connection ${id}:`, error);
          });
        removedCount++;
      } else if (!connection.isActive) {
        // Inactive entries are just dead weight in the map — drop them.
        connections.delete(id);
        removedCount++;
      }
    }
    if (removedCount > 0) {
      logger.info(`Cleaned up ${removedCount} stale or inactive connections`);
    }
  } catch (error) {
    logger.error('Error during connection cleanup:', error);
  }
};
/**
 * Close one pooled connection: stop its OrbitDB instance, deactivate it,
 * re-elect a default connection if needed, and remove it from the pool.
 *
 * @param connectionId Id of the connection to close.
 * @returns true on success, false when the id is unknown or teardown failed.
 */
export const closeConnection = async (connectionId: string): Promise<boolean> => {
  const connection = connections.get(connectionId);
  if (!connection) {
    return false;
  }
  try {
    // Stop OrbitDB first; IPFS shutdown is handled globally by stop().
    if (connection.orbitdb) {
      await connection.orbitdb.stop();
    }
    connection.isActive = false;
    // If this was the default connection, promote another active one.
    if (defaultConnectionId === connectionId) {
      defaultConnectionId = null;
      for (const [candidateId, candidate] of connections.entries()) {
        if (candidate.isActive) {
          defaultConnectionId = candidateId;
          break;
        }
      }
    }
    connections.delete(connectionId);
    logger.info(`Closed database connection: ${connectionId}`);
    return true;
  } catch (error) {
    logger.error(`Error closing connection ${connectionId}:`, error);
    return false;
  }
};
/**
* Stop all database connections
*/
export const stop = async (): Promise<void> => {
try {
// Stop the cleanup interval
if (cleanupInterval) {
clearInterval(cleanupInterval);
cleanupInterval = null;
}
// Close all connections
const promises: Promise<boolean>[] = [];
for (const [id, connection] of connections.entries()) {
if (connection.isActive) {
promises.push(closeConnection(id));
}
}
// Wait for all connections to close
await Promise.allSettled(promises);
// Stop IPFS if needed
const ipfs = connections.get(defaultConnectionId || '')?.ipfs;
if (ipfs) {
await stopIpfs();
}
// Clear all connections
connections.clear();
defaultConnectionId = null;
logger.info('All DB connections stopped successfully');
} catch (error: any) {
logger.error('Error stopping DB connections:', error);
throw new Error(`Failed to stop database connections: ${error.message}`);
}
};

View File

@ -1,17 +0,0 @@
import { ErrorCode } from '../types';
// Re-export error code for easier access
export { ErrorCode };
// Custom error class with error codes
/**
 * Database error carrying a machine-readable ErrorCode plus optional
 * diagnostic details (e.g. the underlying cause).
 */
export class DBError extends Error {
  constructor(
    public code: ErrorCode,
    message: string,
    public details?: any,
  ) {
    super(message);
    this.name = 'DBError';
  }
}

View File

@ -1,208 +0,0 @@
import { createServiceLogger } from '../utils/logger';
import { init, closeConnection, stop } from './core/connection';
import { defineSchema } from './schema/validator';
import * as events from './events/eventService';
import { Transaction } from './transactions/transactionService';
import {
StoreType,
CreateResult,
UpdateResult,
PaginatedResult,
QueryOptions,
ListOptions,
ErrorCode,
} from './types';
import { DBError } from './core/error';
import { getStore } from './stores/storeFactory';
import { uploadFile, getFile, deleteFile } from './stores/fileStore';
// Re-export imported functions
export { init, closeConnection, stop, defineSchema, uploadFile, getFile, deleteFile };
const logger = createServiceLogger('DB_SERVICE');
/**
 * Create a new transaction for batching operations.
 *
 * @param connectionId Optional connection to bind the transaction to.
 */
export const createTransaction = (connectionId?: string): Transaction =>
  new Transaction(connectionId);
/**
* Execute all operations in a transaction
*/
export const commitTransaction = async (
transaction: Transaction,
): Promise<{ success: boolean; results: any[] }> => {
try {
// Validate that we have operations
const operations = transaction.getOperations();
if (operations.length === 0) {
return { success: true, results: [] };
}
const connectionId = transaction.getConnectionId();
const results = [];
// Execute all operations
for (const operation of operations) {
let result;
switch (operation.type) {
case 'create':
result = await create(operation.collection, operation.id, operation.data, {
connectionId,
});
break;
case 'update':
result = await update(operation.collection, operation.id, operation.data, {
connectionId,
});
break;
case 'delete':
result = await remove(operation.collection, operation.id, { connectionId });
break;
}
results.push(result);
}
return { success: true, results };
} catch (error) {
logger.error('Transaction failed:', error);
throw new DBError(ErrorCode.TRANSACTION_FAILED, 'Failed to commit transaction', error);
}
};
/**
 * Create a new document in the specified collection.
 * Dispatches to the store implementation selected by `options.storeType`
 * (KEYVALUE when omitted).
 */
export const create = async <T extends Record<string, any>>(
  collection: string,
  id: string,
  data: Omit<T, 'createdAt' | 'updatedAt'>,
  options?: { connectionId?: string; storeType?: StoreType },
): Promise<CreateResult> => {
  const store = getStore(options?.storeType || StoreType.KEYVALUE);
  return store.create(collection, id, data, { connectionId: options?.connectionId });
};
/**
 * Fetch a document by id from a collection, or null when absent.
 * Dispatches to the store selected by `options.storeType` (KEYVALUE default).
 */
export const get = async <T extends Record<string, any>>(
  collection: string,
  id: string,
  options?: { connectionId?: string; skipCache?: boolean; storeType?: StoreType },
): Promise<T | null> => {
  const store = getStore(options?.storeType || StoreType.KEYVALUE);
  return store.get(collection, id, options);
};
/**
 * Update a document in a collection (optionally upserting).
 * Dispatches to the store selected by `options.storeType` (KEYVALUE default).
 */
export const update = async <T extends Record<string, any>>(
  collection: string,
  id: string,
  data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
  options?: { connectionId?: string; upsert?: boolean; storeType?: StoreType },
): Promise<UpdateResult> => {
  const store = getStore(options?.storeType || StoreType.KEYVALUE);
  return store.update(collection, id, data, options);
};
/**
 * Delete a document from a collection.
 * Dispatches to the store selected by `options.storeType` (KEYVALUE default).
 */
export const remove = async (
  collection: string,
  id: string,
  options?: { connectionId?: string; storeType?: StoreType },
): Promise<boolean> => {
  const store = getStore(options?.storeType || StoreType.KEYVALUE);
  return store.remove(collection, id, options);
};
/**
 * List all documents in a collection with pagination.
 * `storeType` selects the backing store and is stripped from the options
 * forwarded to it.
 */
export const list = async <T extends Record<string, any>>(
  collection: string,
  options?: ListOptions & { storeType?: StoreType },
): Promise<PaginatedResult<T>> => {
  const { storeType, ...storeOptions } = options || {};
  return getStore(storeType || StoreType.KEYVALUE).list(collection, storeOptions);
};
/**
 * Query documents in a collection with filtering and pagination.
 *
 * @param collection - collection to search
 * @param filter - predicate applied to each document
 * @param options - standard QueryOptions plus an optional store type
 * @returns a paginated result set of matching documents
 */
export const query = async <T extends Record<string, any>>(
  collection: string,
  filter: (doc: T) => boolean,
  options?: QueryOptions & { storeType?: StoreType },
): Promise<PaginatedResult<T>> => {
  // Split the storeType selector off so the store only sees plain QueryOptions.
  const { storeType: requestedType, ...storeOptions } = options || {};
  return getStore(requestedType || StoreType.KEYVALUE).query(collection, filter, storeOptions);
};
/**
 * Create an index for a collection to speed up queries.
 *
 * @param collection - collection to index
 * @param field - document field the index is built on
 * @param options - optional connection id and store type
 * @returns true when the underlying store created (or supports) the index
 */
export const createIndex = async (
  collection: string,
  field: string,
  options?: { connectionId?: string; storeType?: StoreType },
): Promise<boolean> => {
  // Resolve the backing store (KEYVALUE by default) and delegate index creation.
  const backend = getStore(options?.storeType || StoreType.KEYVALUE);
  return backend.createIndex(collection, field, { connectionId: options?.connectionId });
};
/**
 * Subscribe to database events (delegates to the internal event service).
 */
export const subscribe = events.subscribe;
// Re-export error types and codes
export { DBError } from './core/error';
export { ErrorCode } from './types';
// Export store types
export { StoreType } from './types';
// Default export bundles the module's public API into a single object so
// consumers can `import db from ...` instead of cherry-picking named exports.
export default {
  init,
  create,
  get,
  update,
  remove,
  list,
  query,
  createIndex,
  createTransaction,
  commitTransaction,
  subscribe,
  uploadFile,
  getFile,
  deleteFile,
  defineSchema,
  closeConnection,
  stop,
  StoreType,
};

View File

@ -1,30 +0,0 @@
import { dbEvents } from '../types';
// Event types
type DBEventType = 'document:created' | 'document:updated' | 'document:deleted';
/**
 * Subscribe to database events.
 *
 * @param event - the event name to listen for
 * @param callback - listener invoked with the event payload
 * @returns a disposer that detaches this exact listener when called
 */
export const subscribe = (event: DBEventType, callback: (data: any) => void): (() => void) => {
  dbEvents.on(event, callback);
  // Hand back an unsubscribe function bound to the same event/callback pair.
  return function unsubscribe() {
    dbEvents.off(event, callback);
  };
};
/**
 * Emit an event to every listener registered via subscribe().
 *
 * @param event - the event name to publish
 * @param data - payload forwarded unchanged to listeners
 */
export const emit = (event: DBEventType, data: any): void => {
  dbEvents.emit(event, data);
};
/**
 * Detach every listener, for all event types, from the shared emitter.
 * NOTE(review): presumably used on shutdown/reset paths — confirm callers.
 */
export const removeAllListeners = (): void => {
  dbEvents.removeAllListeners();
};

View File

@ -1,223 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import { CollectionSchema, ErrorCode } from '../types';
import { DBError } from '../core/error';
const logger = createServiceLogger('DB_SCHEMA');
// Store collection schemas
const schemas = new Map<string, CollectionSchema>();
/**
 * Define a schema for a collection.
 * A later call with the same collection name overwrites the previous schema
 * (Map.set semantics).
 */
export const defineSchema = (collection: string, schema: CollectionSchema): void => {
  schemas.set(collection, schema);
  logger.info(`Schema defined for collection: ${collection}`);
};
/**
 * Validate a document against its schema.
 *
 * Passes trivially when no schema is registered for the collection.
 * Throws DBError(INVALID_SCHEMA) on the first violation found; returns true
 * when the document is valid. Decomposed into one helper per declared type so
 * each validation rule can be read (and extended) in isolation.
 *
 * @param collection - collection whose registered schema is applied
 * @param document - document to validate (timestamps included)
 * @returns true when validation passes
 * @throws DBError with ErrorCode.INVALID_SCHEMA on any violation
 */
export const validateDocument = (collection: string, document: any): boolean => {
  const schema = schemas.get(collection);
  if (!schema) {
    return true; // No schema defined, so validation passes
  }
  // Check top-level required fields first
  if (schema.required) {
    for (const field of schema.required) {
      if (document[field] === undefined) {
        throw new DBError(ErrorCode.INVALID_SCHEMA, `Required field '${field}' is missing`, {
          collection,
          document,
        });
      }
    }
  }
  // Validate each declared property
  for (const [field, definition] of Object.entries(schema.properties)) {
    const value = document[field];
    // Skip undefined optional fields; per-property `required` is also honored
    if (value === undefined) {
      if ((definition as any).required) {
        throw new DBError(ErrorCode.INVALID_SCHEMA, `Required field '${field}' is missing`, {
          collection,
          document,
        });
      }
      continue;
    }
    validateFieldType(collection, field, definition, value);
  }
  return true;
};

/** Dispatch validation of a single present field to the type-specific rule. */
const validateFieldType = (collection: string, field: string, definition: any, value: any): void => {
  switch (definition.type) {
    case 'string':
      validateStringField(collection, field, definition, value);
      break;
    case 'number':
      validateNumberField(collection, field, definition, value);
      break;
    case 'boolean':
      validateBooleanField(collection, field, value);
      break;
    case 'array':
      validateArrayField(collection, field, definition, value);
      break;
    case 'object':
      validateObjectField(collection, field, value);
      break;
    case 'enum':
      validateEnumField(collection, field, definition, value);
      break;
  }
};

/** Type, pattern, and length (min/max characters) checks for string fields. */
const validateStringField = (collection: string, field: string, definition: any, value: any): void => {
  if (typeof value !== 'string') {
    throw new DBError(ErrorCode.INVALID_SCHEMA, `Field '${field}' must be a string`, {
      collection,
      field,
      value,
    });
  }
  // Pattern validation
  if (definition.pattern && !new RegExp(definition.pattern).test(value)) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' does not match pattern: ${definition.pattern}`,
      { collection, field, value },
    );
  }
  // Length validation
  if (definition.min !== undefined && value.length < definition.min) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must have at least ${definition.min} characters`,
      { collection, field, value },
    );
  }
  if (definition.max !== undefined && value.length > definition.max) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must have at most ${definition.max} characters`,
      { collection, field, value },
    );
  }
};

/** Type and numeric range (min/max value) checks for number fields. */
const validateNumberField = (collection: string, field: string, definition: any, value: any): void => {
  if (typeof value !== 'number') {
    throw new DBError(ErrorCode.INVALID_SCHEMA, `Field '${field}' must be a number`, {
      collection,
      field,
      value,
    });
  }
  // Range validation
  if (definition.min !== undefined && value < definition.min) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must be at least ${definition.min}`,
      { collection, field, value },
    );
  }
  if (definition.max !== undefined && value > definition.max) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must be at most ${definition.max}`,
      { collection, field, value },
    );
  }
};

/** Type check for boolean fields. */
const validateBooleanField = (collection: string, field: string, value: any): void => {
  if (typeof value !== 'boolean') {
    throw new DBError(ErrorCode.INVALID_SCHEMA, `Field '${field}' must be a boolean`, {
      collection,
      field,
      value,
    });
  }
};

/** Type, length (min/max items), and per-item primitive checks for array fields. */
const validateArrayField = (collection: string, field: string, definition: any, value: any): void => {
  if (!Array.isArray(value)) {
    throw new DBError(ErrorCode.INVALID_SCHEMA, `Field '${field}' must be an array`, {
      collection,
      field,
      value,
    });
  }
  // Length validation
  if (definition.min !== undefined && value.length < definition.min) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must have at least ${definition.min} items`,
      { collection, field, value },
    );
  }
  if (definition.max !== undefined && value.length > definition.max) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must have at most ${definition.max} items`,
      { collection, field, value },
    );
  }
  // Validate array items if item schema is defined.
  // This is a simplified item validation: only primitive item types are
  // checked; complex item objects are not validated recursively.
  if (definition.items && value.length > 0) {
    for (let i = 0; i < value.length; i++) {
      const item = value[i];
      switch (definition.items.type) {
        case 'string':
          if (typeof item !== 'string') {
            throw new DBError(
              ErrorCode.INVALID_SCHEMA,
              `Item at index ${i} in field '${field}' must be a string`,
              { collection, field, item },
            );
          }
          break;
        case 'number':
          if (typeof item !== 'number') {
            throw new DBError(
              ErrorCode.INVALID_SCHEMA,
              `Item at index ${i} in field '${field}' must be a number`,
              { collection, field, item },
            );
          }
          break;
        case 'boolean':
          if (typeof item !== 'boolean') {
            throw new DBError(
              ErrorCode.INVALID_SCHEMA,
              `Item at index ${i} in field '${field}' must be a boolean`,
              { collection, field, item },
            );
          }
          break;
      }
    }
  }
};

/** Type check for object fields (plain objects only; arrays/null rejected). */
const validateObjectField = (collection: string, field: string, value: any): void => {
  if (typeof value !== 'object' || value === null || Array.isArray(value)) {
    throw new DBError(ErrorCode.INVALID_SCHEMA, `Field '${field}' must be an object`, {
      collection,
      field,
      value,
    });
  }
  // Nested object validation would go here in a real implementation
};

/** Membership check for enum fields; passes when no enum list is declared. */
const validateEnumField = (collection: string, field: string, definition: any, value: any): void => {
  if (definition.enum && !definition.enum.includes(value)) {
    throw new DBError(
      ErrorCode.INVALID_SCHEMA,
      `Field '${field}' must be one of: ${definition.enum.join(', ')}`,
      { collection, field, value },
    );
  }
};

View File

@ -1,413 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import {
ErrorCode,
StoreType,
StoreOptions,
CreateResult,
UpdateResult,
PaginatedResult,
QueryOptions,
ListOptions,
acquireLock,
releaseLock,
isLocked,
} from '../types';
import { DBError } from '../core/error';
import { BaseStore, openStore, prepareDocument } from './baseStore';
import * as events from '../events/eventService';
/**
* Abstract store implementation with common CRUD operations
* Specific store types extend this class and customize only what's different
*/
export abstract class AbstractStore implements BaseStore {
protected logger = createServiceLogger(this.getLoggerName());
protected storeType: StoreType;
constructor(storeType: StoreType) {
this.storeType = storeType;
}
/**
* Must be implemented by subclasses to provide the logger name
*/
protected abstract getLoggerName(): string;
/**
* Create a new document in the specified collection
*/
async create<T extends Record<string, any>>(
collection: string,
id: string,
data: Omit<T, 'createdAt' | 'updatedAt'>,
options?: StoreOptions,
): Promise<CreateResult> {
// Create a lock ID for this resource to prevent concurrent operations
const lockId = `${collection}:${id}:create`;
// Try to acquire a lock
if (!acquireLock(lockId)) {
this.logger.warn(
`Concurrent operation detected on ${collection}/${id}, waiting for completion`,
);
// Wait until the lock is released (poll every 100ms for max 5 seconds)
let attempts = 0;
while (isLocked(lockId) && attempts < 50) {
await new Promise((resolve) => setTimeout(resolve, 100));
attempts++;
}
if (isLocked(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Timed out waiting for lock on ${collection}/${id}`,
);
}
// Try to acquire lock again
if (!acquireLock(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to acquire lock on ${collection}/${id}`,
);
}
}
try {
const db = await openStore(collection, this.storeType, options);
// Prepare document for storage with validation
const document = this.prepareCreateDocument<T>(collection, id, data);
// Add to database - this will be overridden by specific implementations if needed
const hash = await this.performCreate(db, id, document);
// Emit change event
events.emit('document:created', { collection, id, document });
this.logger.info(`Created document in ${collection} with id ${id}`);
return { id, hash };
} catch (error: unknown) {
if (error instanceof DBError) {
throw error;
}
this.logger.error(`Error creating document in ${collection}:`, error);
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to create document in ${collection}: ${error instanceof Error ? error.message : String(error)}`,
error,
);
} finally {
// Always release the lock when done
releaseLock(lockId);
}
}
/**
* Prepare a document for creation - can be overridden by subclasses
*/
protected prepareCreateDocument<T extends Record<string, any>>(
collection: string,
id: string,
data: Omit<T, 'createdAt' | 'updatedAt'>,
): any {
return prepareDocument<T>(collection, data);
}
/**
* Perform the actual create operation - should be implemented by subclasses
*/
protected abstract performCreate(db: any, id: string, document: any): Promise<string>;
/**
* Get a document by ID from a collection
*/
async get<T extends Record<string, any>>(
collection: string,
id: string,
options?: StoreOptions & { skipCache?: boolean },
): Promise<T | null> {
try {
const db = await openStore(collection, this.storeType, options);
const document = await this.performGet<T>(db, id);
return document;
} catch (error: unknown) {
if (error instanceof DBError) {
throw error;
}
this.logger.error(`Error getting document ${id} from ${collection}:`, error);
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to get document ${id} from ${collection}: ${error instanceof Error ? error.message : String(error)}`,
error,
);
}
}
/**
* Perform the actual get operation - should be implemented by subclasses
*/
protected abstract performGet<T>(db: any, id: string): Promise<T | null>;
/**
* Update a document in a collection
*/
async update<T extends Record<string, any>>(
collection: string,
id: string,
data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
options?: StoreOptions & { upsert?: boolean },
): Promise<UpdateResult> {
// Create a lock ID for this resource to prevent concurrent operations
const lockId = `${collection}:${id}:update`;
// Try to acquire a lock
if (!acquireLock(lockId)) {
this.logger.warn(
`Concurrent operation detected on ${collection}/${id}, waiting for completion`,
);
// Wait until the lock is released (poll every 100ms for max 5 seconds)
let attempts = 0;
while (isLocked(lockId) && attempts < 50) {
await new Promise((resolve) => setTimeout(resolve, 100));
attempts++;
}
if (isLocked(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Timed out waiting for lock on ${collection}/${id}`,
);
}
// Try to acquire lock again
if (!acquireLock(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to acquire lock on ${collection}/${id}`,
);
}
}
try {
const db = await openStore(collection, this.storeType, options);
const existing = await this.performGet<T>(db, id);
if (!existing && !options?.upsert) {
throw new DBError(
ErrorCode.DOCUMENT_NOT_FOUND,
`Document ${id} not found in ${collection}`,
{ collection, id },
);
}
// Prepare document for update with validation
const document = this.prepareUpdateDocument<T>(collection, id, data, existing || undefined);
// Update in database
const hash = await this.performUpdate(db, id, document);
// Emit change event
events.emit('document:updated', { collection, id, document, previous: existing });
this.logger.info(`Updated document in ${collection} with id ${id}`);
return { id, hash };
} catch (error: unknown) {
if (error instanceof DBError) {
throw error;
}
this.logger.error(`Error updating document in ${collection}:`, error);
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to update document in ${collection}: ${error instanceof Error ? error.message : String(error)}`,
error,
);
} finally {
// Always release the lock when done
releaseLock(lockId);
}
}
/**
* Prepare a document for update - can be overridden by subclasses
*/
protected prepareUpdateDocument<T extends Record<string, any>>(
collection: string,
id: string,
data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
existing?: T,
): any {
return prepareDocument<T>(
collection,
data as unknown as Omit<T, 'createdAt' | 'updatedAt'>,
existing,
);
}
/**
* Perform the actual update operation - should be implemented by subclasses
*/
protected abstract performUpdate(db: any, id: string, document: any): Promise<string>;
/**
* Delete a document from a collection
*/
async remove(collection: string, id: string, options?: StoreOptions): Promise<boolean> {
// Create a lock ID for this resource to prevent concurrent operations
const lockId = `${collection}:${id}:remove`;
// Try to acquire a lock
if (!acquireLock(lockId)) {
this.logger.warn(
`Concurrent operation detected on ${collection}/${id}, waiting for completion`,
);
// Wait until the lock is released (poll every 100ms for max 5 seconds)
let attempts = 0;
while (isLocked(lockId) && attempts < 50) {
await new Promise((resolve) => setTimeout(resolve, 100));
attempts++;
}
if (isLocked(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Timed out waiting for lock on ${collection}/${id}`,
);
}
// Try to acquire lock again
if (!acquireLock(lockId)) {
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to acquire lock on ${collection}/${id}`,
);
}
}
try {
const db = await openStore(collection, this.storeType, options);
// Get the document before deleting for the event
const document = await this.performGet(db, id);
if (!document) {
this.logger.warn(`Document ${id} not found in ${collection} for deletion`);
return false;
}
// Delete from database
await this.performRemove(db, id);
// Emit change event
events.emit('document:deleted', { collection, id, document });
this.logger.info(`Deleted document in ${collection} with id ${id}`);
return true;
} catch (error: unknown) {
if (error instanceof DBError) {
throw error;
}
this.logger.error(`Error deleting document in ${collection}:`, error);
throw new DBError(
ErrorCode.OPERATION_FAILED,
`Failed to delete document in ${collection}: ${error instanceof Error ? error.message : String(error)}`,
error,
);
} finally {
// Always release the lock when done
releaseLock(lockId);
}
}
/**
* Perform the actual remove operation - should be implemented by subclasses
*/
protected abstract performRemove(db: any, id: string): Promise<void>;
/**
* Apply sorting to a list of documents
*/
protected applySorting<T extends Record<string, any>>(
documents: T[],
options?: ListOptions | QueryOptions,
): T[] {
if (!options?.sort) {
return documents;
}
const { field, order } = options.sort;
return [...documents].sort((a, b) => {
const valueA = a[field];
const valueB = b[field];
// Handle different data types for sorting
if (typeof valueA === 'string' && typeof valueB === 'string') {
return order === 'asc' ? valueA.localeCompare(valueB) : valueB.localeCompare(valueA);
} else if (typeof valueA === 'number' && typeof valueB === 'number') {
return order === 'asc' ? valueA - valueB : valueB - valueA;
} else if (valueA instanceof Date && valueB instanceof Date) {
return order === 'asc'
? valueA.getTime() - valueB.getTime()
: valueB.getTime() - valueA.getTime();
}
// Default comparison for other types
return order === 'asc'
? String(valueA).localeCompare(String(valueB))
: String(valueB).localeCompare(String(valueA));
});
}
/**
* Apply pagination to a list of documents
*/
protected applyPagination<T>(
documents: T[],
options?: ListOptions | QueryOptions,
): {
documents: T[];
total: number;
hasMore: boolean;
} {
const total = documents.length;
const offset = options?.offset || 0;
const limit = options?.limit || total;
const paginatedDocuments = documents.slice(offset, offset + limit);
const hasMore = offset + limit < total;
return {
documents: paginatedDocuments,
total,
hasMore,
};
}
/**
* List all documents in a collection with pagination
*/
abstract list<T extends Record<string, any>>(
collection: string,
options?: ListOptions,
): Promise<PaginatedResult<T>>;
/**
* Query documents in a collection with filtering and pagination
*/
abstract query<T extends Record<string, any>>(
collection: string,
filter: (doc: T) => boolean,
options?: QueryOptions,
): Promise<PaginatedResult<T>>;
/**
* Create an index for a collection to speed up queries
*/
abstract createIndex(collection: string, field: string, options?: StoreOptions): Promise<boolean>;
}

View File

@ -1,156 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import { openDB } from '../../orbit/orbitDBService';
import { validateDocument } from '../schema/validator';
import {
ErrorCode,
StoreType,
StoreOptions,
CreateResult,
UpdateResult,
PaginatedResult,
QueryOptions,
ListOptions,
} from '../types';
import { DBError } from '../core/error';
const logger = createServiceLogger('DB_STORE');
/**
 * Base Store interface that all store implementations should extend.
 * Timestamps (createdAt/updatedAt) are managed by the store layer, hence the
 * Omit<> in the write signatures.
 */
export interface BaseStore {
  /** Create a new document with the given id. */
  create<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Omit<T, 'createdAt' | 'updatedAt'>,
    options?: StoreOptions,
  ): Promise<CreateResult>;
  /** Fetch a document by id; resolves to null when it does not exist. */
  get<T extends Record<string, any>>(
    collection: string,
    id: string,
    options?: StoreOptions & { skipCache?: boolean },
  ): Promise<T | null>;
  /** Merge a partial document into an existing one (or insert when upsert is set). */
  update<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
    options?: StoreOptions & { upsert?: boolean },
  ): Promise<UpdateResult>;
  /** Delete a document; resolves to true when something was removed. */
  remove(collection: string, id: string, options?: StoreOptions): Promise<boolean>;
  /** List all documents with pagination. */
  list<T extends Record<string, any>>(
    collection: string,
    options?: ListOptions,
  ): Promise<PaginatedResult<T>>;
  /** Filter documents with a predicate, with pagination. */
  query<T extends Record<string, any>>(
    collection: string,
    filter: (doc: T) => boolean,
    options?: QueryOptions,
  ): Promise<PaginatedResult<T>>;
  /** Create an index on a field; resolves to false when unsupported. */
  createIndex(collection: string, field: string, options?: StoreOptions): Promise<boolean>;
}
/**
 * Open a store of the specified type.
 *
 * @param collection - collection (database) name to open
 * @param storeType - which OrbitDB store flavor to open
 * @param options - optional connection info (only the id is logged)
 * @throws DBError(OPERATION_FAILED) wrapping any underlying failure
 */
export async function openStore(
  collection: string,
  storeType: StoreType,
  options?: StoreOptions,
): Promise<any> {
  try {
    // Log minimal connection info to avoid leaking sensitive data
    logger.info(
      `Opening ${storeType} store for collection: ${collection} (connection ID: ${options?.connectionId || 'default'})`,
    );
    try {
      return await openDB(collection, storeType);
    } catch (err: any) {
      // Re-wrap so the outer handler sees a clearly-labelled OrbitDB failure.
      throw new Error(`OrbitDB openDB failed: ${err.message}`);
    }
  } catch (error) {
    logger.error(`Error opening ${storeType} store for collection ${collection}:`, error);
    // Add more context to the error for improved debugging
    const errorMessage = error instanceof Error ? error.message : String(error);
    throw new DBError(
      ErrorCode.OPERATION_FAILED,
      `Failed to open ${storeType} store for collection ${collection}: ${errorMessage}`,
      error,
    );
  }
}
/**
 * Recursively sanitize an object by removing undefined values.
 * This is necessary because IPLD doesn't support undefined values.
 *
 * Behavior: null/undefined become null; plain objects drop undefined-valued
 * keys recursively; arrays are sanitized element-wise; every other value
 * (Dates, class instances, primitives) passes through untouched.
 */
function deepSanitizeUndefined(obj: any): any {
  // == null matches both null and undefined deliberately.
  if (obj == null) {
    return null;
  }
  if (Array.isArray(obj)) {
    const sanitizedItems: any[] = [];
    for (const element of obj) {
      const cleaned = deepSanitizeUndefined(element);
      if (cleaned !== undefined) {
        sanitizedItems.push(cleaned);
      }
    }
    return sanitizedItems;
  }
  // Only plain objects are walked; other object kinds are returned as-is.
  if (typeof obj === 'object' && obj.constructor === Object) {
    const result: any = {};
    for (const [key, rawValue] of Object.entries(obj)) {
      const cleaned = deepSanitizeUndefined(rawValue);
      if (cleaned !== undefined) {
        result[key] = cleaned;
      }
    }
    return result;
  }
  return obj;
}
/**
 * Helper function to prepare a document for storage.
 *
 * Strips undefined values (IPLD-incompatible), stamps updatedAt (and
 * createdAt for new documents), merges over an existing document when one is
 * provided, and validates the final document against the collection schema.
 *
 * @param collection - collection whose schema governs validation
 * @param data - incoming payload (without timestamps)
 * @param existingDoc - prior version when this is an update, else absent
 * @returns the sanitized, timestamped, validated document
 */
export function prepareDocument<T extends Record<string, any>>(
  collection: string,
  data: Omit<T, 'createdAt' | 'updatedAt'>,
  existingDoc?: T | null,
): T {
  const now = Date.now();
  // Remove undefined values from the caller's payload up front.
  const cleanInput = deepSanitizeUndefined(data) as Omit<T, 'createdAt' | 'updatedAt'>;
  // Update path keeps the original createdAt; create path stamps both.
  const merged = existingDoc
    ? ({ ...existingDoc, ...cleanInput, updatedAt: now } as T)
    : ({ ...cleanInput, createdAt: now, updatedAt: now } as unknown as T);
  // Sanitize once more so fields inherited from existingDoc are also clean.
  const finalDocument = deepSanitizeUndefined(merged) as T;
  // Validate BEFORE returning so invalid documents never reach storage.
  validateDocument(collection, finalDocument);
  return finalDocument;
}

View File

@ -1,320 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import {
ErrorCode,
StoreType,
StoreOptions,
CreateResult,
UpdateResult,
PaginatedResult,
QueryOptions,
ListOptions,
} from '../types';
import { DBError } from '../core/error';
import { BaseStore, openStore } from './baseStore';
import * as events from '../events/eventService';
const logger = createServiceLogger('COUNTER_STORE');
/**
 * CounterStore implementation
 * Uses OrbitDB's counter store for simple numeric counters.
 *
 * NOTE(review): a counter database holds a single numeric value, so the `id`
 * parameters below exist only for API consistency with BaseStore and are not
 * passed to the underlying store.
 */
export class CounterStore implements BaseStore {
  /**
   * Create or set counter value.
   * Reads `data.value` when present, otherwise initializes to 0.
   */
  async create<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Omit<T, 'createdAt' | 'updatedAt'>,
    options?: StoreOptions,
  ): Promise<CreateResult> {
    try {
      const db = await openStore(collection, StoreType.COUNTER, options);
      // Extract value from data, default to 0
      const value =
        typeof data === 'object' && data !== null && 'value' in data ? Number(data.value) : 0;
      // Set the counter value (assumes the store exposes set() — TODO confirm
      // against the OrbitDB counter API, which historically only offers inc())
      const hash = await db.set(value);
      // Construct document representation
      const document = {
        id,
        value,
        createdAt: Date.now(),
        updatedAt: Date.now(),
      };
      // Emit change event
      events.emit('document:created', { collection, id, document });
      logger.info(`Set counter in ${collection} to ${value}`);
      return { id, hash };
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error setting counter in ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to set counter in ${collection}`,
        error,
      );
    }
  }

  /**
   * Get counter value.
   * Returns a synthetic document { id, value, updatedAt } wrapping the counter.
   */
  async get<T extends Record<string, any>>(
    collection: string,
    id: string,
    options?: StoreOptions & { skipCache?: boolean },
  ): Promise<T | null> {
    try {
      // Note: for counters, id is not used in the underlying store (there's only one counter per db)
      // but we use it for consistency with the API
      const db = await openStore(collection, StoreType.COUNTER, options);
      // Get the counter value
      const value = await db.value();
      // Construct document representation
      const document = {
        id,
        value,
        updatedAt: Date.now(),
      } as unknown as T;
      return document;
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error getting counter from ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to get counter from ${collection}`,
        error,
      );
    }
  }

  /**
   * Update counter (increment/decrement).
   * Interprets data as one of { increment: n }, { decrement: n }, or
   * { value: n }; anything else resets the counter to 0 via 'set'.
   */
  async update<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
    options?: StoreOptions & { upsert?: boolean },
  ): Promise<UpdateResult> {
    try {
      const db = await openStore(collection, StoreType.COUNTER, options);
      // Get current value before update (used for the event's `previous`)
      const currentValue = await db.value();
      // Extract value from data
      let value: number;
      let operation: 'increment' | 'decrement' | 'set' = 'set';
      // Check what kind of operation we're doing
      if (typeof data === 'object' && data !== null) {
        if ('increment' in data) {
          value = Number(data.increment);
          operation = 'increment';
        } else if ('decrement' in data) {
          value = Number(data.decrement);
          operation = 'decrement';
        } else if ('value' in data) {
          value = Number(data.value);
          operation = 'set';
        } else {
          value = 0;
          operation = 'set';
        }
      } else {
        value = 0;
        operation = 'set';
      }
      // Update the counter
      let hash;
      let newValue;
      switch (operation) {
        case 'increment':
          hash = await db.inc(value);
          newValue = currentValue + value;
          break;
        case 'decrement':
          hash = await db.inc(-value); // Counter store uses inc with negative value
          newValue = currentValue - value;
          break;
        case 'set':
          hash = await db.set(value);
          newValue = value;
          break;
      }
      // Construct document representation (newValue is computed locally, not
      // re-read from the store)
      const document = {
        id,
        value: newValue,
        updatedAt: Date.now(),
      };
      // Emit change event
      events.emit('document:updated', {
        collection,
        id,
        document,
        previous: { id, value: currentValue },
      });
      logger.info(`Updated counter in ${collection} from ${currentValue} to ${newValue}`);
      return { id, hash };
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error updating counter in ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to update counter in ${collection}`,
        error,
      );
    }
  }

  /**
   * Delete/reset counter.
   * Counters can't be truly deleted, so this resets the value to 0 and always
   * returns true on success.
   */
  async remove(collection: string, id: string, options?: StoreOptions): Promise<boolean> {
    try {
      const db = await openStore(collection, StoreType.COUNTER, options);
      // Get the current value for the event
      const currentValue = await db.value();
      // Reset the counter to 0 (counters can't be truly deleted)
      await db.set(0);
      // Emit change event
      events.emit('document:deleted', {
        collection,
        id,
        document: { id, value: currentValue },
      });
      logger.info(`Reset counter in ${collection} from ${currentValue} to 0`);
      return true;
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error resetting counter in ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to reset counter in ${collection}`,
        error,
      );
    }
  }

  /**
   * List all counters (for counter stores, there's only one counter per db),
   * so this always returns a single synthetic document.
   */
  async list<T extends Record<string, any>>(
    collection: string,
    options?: ListOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await openStore(collection, StoreType.COUNTER, options);
      const value = await db.value();
      // For counter stores, we just return one document with the counter value
      const document = {
        id: '0', // Default ID since counters don't have IDs
        value,
        updatedAt: Date.now(),
      } as unknown as T;
      return {
        documents: [document],
        total: 1,
        hasMore: false,
      };
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error listing counter in ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to list counter in ${collection}`,
        error,
      );
    }
  }

  /**
   * Query is not applicable for counter stores, but we implement for API
   * consistency: the filter is applied to the single synthetic document.
   */
  async query<T extends Record<string, any>>(
    collection: string,
    filter: (doc: T) => boolean,
    options?: QueryOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await openStore(collection, StoreType.COUNTER, options);
      const value = await db.value();
      // Create document
      const document = {
        id: '0', // Default ID since counters don't have IDs
        value,
        updatedAt: Date.now(),
      } as unknown as T;
      // Apply filter
      const documents = filter(document) ? [document] : [];
      return {
        documents,
        total: documents.length,
        hasMore: false,
      };
    } catch (error: unknown) {
      if (error instanceof DBError) {
        throw error;
      }
      logger.error(`Error querying counter in ${collection}:`, error);
      throw new DBError(
        ErrorCode.OPERATION_FAILED,
        `Failed to query counter in ${collection}`,
        error,
      );
    }
  }

  /**
   * Create an index - not applicable for counter stores; logs a warning and
   * returns false.
   */
  async createIndex(collection: string, _field: string, _options?: StoreOptions): Promise<boolean> {
    logger.warn(
      `Index creation not supported for counter collections, ignoring request for ${collection}`,
    );
    return false;
  }
}

View File

@ -1,180 +0,0 @@
import { StoreType, StoreOptions, PaginatedResult, QueryOptions, ListOptions } from '../types';
import { AbstractStore } from './abstractStore';
import { prepareDocument } from './baseStore';
import { DBError, ErrorCode } from '../core/error';
/**
* DocStore implementation
* Uses OrbitDB's document store which allows for more complex document storage with indices
*/
export class DocStore extends AbstractStore {
constructor() {
super(StoreType.DOCSTORE);
}
protected getLoggerName(): string {
return 'DOCSTORE';
}
/**
* Prepare a document for creation - override to add _id which is required for docstore
*/
protected prepareCreateDocument<T extends Record<string, any>>(
collection: string,
id: string,
data: Omit<T, 'createdAt' | 'updatedAt'>,
): any {
return {
_id: id,
...prepareDocument<T>(collection, data),
};
}
/**
* Prepare a document for update - override to add _id which is required for docstore
*/
protected prepareUpdateDocument<T extends Record<string, any>>(
collection: string,
id: string,
data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
existing?: T,
): any {
return {
_id: id,
...prepareDocument<T>(
collection,
data as unknown as Omit<T, 'createdAt' | 'updatedAt'>,
existing,
),
};
}
/**
* Implementation for the DocStore create operation
*/
protected async performCreate(db: any, id: string, document: any): Promise<string> {
return await db.put(document);
}
/**
* Implementation for the DocStore get operation
*/
protected async performGet<T>(db: any, id: string): Promise<T | null> {
return (await db.get(id)) as T | null;
}
/**
* Implementation for the DocStore update operation
*/
protected async performUpdate(db: any, id: string, document: any): Promise<string> {
return await db.put(document);
}
/**
* Implementation for the DocStore remove operation
*/
protected async performRemove(db: any, id: string): Promise<void> {
await db.del(id);
}
/**
* List all documents in a collection with pagination
*/
async list<T extends Record<string, any>>(
collection: string,
options?: ListOptions,
): Promise<PaginatedResult<T>> {
try {
const db = await this.openStore(collection, options);
const allDocs = await db.query((_doc: any) => true);
// Map the documents to include id
let documents = allDocs.map((doc: any) => ({
id: doc._id,
...doc,
})) as T[];
// Apply sorting
documents = this.applySorting(documents, options);
// Apply pagination
return this.applyPagination(documents, options);
} catch (error) {
this.handleError(`Error listing documents in ${collection}`, error);
}
}
/**
* Query documents in a collection with filtering and pagination
*/
async query<T extends Record<string, any>>(
collection: string,
filter: (doc: T) => boolean,
options?: QueryOptions,
): Promise<PaginatedResult<T>> {
try {
const db = await this.openStore(collection, options);
// Apply filter using docstore's query capability
const filtered = await db.query((doc: any) => filter(doc as T));
// Map the documents to include id
let documents = filtered.map((doc: any) => ({
id: doc._id,
...doc,
})) as T[];
// Apply sorting
documents = this.applySorting(documents, options);
// Apply pagination
return this.applyPagination(documents, options);
} catch (error) {
this.handleError(`Error querying documents in ${collection}`, error);
}
}
  /**
   * Create an index for a collection to speed up queries.
   * DocStore has built-in indexing capabilities, so this reports success even
   * when the instance exposes no explicit `createIndex` method.
   *
   * @param collection - Collection (store) name
   * @param field - Field to index
   * @param options - Optional store/connection options
   * @returns true in both the explicit-index and built-in-index cases
   */
  async createIndex(collection: string, field: string, options?: StoreOptions): Promise<boolean> {
    try {
      const db = await this.openStore(collection, options);
      // Use the store's own index support when this instance exposes it
      if (typeof db.createIndex === 'function') {
        await db.createIndex(field);
        this.logger.info(`Index created on ${field} for collection ${collection}`);
        return true;
      }
      // No explicit API — still report success since docstore indexes internally
      this.logger.info(
        `Index creation not supported for this DB instance, but DocStore has built-in indices`,
      );
      return true;
    } catch (error) {
      this.handleError(`Error creating index for ${collection}`, error);
    }
  }
  /**
   * Helper to open a store of the correct type.
   * Uses a dynamic import of baseStore — presumably to avoid a circular
   * module dependency; confirm before converting to a static import.
   */
  private async openStore(collection: string, options?: StoreOptions): Promise<any> {
    const { openStore } = await import('./baseStore');
    return await openStore(collection, this.storeType, options);
  }
  /**
   * Helper to handle errors consistently: DBError instances are rethrown
   * untouched, anything else is logged and wrapped as OPERATION_FAILED.
   *
   * @param message - Context prefix for the log line and wrapped error
   * @param error - The caught error
   */
  private handleError(message: string, error: any): never {
    if (error instanceof DBError) {
      throw error;
    }
    this.logger.error(`${message}:`, error);
    throw new DBError(ErrorCode.OPERATION_FAILED, `${message}: ${error.message}`, error);
  }
}

View File

@ -1,475 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import {
ErrorCode,
StoreType,
StoreOptions,
CreateResult,
UpdateResult,
PaginatedResult,
QueryOptions,
ListOptions,
} from '../types';
import { DBError } from '../core/error';
import { BaseStore, openStore, prepareDocument } from './baseStore';
import * as events from '../events/eventService';
const logger = createServiceLogger('FEED_STORE');
/**
 * FeedStore/EventLog implementation
 * Uses OrbitDB's feed/eventlog store, which is an append-only log.
 *
 * Because the log is append-only, "update" and "remove" are modeled as new
 * entries: updates append a fresh version linked to the previous entry via
 * `previousEntryHash`, and removals append a tombstone (`deleted: true`).
 * Read paths (list/query) therefore collapse the log to the newest
 * non-tombstone entry per logical id.
 */
export class FeedStore implements BaseStore {
  /**
   * Create a new document in the specified collection.
   * For feeds, this appends a new entry.
   */
  async create<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Omit<T, 'createdAt' | 'updatedAt'>,
    options?: StoreOptions,
  ): Promise<CreateResult> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      // Prepare document for storage with its logical id attached
      const document = {
        id,
        ...prepareDocument<T>(collection, data),
      };
      const hash = await db.add(document);
      events.emit('document:created', { collection, id, document, hash });
      logger.info(`Created entry in feed ${collection} with id ${id} and hash ${hash}`);
      return { id, hash };
    } catch (error: unknown) {
      this.fail(
        `Error creating entry in feed ${collection}`,
        `Failed to create entry in feed ${collection}`,
        error,
      );
    }
  }

  /**
   * Get a specific entry in a feed — note this works differently than other
   * stores, as feeds are append-only logs addressed by entry hash.
   */
  async get<T extends Record<string, any>>(
    collection: string,
    hash: string,
    options?: StoreOptions & { skipCache?: boolean },
  ): Promise<T | null> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      const entry = await db.get(hash);
      if (!entry) {
        return null;
      }
      return entry.payload.value as T;
    } catch (error: unknown) {
      this.fail(
        `Error getting entry ${hash} from feed ${collection}`,
        `Failed to get entry ${hash} from feed ${collection}`,
        error,
      );
    }
  }

  /**
   * Update an entry in a feed.
   * Feeds are append-only, so this appends a new entry with the updated data
   * and links it to the original via `previousEntryHash`.
   *
   * @throws DBError DOCUMENT_NOT_FOUND when the id is absent and upsert is off
   */
  async update<T extends Record<string, any>>(
    collection: string,
    id: string,
    data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
    options?: StoreOptions & { upsert?: boolean },
  ): Promise<UpdateResult> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      const entries = await this.readAllEntries(db);
      const existingEntryIndex = this.findEntryIndexById(entries, id);
      if (existingEntryIndex === -1 && !options?.upsert) {
        throw new DBError(
          ErrorCode.DOCUMENT_NOT_FOUND,
          `Entry with id ${id} not found in feed ${collection}`,
          { collection, id },
        );
      }
      const existingEntry =
        existingEntryIndex !== -1 ? entries[existingEntryIndex].payload.value : null;
      const document = {
        id,
        ...prepareDocument<T>(
          collection,
          data as unknown as Omit<T, 'createdAt' | 'updatedAt'>,
          existingEntry,
        ),
        // Link the new version to the previous entry when one exists
        previousEntryHash: existingEntryIndex !== -1 ? entries[existingEntryIndex].hash : undefined,
      };
      // Append the new version to the log
      const hash = await db.add(document);
      events.emit('document:updated', { collection, id, document, previous: existingEntry });
      logger.info(`Updated entry in feed ${collection} with id ${id} (new hash: ${hash})`);
      return { id, hash };
    } catch (error: unknown) {
      this.fail(
        `Error updating entry in feed ${collection}`,
        `Failed to update entry in feed ${collection}`,
        error,
      );
    }
  }

  /**
   * Delete is not supported in feed/eventlog stores since they're append-only.
   * Instead, a "tombstone" entry is appended that marks the id as deleted.
   *
   * @throws DBError DOCUMENT_NOT_FOUND when no entry with the id exists
   */
  async remove(collection: string, id: string, options?: StoreOptions): Promise<boolean> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      const entries = await this.readAllEntries(db);
      const existingEntryIndex = this.findEntryIndexById(entries, id);
      if (existingEntryIndex === -1) {
        throw new DBError(
          ErrorCode.DOCUMENT_NOT_FOUND,
          `Entry with id ${id} not found in feed ${collection}`,
          { collection, id },
        );
      }
      const existingEntry = entries[existingEntryIndex].payload.value;
      const existingHash = entries[existingEntryIndex].hash;
      // Tombstone: marks the id deleted and links back to the live entry
      const tombstone = {
        id,
        deleted: true,
        deletedAt: Date.now(),
        previousEntryHash: existingHash,
      };
      await db.add(tombstone);
      events.emit('document:deleted', { collection, id, document: existingEntry });
      logger.info(`Marked entry as deleted in feed ${collection} with id ${id}`);
      return true;
    } catch (error: unknown) {
      this.fail(
        `Error marking entry as deleted in feed ${collection}`,
        `Failed to mark entry as deleted in feed ${collection}`,
        error,
      );
    }
  }

  /**
   * List all entries in a feed with pagination.
   * Only the latest entry for each unique id is returned; tombstoned ids are
   * filtered out.
   */
  async list<T extends Record<string, any>>(
    collection: string,
    options?: ListOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      const requestedLimit = options?.limit || 50;
      const requestedOffset = options?.offset || 0;
      // Fetch more than requested since duplicates/tombstones are filtered out
      // below, but cap the overshoot to limit memory use.
      const fetchLimit = requestedLimit === -1 ? -1 : Math.min(requestedLimit * 3, 1000);
      const entries = [];
      let count = 0;
      let skipped = 0;
      for await (const entry of db.iterator({ limit: fetchLimit })) {
        // Skip raw entries to honour the requested offset
        if (requestedOffset > 0 && skipped < requestedOffset) {
          skipped++;
          continue;
        }
        entries.push(entry);
        count++;
        // Stop once the capped fetch window is full (unless everything was requested)
        if (requestedLimit !== -1 && count >= fetchLimit) {
          break;
        }
      }
      const latestEntries = this.collectLatestEntries(entries);
      let documents = Array.from(latestEntries.values()).map((entry) => ({
        ...entry.value,
      })) as T[];
      documents = this.sortDocuments(documents, options?.sort);
      // Final pagination over the collapsed result set
      const total = documents.length;
      const finalLimit = requestedLimit === -1 ? total : requestedLimit;
      return {
        documents: documents.slice(0, finalLimit),
        total,
        hasMore: documents.length > finalLimit,
      };
    } catch (error: unknown) {
      this.fail(
        `Error listing entries in feed ${collection}`,
        `Failed to list entries in feed ${collection}`,
        error,
      );
    }
  }

  /**
   * Query entries in a feed with filtering and pagination.
   * The filter runs against the latest entry for each unique id.
   */
  async query<T extends Record<string, any>>(
    collection: string,
    filter: (doc: T) => boolean,
    options?: QueryOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await openStore(collection, StoreType.FEED, options);
      const entries = await this.readAllEntries(db);
      // FIX: recency now uses the same updatedAt/timestamp/0 fallback as
      // list() (previously a bare updatedAt comparison that failed for
      // entries without that field).
      const latestEntries = this.collectLatestEntries(entries);
      let filtered = Array.from(latestEntries.values())
        .filter((entry) => filter(entry.value as T))
        .map((entry) => ({
          ...entry.value,
        })) as T[];
      filtered = this.sortDocuments(filtered, options?.sort);
      const total = filtered.length;
      const offset = options?.offset || 0;
      const limit = options?.limit || total;
      return {
        documents: filtered.slice(offset, offset + limit),
        total,
        hasMore: offset + limit < total,
      };
    } catch (error: unknown) {
      this.fail(
        `Error querying entries in feed ${collection}`,
        `Failed to query entries in feed ${collection}`,
        error,
      );
    }
  }

  /**
   * Create an index for a collection — not supported for feeds.
   */
  async createIndex(collection: string, _field: string, _options?: StoreOptions): Promise<boolean> {
    logger.warn(
      `Index creation not supported for feed collections, ignoring request for ${collection}`,
    );
    return false;
  }

  /** Drain the feed iterator into an array (no limit). */
  private async readAllEntries(db: any): Promise<any[]> {
    const entries: any[] = [];
    for await (const entry of db.iterator({ limit: -1 })) {
      entries.push(entry);
    }
    return entries;
  }

  /** Locate the index of the entry whose payload carries the given logical id. */
  private findEntryIndexById(entries: any[], id: string): number {
    return entries.findIndex((e: any) => {
      const value = e.payload.value;
      return value && value.id === id;
    });
  }

  /**
   * Extract the document value from a raw feed entry, tolerating the different
   * entry shapes observed across store versions (payload.value, value, or the
   * entry itself).
   */
  private extractEntryValue(entry: any): any | null {
    if (entry && entry.payload && entry.payload.value) {
      return entry.payload.value;
    }
    if (entry && entry.value) {
      return entry.value;
    }
    if (entry && typeof entry === 'object') {
      return entry;
    }
    return null;
  }

  /**
   * Collapse raw feed entries to the newest value per logical id, skipping
   * tombstones and values without an id. Recency uses `updatedAt`, falling
   * back to `timestamp`, then 0.
   */
  private collectLatestEntries(entries: any[]): Map<string, { hash: string; value: any }> {
    const latestEntries = new Map<string, { hash: string; value: any }>();
    for (const entry of entries) {
      const value = this.extractEntryValue(entry);
      if (!value || value.deleted) continue;
      const id = value.id;
      if (!id) continue;
      const existing = latestEntries.get(id);
      if (existing) {
        const existingTime = existing.value.updatedAt || existing.value.timestamp || 0;
        const currentTime = value.updatedAt || value.timestamp || 0;
        if (currentTime > existingTime) {
          latestEntries.set(id, { hash: entry.hash, value });
        }
      } else {
        latestEntries.set(id, { hash: entry.hash, value });
      }
    }
    return latestEntries;
  }

  /**
   * Sort documents in place by a field, handling strings, numbers and Dates;
   * any other type falls back to locale string comparison.
   */
  private sortDocuments<T extends Record<string, any>>(
    documents: T[],
    sort?: { field: string; order: 'asc' | 'desc' },
  ): T[] {
    if (!sort) {
      return documents;
    }
    const { field, order } = sort;
    return documents.sort((a, b) => {
      const valueA = a[field];
      const valueB = b[field];
      if (typeof valueA === 'string' && typeof valueB === 'string') {
        return order === 'asc' ? valueA.localeCompare(valueB) : valueB.localeCompare(valueA);
      } else if (typeof valueA === 'number' && typeof valueB === 'number') {
        return order === 'asc' ? valueA - valueB : valueB - valueA;
      } else if (valueA instanceof Date && valueB instanceof Date) {
        return order === 'asc'
          ? valueA.getTime() - valueB.getTime()
          : valueB.getTime() - valueA.getTime();
      }
      return order === 'asc'
        ? String(valueA).localeCompare(String(valueB))
        : String(valueB).localeCompare(String(valueA));
    });
  }

  /** Normalize errors: rethrow DBError untouched, wrap anything else as OPERATION_FAILED. */
  private fail(logMessage: string, errorMessage: string, error: unknown): never {
    if (error instanceof DBError) {
      throw error;
    }
    logger.error(`${logMessage}:`, error);
    throw new DBError(ErrorCode.OPERATION_FAILED, errorMessage, error);
  }
}

View File

@ -1,181 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import { ErrorCode, StoreType, FileUploadResult, FileResult } from '../types';
import { DBError } from '../core/error';
import { openStore } from './baseStore';
import ipfsService, { getHelia } from '../../ipfs/ipfsService';
import { CreateResult, StoreOptions } from '../types';
/**
 * Drain an async iterable of byte chunks into a single contiguous Buffer.
 *
 * @param asyncIterable - Source of Uint8Array chunks (e.g. unixfs `cat`)
 * @returns All chunks concatenated in arrival order
 */
async function readAsyncIterableToBuffer(
  asyncIterable: AsyncIterable<Uint8Array>,
): Promise<Buffer> {
  const collected: Uint8Array[] = [];
  for await (const piece of asyncIterable) {
    collected.push(piece);
  }
  return Buffer.concat(collected);
}
const logger = createServiceLogger('FILE_STORE');
/**
 * Upload a file to IPFS and record its metadata in the `_files` keyvalue store.
 *
 * NOTE(review): `options.connectionId` is accepted but never used in this
 * function — confirm whether it should be forwarded to `openStore`.
 *
 * @param fileData - Raw file bytes
 * @param options - Optional filename and extra metadata to store alongside
 * @returns The content identifier of the added bytes
 * @throws DBError OPERATION_FAILED when IPFS is unavailable or the add fails
 */
export const uploadFile = async (
  fileData: Buffer,
  options?: {
    filename?: string;
    connectionId?: string;
    metadata?: Record<string, any>;
  },
): Promise<FileUploadResult> => {
  try {
    const ipfs = getHelia();
    if (!ipfs) {
      logger.error('IPFS instance not available - Helia is null or undefined');
      // Diagnostic only: probe the IPFS service directly to log whether it
      // would return an instance, before failing the upload.
      try {
        const heliaInstance = ipfsService.getHelia();
        logger.error(
          'IPFS Service getHelia() returned:',
          heliaInstance ? 'instance available' : 'null/undefined',
        );
      } catch (importError) {
        logger.error('Error importing IPFS service:', importError);
      }
      throw new DBError(ErrorCode.OPERATION_FAILED, 'IPFS instance not available');
    }
    logger.info(`Attempting to upload file with size: ${fileData.length} bytes`);
    // Add the raw bytes to IPFS via unixfs (dynamic import)
    const unixfs = await import('@helia/unixfs');
    const fs = unixfs.unixfs(ipfs);
    const cid = await fs.addBytes(fileData);
    const cidStr = cid.toString();
    // Store metadata keyed by CID; caller-provided metadata can override defaults
    const filesDb = await openStore('_files', StoreType.KEYVALUE);
    await filesDb.put(cidStr, {
      filename: options?.filename,
      size: fileData.length,
      uploadedAt: Date.now(),
      ...options?.metadata,
    });
    logger.info(`Uploaded file with CID: ${cidStr}`);
    return { cid: cidStr };
  } catch (error: unknown) {
    if (error instanceof DBError) {
      throw error;
    }
    logger.error('Error uploading file:', error);
    throw new DBError(ErrorCode.OPERATION_FAILED, 'Failed to upload file', error);
  }
};
/**
 * Get a file from IPFS by CID, together with any stored metadata.
 *
 * NOTE(review): any failure while reading the bytes (inner try) is reported
 * as FILE_NOT_FOUND, which can mask transport or decoding errors.
 *
 * @param cid - Content identifier string
 * @returns File bytes plus metadata (metadata is null when none was stored)
 * @throws DBError FILE_NOT_FOUND when the bytes cannot be read,
 *         OPERATION_FAILED for other failures
 */
export const getFile = async (cid: string): Promise<FileResult> => {
  try {
    const ipfs = getHelia();
    if (!ipfs) {
      throw new DBError(ErrorCode.OPERATION_FAILED, 'IPFS instance not available');
    }
    // Resolve the CID and stream the content via unixfs (dynamic imports)
    const unixfs = await import('@helia/unixfs');
    const fs = unixfs.unixfs(ipfs);
    const { CID } = await import('multiformats/cid');
    const resolvedCid = CID.parse(cid);
    try {
      // Convert the async byte stream to a single Buffer
      const bytes = await readAsyncIterableToBuffer(fs.cat(resolvedCid));
      // Metadata lookup is best-effort: absence is not an error
      let metadata = null;
      try {
        const filesDb = await openStore('_files', StoreType.KEYVALUE);
        metadata = await filesDb.get(cid);
      } catch (_err) {
        // Metadata might not exist, continue without it
      }
      return { data: bytes, metadata };
    } catch (error) {
      throw new DBError(ErrorCode.FILE_NOT_FOUND, `File with CID ${cid} not found`, error);
    }
  } catch (error: unknown) {
    if (error instanceof DBError) {
      throw error;
    }
    logger.error(`Error getting file with CID ${cid}:`, error);
    throw new DBError(ErrorCode.OPERATION_FAILED, `Failed to get file with CID ${cid}`, error);
  }
};
/**
 * Delete a file from IPFS by CID.
 *
 * NOTE(review): only the metadata entry in the `_files` store is removed —
 * the underlying IPFS blocks are never unpinned or garbage-collected here.
 * Confirm content cleanup happens elsewhere before relying on this for
 * storage reclamation.
 *
 * @param cid - Content identifier of the file
 * @returns true when metadata removal completed (or the metadata was absent)
 */
export const deleteFile = async (cid: string): Promise<boolean> => {
  try {
    // Delete metadata (best-effort: missing metadata is not an error)
    try {
      const filesDb = await openStore('_files', StoreType.KEYVALUE);
      await filesDb.del(cid);
    } catch (_err) {
      // Ignore if metadata doesn't exist
    }
    logger.info(`Deleted file with CID: ${cid}`);
    return true;
  } catch (error: unknown) {
    if (error instanceof DBError) {
      throw error;
    }
    logger.error(`Error deleting file with CID ${cid}:`, error);
    throw new DBError(ErrorCode.OPERATION_FAILED, `Failed to delete file with CID ${cid}`, error);
  }
};
/**
 * Create an entry in a keyvalue-backed collection for file bookkeeping.
 *
 * NOTE(review): unlike the other stores, the payload is NOT run through
 * `prepareDocument` (the call is commented out below), so createdAt/updatedAt
 * timestamps are never stamped — confirm this is intentional.
 *
 * @param collection - Collection (store) name
 * @param id - Logical id embedded in the stored document
 * @param data - Document payload
 * @returns The id and the hash returned by the store
 */
export const create = async <T extends Record<string, any>>(
  collection: string,
  id: string,
  data: Omit<T, 'createdAt' | 'updatedAt'>,
  options?: StoreOptions,
): Promise<CreateResult> => {
  try {
    const db = await openStore(collection, StoreType.KEYVALUE, options);
    // Prepare document for storage with ID
    // const document = {
    //   id,
    //   ...prepareDocument<T>(collection, data)
    // };
    const document = { id, ...data };
    // NOTE(review): `db.add(...)` is invoked on a KEYVALUE store; keyvalue
    // stores normally expose `put(key, value)` while `add` is a feed/eventlog
    // API — verify this call works on the OrbitDB version in use.
    const hash = await db.add(document);
    // Emit change event
    // events.emit('document:created', { collection, id, document, hash });
    logger.info(`Created entry in file ${collection} with id ${id} and hash ${hash}`);
    return { id, hash };
  } catch (error: unknown) {
    if (error instanceof DBError) {
      throw error;
    }
    logger.error(`Error creating entry in file ${collection}:`, error);
    throw new DBError(
      ErrorCode.OPERATION_FAILED,
      `Failed to create entry in file ${collection}`,
      error,
    );
  }
};

View File

@ -1,136 +0,0 @@
import { StoreType, StoreOptions, PaginatedResult, QueryOptions, ListOptions } from '../types';
import { AbstractStore } from './abstractStore';
import { DBError, ErrorCode } from '../core/error';
/**
 * KeyValue Store implementation built on the AbstractStore base class.
 * Documents are stored as values keyed by their logical id.
 */
export class KeyValueStore extends AbstractStore {
  constructor() {
    super(StoreType.KEYVALUE);
  }

  protected getLoggerName(): string {
    return 'KEYVALUE_STORE';
  }

  /**
   * Implementation for the KeyValue store create operation.
   */
  protected async performCreate(db: any, id: string, document: any): Promise<string> {
    const hash = await db.put(id, document);
    return hash;
  }

  /**
   * Implementation for the KeyValue store get operation.
   */
  protected async performGet<T>(db: any, id: string): Promise<T | null> {
    const stored = await db.get(id);
    return stored as T | null;
  }

  /**
   * Implementation for the KeyValue store update operation.
   */
  protected async performUpdate(db: any, id: string, document: any): Promise<string> {
    const hash = await db.put(id, document);
    return hash;
  }

  /**
   * Implementation for the KeyValue store remove operation.
   */
  protected async performRemove(db: any, id: string): Promise<void> {
    await db.del(id);
  }

  /**
   * List all documents in a collection with pagination.
   */
  async list<T extends Record<string, any>>(
    collection: string,
    options?: ListOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await this.openStore(collection, options);
      const records = await db.all();
      // Turn each key/value pair into a document carrying its id.
      let documents: T[] = [];
      for (const [key, value] of Object.entries(records)) {
        documents.push({ id: key, ...(value as any) } as T);
      }
      documents = this.applySorting(documents, options);
      return this.applyPagination(documents, options);
    } catch (error) {
      this.handleError(`Error listing documents in ${collection}`, error);
    }
  }

  /**
   * Query documents in a collection with filtering and pagination.
   */
  async query<T extends Record<string, any>>(
    collection: string,
    filter: (doc: T) => boolean,
    options?: QueryOptions,
  ): Promise<PaginatedResult<T>> {
    try {
      const db = await this.openStore(collection, options);
      const records = await db.all();
      // Keep only the values accepted by the caller-supplied predicate.
      let matches: T[] = [];
      for (const [key, value] of Object.entries(records)) {
        if (filter(value as T)) {
          matches.push({ id: key, ...(value as any) } as T);
        }
      }
      matches = this.applySorting(matches, options);
      return this.applyPagination(matches, options);
    } catch (error) {
      this.handleError(`Error querying documents in ${collection}`, error);
    }
  }

  /**
   * Create an index for a collection to speed up queries.
   * The keyvalue backend has no real indexing; this only logs the request.
   */
  async createIndex(collection: string, field: string): Promise<boolean> {
    try {
      // KeyValueStore doesn't support real indexing - this is just a placeholder
      this.logger.info(
        `Index created on ${field} for collection ${collection} (not supported in KeyValueStore)`,
      );
      return true;
    } catch (error) {
      this.handleError(`Error creating index for ${collection}`, error);
    }
  }

  /**
   * Helper to open a store of the correct type.
   */
  private async openStore(collection: string, options?: StoreOptions): Promise<any> {
    const { openStore } = await import('./baseStore');
    return await openStore(collection, this.storeType, options);
  }

  /**
   * Helper to handle errors consistently: DBErrors pass through, anything
   * else is logged and wrapped as OPERATION_FAILED.
   */
  private handleError(message: string, error: any): never {
    if (error instanceof DBError) {
      throw error;
    }
    this.logger.error(`${message}:`, error);
    throw new DBError(ErrorCode.OPERATION_FAILED, `${message}: ${error.message}`, error);
  }
}

View File

@ -1,48 +0,0 @@
import { createServiceLogger } from '../../utils/logger';
import { StoreType, ErrorCode } from '../types';
import { DBError } from '../core/error';
import { BaseStore } from './baseStore';
import { KeyValueStore } from './keyValueStore';
import { DocStore } from './docStore';
import { FeedStore } from './feedStore';
import { CounterStore } from './counterStore';
const logger = createServiceLogger('STORE_FACTORY');
// Singleton cache of store instances, one per store type
const storeInstances = new Map<StoreType, BaseStore>();
// Store type mapping to implementations
const storeImplementations = {
  [StoreType.KEYVALUE]: KeyValueStore,
  [StoreType.DOCSTORE]: DocStore,
  [StoreType.FEED]: FeedStore,
  [StoreType.EVENTLOG]: FeedStore, // Alias for feed
  [StoreType.COUNTER]: CounterStore,
};

/**
 * Get a store instance by type (factory + singleton pattern).
 * Instances are created lazily on first request and reused afterwards.
 *
 * @param type - The store type to resolve
 * @throws DBError STORE_TYPE_ERROR for unknown types
 */
export function getStore(type: StoreType): BaseStore {
  // Reuse a previously created instance when available
  const cached = storeInstances.get(type);
  if (cached) {
    return cached;
  }
  const StoreClass = storeImplementations[type];
  if (!StoreClass) {
    logger.error(`Unsupported store type: ${type}`);
    throw new DBError(ErrorCode.STORE_TYPE_ERROR, `Unsupported store type: ${type}`);
  }
  // Lazily construct and cache the implementation
  const store = new StoreClass();
  storeInstances.set(type, store);
  return store;
}

View File

@ -1,71 +0,0 @@
// A single queued operation within a transaction
interface TransactionOperation {
  type: 'create' | 'update' | 'delete';
  collection: string;
  id: string;
  data?: any;
}

/**
 * Transaction object for batching operations.
 * Operations are queued in call order; all builder methods return `this`
 * so calls can be chained.
 */
export class Transaction {
  private readonly operations: TransactionOperation[] = [];
  private readonly connectionId?: string;

  constructor(connectionId?: string) {
    this.connectionId = connectionId;
  }

  /**
   * Add a create operation to the transaction.
   */
  create<T>(collection: string, id: string, data: T): Transaction {
    return this.enqueue({ type: 'create', collection, id, data });
  }

  /**
   * Add an update operation to the transaction.
   */
  update<T>(collection: string, id: string, data: Partial<T>): Transaction {
    return this.enqueue({ type: 'update', collection, id, data });
  }

  /**
   * Add a delete operation to the transaction.
   */
  delete(collection: string, id: string): Transaction {
    return this.enqueue({ type: 'delete', collection, id });
  }

  /**
   * Get all operations in this transaction (defensive copy).
   */
  getOperations(): TransactionOperation[] {
    return this.operations.slice();
  }

  /**
   * Get connection ID for this transaction.
   */
  getConnectionId(): string | undefined {
    return this.connectionId;
  }

  /** Append an operation and return `this` for chaining. */
  private enqueue(operation: TransactionOperation): Transaction {
    this.operations.push(operation);
    return this;
  }
}

View File

@ -1,151 +0,0 @@
// Common types for database operations
import { EventEmitter } from 'events';
import { Transaction } from '../transactions/transactionService';
export type { Transaction };
// Resource locking for concurrent operations.
// A resource id is "locked" while present in this set.
const locks = new Set<string>();

/** Try to take the lock; returns false when it is already held. */
export const acquireLock = (resourceId: string): boolean => {
  if (locks.has(resourceId)) {
    return false;
  }
  locks.add(resourceId);
  return true;
};

/** Release the lock (no-op when not held). */
export const releaseLock = (resourceId: string): void => {
  locks.delete(resourceId);
};

/** Check whether the lock is currently held. */
export const isLocked = (resourceId: string): boolean => {
  return locks.has(resourceId);
};
// Database Types
/** Store backend kinds supported by the database layer. */
export enum StoreType {
  KEYVALUE = 'keyvalue',
  DOCSTORE = 'documents',
  FEED = 'feed',
  EVENTLOG = 'events',
  COUNTER = 'counter',
}
// Common result types
/** Result of a create operation: logical id plus the store entry hash. */
export interface CreateResult {
  id: string;
  hash: string;
}
/** Result of an update operation: logical id plus the new entry hash. */
export interface UpdateResult {
  id: string;
  hash: string;
}
/** Result of a file upload: the IPFS content identifier. */
export interface FileUploadResult {
  cid: string;
}
/** Metadata stored alongside an uploaded file (extra keys allowed). */
export interface FileMetadata {
  filename?: string;
  size: number;
  uploadedAt: number; // epoch milliseconds
  [key: string]: any;
}
/** A downloaded file: raw bytes plus metadata when available. */
export interface FileResult {
  data: Buffer;
  metadata: FileMetadata | null;
}
/** One page of documents plus totals for pagination. */
export interface PaginatedResult<T> {
  documents: T[];
  total: number;
  hasMore: boolean;
}
// Define error codes
/** Stable error codes carried by DBError instances. */
export enum ErrorCode {
  NOT_INITIALIZED = 'ERR_NOT_INITIALIZED',
  INITIALIZATION_FAILED = 'ERR_INIT_FAILED',
  DOCUMENT_NOT_FOUND = 'ERR_DOC_NOT_FOUND',
  INVALID_SCHEMA = 'ERR_INVALID_SCHEMA',
  OPERATION_FAILED = 'ERR_OPERATION_FAILED',
  TRANSACTION_FAILED = 'ERR_TRANSACTION_FAILED',
  FILE_NOT_FOUND = 'ERR_FILE_NOT_FOUND',
  INVALID_PARAMETERS = 'ERR_INVALID_PARAMS',
  CONNECTION_ERROR = 'ERR_CONNECTION',
  STORE_TYPE_ERROR = 'ERR_STORE_TYPE',
}
// Connection pool interface
/** A pooled database connection handle. */
export interface DBConnection {
  ipfs: any;
  orbitdb: any;
  timestamp: number; // epoch ms — creation or last use; confirm at the pool
  isActive: boolean;
}
// Schema validation
/** JSON-schema-like definition for a single field. */
export interface SchemaDefinition {
  type: string;
  required?: boolean;
  pattern?: string;
  min?: number;
  max?: number;
  enum?: any[];
  items?: SchemaDefinition; // For arrays
  properties?: Record<string, SchemaDefinition>; // For objects
}
/** Schema for a whole collection. */
export interface CollectionSchema {
  properties: Record<string, SchemaDefinition>;
  required?: string[];
}
// Metrics tracking
/** Counters and timings collected by the database layer. */
export interface Metrics {
  operations: {
    creates: number;
    reads: number;
    updates: number;
    deletes: number;
    queries: number;
    fileUploads: number;
    fileDownloads: number;
  };
  performance: {
    totalOperationTime: number;
    operationCount: number;
    averageOperationTime?: number; // derived; may be absent until computed
  };
  errors: {
    count: number;
    byCode: Record<string, number>;
  };
  cacheStats: {
    hits: number;
    misses: number;
  };
  startTime: number; // epoch ms when metrics collection began
}
// Store options
/** Options for list operations. */
export interface ListOptions {
  limit?: number;
  offset?: number;
  connectionId?: string;
  sort?: { field: string; order: 'asc' | 'desc' };
}
/** Options for query operations. */
export interface QueryOptions extends ListOptions {
  indexBy?: string;
}
/** Options common to all store operations. */
export interface StoreOptions {
  connectionId?: string;
}
// Event bus for database events
export const dbEvents = new EventEmitter();

View File

@ -0,0 +1,767 @@
/**
* DebrosFramework - Main Framework Class
*
* This is the primary entry point for the DebrosFramework, providing a unified
* API that integrates all framework components:
* - Model system with decorators and validation
* - Database management and sharding
* - Query system with optimization
* - Relationship management with lazy/eager loading
* - Automatic pinning and PubSub features
* - Migration system for schema evolution
* - Configuration and lifecycle management
*/
import { BaseModel } from './models/BaseModel';
import { ModelRegistry } from './core/ModelRegistry';
import { DatabaseManager } from './core/DatabaseManager';
import { ShardManager } from './sharding/ShardManager';
import { ConfigManager } from './core/ConfigManager';
import { FrameworkOrbitDBService, FrameworkIPFSService } from './services/OrbitDBService';
import { QueryCache } from './query/QueryCache';
import { RelationshipManager } from './relationships/RelationshipManager';
import { PinningManager } from './pinning/PinningManager';
import { PubSubManager } from './pubsub/PubSubManager';
import { MigrationManager } from './migrations/MigrationManager';
import { FrameworkConfig } from './types/framework';
/** Top-level configuration accepted by the DebrosFramework constructor. */
export interface DebrosFrameworkConfig extends FrameworkConfig {
  // Environment settings
  environment?: 'development' | 'production' | 'test';
  // Service configurations
  orbitdb?: {
    directory?: string;
    options?: any;
  };
  ipfs?: {
    config?: any;
    options?: any;
  };
  // Feature toggles
  features?: {
    autoMigration?: boolean;
    automaticPinning?: boolean;
    pubsub?: boolean;
    queryCache?: boolean;
    relationshipCache?: boolean;
  };
  // Performance settings
  performance?: {
    queryTimeout?: number;
    migrationTimeout?: number;
    maxConcurrentOperations?: number;
    batchSize?: number;
  };
  // Monitoring and logging
  monitoring?: {
    enableMetrics?: boolean;
    logLevel?: 'error' | 'warn' | 'info' | 'debug';
    metricsInterval?: number;
  };
}
/** Aggregate runtime metrics reported by the framework. */
export interface FrameworkMetrics {
  uptime: number; // time since initialization — presumably ms; confirm at producer
  totalModels: number;
  totalDatabases: number;
  totalShards: number;
  queriesExecuted: number;
  migrationsRun: number;
  cacheHitRate: number; // presumably a 0..1 ratio — confirm at producer
  averageQueryTime: number;
  memoryUsage: {
    queryCache: number;
    relationshipCache: number;
    total: number;
  };
  performance: {
    slowQueries: number;
    failedOperations: number;
    averageResponseTime: number;
  };
}
/** Snapshot of framework health and per-service connectivity. */
export interface FrameworkStatus {
  initialized: boolean;
  healthy: boolean;
  version: string;
  environment: string;
  services: {
    orbitdb: 'connected' | 'disconnected' | 'error';
    ipfs: 'connected' | 'disconnected' | 'error';
    pinning: 'active' | 'inactive' | 'error';
    pubsub: 'active' | 'inactive' | 'error';
  };
  lastHealthCheck: number; // epoch ms; 0 before the first check
}
/**
 * DebrosFramework — top-level orchestrator for the framework.
 *
 * Wires together the IPFS and OrbitDB services, the core components
 * (DatabaseManager, ShardManager, QueryCache, RelationshipManager) and the
 * feature components (PinningManager, PubSubManager, MigrationManager),
 * exposes them via getters and a global handle, and manages lifecycle
 * (initialize / stop / restart), periodic health checks and metrics.
 */
export class DebrosFramework {
  private config: DebrosFrameworkConfig;
  private configManager: ConfigManager;
  // Core services
  private orbitDBService: FrameworkOrbitDBService | null = null;
  private ipfsService: FrameworkIPFSService | null = null;
  // Framework components
  private databaseManager: DatabaseManager | null = null;
  private shardManager: ShardManager | null = null;
  private queryCache: QueryCache | null = null;
  private relationshipManager: RelationshipManager | null = null;
  private pinningManager: PinningManager | null = null;
  private pubsubManager: PubSubManager | null = null;
  private migrationManager: MigrationManager | null = null;
  // Framework state
  private initialized: boolean = false;
  private startTime: number = 0; // epoch ms captured at the start of initialize(); used for uptime
  private healthCheckInterval: any = null; // setInterval handle for periodic health checks
  private metricsCollector: any = null; // setInterval handle for periodic metrics collection
  private status: FrameworkStatus;
  private metrics: FrameworkMetrics;

  /**
   * Builds the merged configuration and the initial (disconnected / zeroed)
   * status and metrics objects. No services are started here — call
   * initialize() to actually bring the framework up.
   */
  constructor(config: DebrosFrameworkConfig = {}) {
    this.config = this.mergeDefaultConfig(config);
    this.configManager = new ConfigManager(this.config);
    this.status = {
      initialized: false,
      healthy: false,
      version: '1.0.0', // This would come from package.json
      environment: this.config.environment || 'development',
      services: {
        orbitdb: 'disconnected',
        ipfs: 'disconnected',
        pinning: 'inactive',
        pubsub: 'inactive',
      },
      lastHealthCheck: 0,
    };
    this.metrics = {
      uptime: 0,
      totalModels: 0,
      totalDatabases: 0,
      totalShards: 0,
      queriesExecuted: 0,
      migrationsRun: 0,
      cacheHitRate: 0,
      averageQueryTime: 0,
      memoryUsage: {
        queryCache: 0,
        relationshipCache: 0,
        total: 0,
      },
      performance: {
        slowQueries: 0,
        failedOperations: 0,
        averageResponseTime: 0,
      },
    };
  }

  // Main initialization method
  /**
   * Brings the framework up: applies config overrides, initializes services
   * and components, installs the global framework handle, starts background
   * timers and (optionally) runs pending migrations.
   *
   * @param existingOrbitDBService - required OrbitDB instance to wrap
   * @param existingIPFSService - required IPFS instance to wrap
   * @param overrideConfig - optional config merged over the constructor config
   * @throws Error if already initialized, if a required service is missing,
   *         or if any initialization step fails (cleanup() is run first).
   */
  async initialize(
    existingOrbitDBService?: any,
    existingIPFSService?: any,
    overrideConfig?: Partial<DebrosFrameworkConfig>,
  ): Promise<void> {
    if (this.initialized) {
      throw new Error('Framework is already initialized');
    }
    try {
      this.startTime = Date.now();
      console.log('🚀 Initializing DebrosFramework...');
      // Apply config overrides
      if (overrideConfig) {
        this.config = { ...this.config, ...overrideConfig };
        this.configManager = new ConfigManager(this.config);
      }
      // Initialize services
      await this.initializeServices(existingOrbitDBService, existingIPFSService);
      // Initialize core components
      await this.initializeCoreComponents();
      // Initialize feature components
      await this.initializeFeatureComponents();
      // Setup global framework access
      this.setupGlobalAccess();
      // Start background processes
      await this.startBackgroundProcesses();
      // Run automatic migrations if enabled
      if (this.config.features?.autoMigration && this.migrationManager) {
        await this.runAutomaticMigrations();
      }
      this.initialized = true;
      this.status.initialized = true;
      this.status.healthy = true;
      console.log('✅ DebrosFramework initialized successfully');
      this.logFrameworkInfo();
    } catch (error) {
      console.error('❌ Framework initialization failed:', error);
      // Best-effort teardown of anything partially started before rethrowing.
      await this.cleanup();
      throw error;
    }
  }

  // Service initialization
  /**
   * Wraps and initializes the IPFS and OrbitDB services. Both instances are
   * required; creating them here is not implemented.
   * NOTE(review): on failure, BOTH services are marked 'error' even if only
   * one of them failed — consider confirming this is intentional.
   */
  private async initializeServices(
    existingOrbitDBService?: any,
    existingIPFSService?: any,
  ): Promise<void> {
    console.log('📡 Initializing core services...');
    try {
      // Initialize IPFS service
      if (existingIPFSService) {
        this.ipfsService = new FrameworkIPFSService(existingIPFSService);
      } else {
        // In a real implementation, create IPFS instance
        throw new Error('IPFS service is required. Please provide an existing IPFS instance.');
      }
      await this.ipfsService.init();
      this.status.services.ipfs = 'connected';
      console.log('✅ IPFS service initialized');
      // Initialize OrbitDB service
      if (existingOrbitDBService) {
        this.orbitDBService = new FrameworkOrbitDBService(existingOrbitDBService);
      } else {
        // In a real implementation, create OrbitDB instance
        throw new Error(
          'OrbitDB service is required. Please provide an existing OrbitDB instance.',
        );
      }
      await this.orbitDBService.init();
      this.status.services.orbitdb = 'connected';
      console.log('✅ OrbitDB service initialized');
    } catch (error) {
      this.status.services.ipfs = 'error';
      this.status.services.orbitdb = 'error';
      throw new Error(`Service initialization failed: ${error}`);
    }
  }

  // Core component initialization
  /**
   * Creates the DatabaseManager, ShardManager (with shards for every
   * registered global model that declares sharding), the optional QueryCache
   * and the RelationshipManager.
   */
  private async initializeCoreComponents(): Promise<void> {
    console.log('🔧 Initializing core components...');
    // Database Manager
    this.databaseManager = new DatabaseManager(this.orbitDBService!);
    await this.databaseManager.initializeAllDatabases();
    console.log('✅ DatabaseManager initialized');
    // Shard Manager
    this.shardManager = new ShardManager();
    this.shardManager.setOrbitDBService(this.orbitDBService!);
    // Initialize shards for registered models
    const globalModels = ModelRegistry.getGlobalModels();
    for (const model of globalModels) {
      if (model.sharding) {
        await this.shardManager.createShards(model.modelName, model.sharding, model.dbType);
      }
    }
    console.log('✅ ShardManager initialized');
    // Query Cache (enabled unless features.queryCache is explicitly false)
    if (this.config.features?.queryCache !== false) {
      const cacheConfig = this.configManager.cacheConfig;
      this.queryCache = new QueryCache(cacheConfig?.maxSize || 1000, cacheConfig?.ttl || 300000);
      console.log('✅ QueryCache initialized');
    }
    // Relationship Manager
    this.relationshipManager = new RelationshipManager({
      databaseManager: this.databaseManager,
      shardManager: this.shardManager,
      queryCache: this.queryCache,
    });
    console.log('✅ RelationshipManager initialized');
  }

  // Feature component initialization
  /**
   * Creates the optional PinningManager and PubSubManager (both enabled
   * unless explicitly disabled via config.features) and the MigrationManager.
   */
  private async initializeFeatureComponents(): Promise<void> {
    console.log('🎛️ Initializing feature components...');
    // Pinning Manager
    if (this.config.features?.automaticPinning !== false) {
      this.pinningManager = new PinningManager(this.ipfsService!.getHelia(), {
        maxTotalPins: this.config.performance?.maxConcurrentOperations || 10000,
        cleanupIntervalMs: 60000,
      });
      // Setup default pinning rules based on config
      if (this.config.defaultPinning) {
        const globalModels = ModelRegistry.getGlobalModels();
        for (const model of globalModels) {
          this.pinningManager.setPinningRule(model.modelName, this.config.defaultPinning);
        }
      }
      this.status.services.pinning = 'active';
      console.log('✅ PinningManager initialized');
    }
    // PubSub Manager
    if (this.config.features?.pubsub !== false) {
      this.pubsubManager = new PubSubManager(this.ipfsService!.getHelia(), {
        enabled: true,
        autoPublishModelEvents: true,
        autoPublishDatabaseEvents: true,
        topicPrefix: `debros-${this.config.environment || 'dev'}`,
      });
      await this.pubsubManager.initialize();
      this.status.services.pubsub = 'active';
      console.log('✅ PubSubManager initialized');
    }
    // Migration Manager (always created, regardless of feature flags)
    this.migrationManager = new MigrationManager(
      this.databaseManager,
      this.shardManager,
      this.createMigrationLogger(),
    );
    console.log('✅ MigrationManager initialized');
  }

  // Setup global framework access for models
  /**
   * Publishes all components on globalThis.__debrosFramework so model code
   * can reach them without explicit injection. Removed again by cleanup().
   */
  private setupGlobalAccess(): void {
    (globalThis as any).__debrosFramework = {
      databaseManager: this.databaseManager,
      shardManager: this.shardManager,
      configManager: this.configManager,
      queryCache: this.queryCache,
      relationshipManager: this.relationshipManager,
      pinningManager: this.pinningManager,
      pubsubManager: this.pubsubManager,
      migrationManager: this.migrationManager,
      framework: this,
    };
  }

  // Start background processes
  /**
   * Starts the periodic health-check timer and, unless metrics are disabled,
   * the periodic metrics-collection timer. Both are cleared by cleanup().
   */
  private async startBackgroundProcesses(): Promise<void> {
    console.log('⚙️ Starting background processes...');
    // Health check interval
    this.healthCheckInterval = setInterval(() => {
      this.performHealthCheck();
    }, 30000); // Every 30 seconds
    // Metrics collection
    if (this.config.monitoring?.enableMetrics !== false) {
      this.metricsCollector = setInterval(() => {
        this.collectMetrics();
      }, this.config.monitoring?.metricsInterval || 60000); // Every minute
    }
    console.log('✅ Background processes started');
  }

  // Automatic migration execution
  /**
   * Runs all pending migrations at startup. In production a migration
   * failure is logged and swallowed so it does not abort initialization;
   * in other environments it is rethrown.
   */
  private async runAutomaticMigrations(): Promise<void> {
    if (!this.migrationManager) return;
    try {
      console.log('🔄 Running automatic migrations...');
      const pendingMigrations = this.migrationManager.getPendingMigrations();
      if (pendingMigrations.length > 0) {
        console.log(`Found ${pendingMigrations.length} pending migrations`);
        const results = await this.migrationManager.runPendingMigrations({
          stopOnError: true,
          batchSize: this.config.performance?.batchSize || 100,
        });
        const successful = results.filter((r) => r.success).length;
        console.log(`✅ Completed ${successful}/${results.length} migrations`);
        this.metrics.migrationsRun += successful;
      } else {
        console.log('No pending migrations found');
      }
    } catch (error) {
      console.error('❌ Automatic migration failed:', error);
      if (this.config.environment === 'production') {
        // In production, don't fail initialization due to migration errors
        console.warn('Continuing initialization despite migration failure');
      } else {
        throw error;
      }
    }
  }

  // Public API methods
  // Model registration
  /** Registers a model class with the ModelRegistry and refreshes the model count. */
  registerModel(modelClass: typeof BaseModel, config?: any): void {
    ModelRegistry.register(modelClass.name, modelClass, config || {});
    console.log(`📝 Registered model: ${modelClass.name}`);
    this.metrics.totalModels = ModelRegistry.getModelNames().length;
  }

  // Get model instance
  /** Looks up a registered model class by name; null if not registered. */
  getModel(modelName: string): typeof BaseModel | null {
    return ModelRegistry.get(modelName) || null;
  }

  // Database operations
  /** Creates the per-user database set for `userId`. Requires initialize(). */
  async createUserDatabase(userId: string): Promise<void> {
    if (!this.databaseManager) {
      throw new Error('Framework not initialized');
    }
    await this.databaseManager.createUserDatabases(userId);
    this.metrics.totalDatabases++;
  }

  /** Returns the user-scoped database for a given user and model. */
  async getUserDatabase(userId: string, modelName: string): Promise<any> {
    if (!this.databaseManager) {
      throw new Error('Framework not initialized');
    }
    return await this.databaseManager.getUserDatabase(userId, modelName);
  }

  /** Returns the global (non-user-scoped) database for a model. */
  async getGlobalDatabase(modelName: string): Promise<any> {
    if (!this.databaseManager) {
      throw new Error('Framework not initialized');
    }
    return await this.databaseManager.getGlobalDatabase(modelName);
  }

  // Migration operations
  /**
   * Runs a single migration by id.
   * Note: migrationsRun is incremented whenever the call returns without
   * throwing — the result's own success flag is not inspected here.
   */
  async runMigration(migrationId: string, options?: any): Promise<any> {
    if (!this.migrationManager) {
      throw new Error('MigrationManager not initialized');
    }
    const result = await this.migrationManager.runMigration(migrationId, options);
    this.metrics.migrationsRun++;
    return result;
  }

  /** Registers a migration definition with the MigrationManager. */
  async registerMigration(migration: any): Promise<void> {
    if (!this.migrationManager) {
      throw new Error('MigrationManager not initialized');
    }
    this.migrationManager.registerMigration(migration);
  }

  /** Lists pending migrations (optionally filtered by model); [] before init. */
  getPendingMigrations(modelName?: string): any[] {
    if (!this.migrationManager) {
      return [];
    }
    return this.migrationManager.getPendingMigrations(modelName);
  }

  // Cache management
  /** Empties the query cache, if one is enabled. */
  clearQueryCache(): void {
    if (this.queryCache) {
      this.queryCache.clear();
    }
  }

  /** Empties the relationship cache, if the RelationshipManager exists. */
  clearRelationshipCache(): void {
    if (this.relationshipManager) {
      this.relationshipManager.clearRelationshipCache();
    }
  }

  /**
   * Pre-populates caches. Currently the common-query list is empty and the
   * relationship warmup is a placeholder — both are TODO stubs.
   */
  async warmupCaches(): Promise<void> {
    console.log('🔥 Warming up caches...');
    if (this.queryCache) {
      // Warm up common queries
      const commonQueries: any[] = []; // Would be populated with actual queries
      await this.queryCache.warmup(commonQueries);
    }
    if (this.relationshipManager && this.pinningManager) {
      // Warm up relationship cache for popular content
      // Implementation would depend on actual models
    }
    console.log('✅ Cache warmup completed');
  }

  // Health and monitoring
  /**
   * Refreshes `status`. Service states are derived from component presence
   * (reference !== null), not from live connectivity probes.
   */
  performHealthCheck(): void {
    try {
      this.status.lastHealthCheck = Date.now();
      // Check service health
      this.status.services.orbitdb = this.orbitDBService ? 'connected' : 'disconnected';
      this.status.services.ipfs = this.ipfsService ? 'connected' : 'disconnected';
      this.status.services.pinning = this.pinningManager ? 'active' : 'inactive';
      this.status.services.pubsub = this.pubsubManager ? 'active' : 'inactive';
      // Overall health check
      const allServicesHealthy = Object.values(this.status.services).every(
        (status) => status === 'connected' || status === 'active',
      );
      this.status.healthy = this.initialized && allServicesHealthy;
    } catch (error) {
      console.error('Health check failed:', error);
      this.status.healthy = false;
    }
  }

  /** Refreshes the metrics snapshot from the caches and registry. */
  collectMetrics(): void {
    try {
      this.metrics.uptime = Date.now() - this.startTime;
      this.metrics.totalModels = ModelRegistry.getModelNames().length;
      if (this.queryCache) {
        const cacheStats = this.queryCache.getStats();
        this.metrics.cacheHitRate = cacheStats.hitRate;
        this.metrics.averageQueryTime = 0; // Would need to be calculated from cache stats
        this.metrics.memoryUsage.queryCache = cacheStats.size * 1024; // Estimate
      }
      if (this.relationshipManager) {
        const relStats = this.relationshipManager.getRelationshipCacheStats();
        this.metrics.memoryUsage.relationshipCache = relStats.cache.memoryUsage;
      }
      this.metrics.memoryUsage.total =
        this.metrics.memoryUsage.queryCache + this.metrics.memoryUsage.relationshipCache;
    } catch (error) {
      console.error('Metrics collection failed:', error);
    }
  }

  /** Returns a shallow copy of the current status snapshot. */
  getStatus(): FrameworkStatus {
    return { ...this.status };
  }

  /** Returns a freshly collected, shallow-copied metrics snapshot. */
  getMetrics(): FrameworkMetrics {
    this.collectMetrics(); // Ensure fresh metrics
    return { ...this.metrics };
  }

  /** Returns a shallow copy of the merged configuration. */
  getConfig(): DebrosFrameworkConfig {
    return { ...this.config };
  }

  // Component access — all return null before initialize() completes.
  getDatabaseManager(): DatabaseManager | null {
    return this.databaseManager;
  }
  getShardManager(): ShardManager | null {
    return this.shardManager;
  }
  getRelationshipManager(): RelationshipManager | null {
    return this.relationshipManager;
  }
  getPinningManager(): PinningManager | null {
    return this.pinningManager;
  }
  getPubSubManager(): PubSubManager | null {
    return this.pubsubManager;
  }
  getMigrationManager(): MigrationManager | null {
    return this.migrationManager;
  }

  // Framework lifecycle
  /** Stops the framework; no-op when not initialized. */
  async stop(): Promise<void> {
    if (!this.initialized) {
      return;
    }
    console.log('🛑 Stopping DebrosFramework...');
    try {
      await this.cleanup();
      this.initialized = false;
      this.status.initialized = false;
      this.status.healthy = false;
      console.log('✅ DebrosFramework stopped successfully');
    } catch (error) {
      console.error('❌ Error during framework shutdown:', error);
      throw error;
    }
  }

  /**
   * Stops and re-initializes the framework, reusing the underlying OrbitDB
   * and Helia instances captured before shutdown and merging any new config.
   */
  async restart(newConfig?: Partial<DebrosFrameworkConfig>): Promise<void> {
    console.log('🔄 Restarting DebrosFramework...');
    const orbitDB = this.orbitDBService?.getOrbitDB();
    const ipfs = this.ipfsService?.getHelia();
    await this.stop();
    if (newConfig) {
      this.config = { ...this.config, ...newConfig };
    }
    await this.initialize(orbitDB, ipfs);
  }

  // Cleanup method
  /**
   * Tears everything down: clears timers, shuts down feature components,
   * clears caches, stops the database/shard managers, and removes the
   * global framework handle. Safe to call on a partially initialized
   * instance (every step is guarded).
   */
  private async cleanup(): Promise<void> {
    // Stop background processes
    if (this.healthCheckInterval) {
      clearInterval(this.healthCheckInterval);
      this.healthCheckInterval = null;
    }
    if (this.metricsCollector) {
      clearInterval(this.metricsCollector);
      this.metricsCollector = null;
    }
    // Cleanup components
    if (this.pubsubManager) {
      await this.pubsubManager.shutdown();
    }
    if (this.pinningManager) {
      await this.pinningManager.shutdown();
    }
    if (this.migrationManager) {
      await this.migrationManager.cleanup();
    }
    if (this.queryCache) {
      this.queryCache.clear();
    }
    if (this.relationshipManager) {
      this.relationshipManager.clearRelationshipCache();
    }
    if (this.databaseManager) {
      await this.databaseManager.stop();
    }
    if (this.shardManager) {
      await this.shardManager.stop();
    }
    // Clear global access
    delete (globalThis as any).__debrosFramework;
  }

  // Utility methods
  /** Merges user config over the built-in defaults (shallow, top-level only). */
  private mergeDefaultConfig(config: DebrosFrameworkConfig): DebrosFrameworkConfig {
    return {
      environment: 'development',
      features: {
        autoMigration: true,
        automaticPinning: true,
        pubsub: true,
        queryCache: true,
        relationshipCache: true,
      },
      performance: {
        queryTimeout: 30000,
        migrationTimeout: 300000,
        maxConcurrentOperations: 100,
        batchSize: 100,
      },
      monitoring: {
        enableMetrics: true,
        logLevel: 'info',
        metricsInterval: 60000,
      },
      ...config,
    };
  }

  /**
   * Builds a console-backed logger for the MigrationManager that honors the
   * configured monitoring.logLevel (errors always log).
   */
  private createMigrationLogger(): any {
    const logLevel = this.config.monitoring?.logLevel || 'info';
    return {
      info: (message: string, meta?: any) => {
        if (['info', 'debug'].includes(logLevel)) {
          console.log(`[MIGRATION INFO] ${message}`, meta || '');
        }
      },
      warn: (message: string, meta?: any) => {
        if (['warn', 'info', 'debug'].includes(logLevel)) {
          console.warn(`[MIGRATION WARN] ${message}`, meta || '');
        }
      },
      error: (message: string, meta?: any) => {
        console.error(`[MIGRATION ERROR] ${message}`, meta || '');
      },
      debug: (message: string, meta?: any) => {
        if (logLevel === 'debug') {
          console.log(`[MIGRATION DEBUG] ${message}`, meta || '');
        }
      },
    };
  }

  /** Logs a one-shot summary of version, environment, services and features. */
  private logFrameworkInfo(): void {
    console.log('\n📋 DebrosFramework Information:');
    console.log('==============================');
    console.log(`Version: ${this.status.version}`);
    console.log(`Environment: ${this.status.environment}`);
    console.log(`Models registered: ${this.metrics.totalModels}`);
    console.log(
      `Services: ${Object.entries(this.status.services)
        .map(([name, status]) => `${name}:${status}`)
        .join(', ')}`,
    );
    console.log(
      `Features enabled: ${Object.entries(this.config.features || {})
        .filter(([, enabled]) => enabled)
        .map(([feature]) => feature)
        .join(', ')}`,
    );
    console.log('');
  }

  // Static factory methods
  /** Creates an uninitialized framework instance (caller must initialize()). */
  static async create(config: DebrosFrameworkConfig = {}): Promise<DebrosFramework> {
    const framework = new DebrosFramework(config);
    return framework;
  }

  /** Creates AND initializes a framework with the given service instances. */
  static async createWithServices(
    orbitDBService: any,
    ipfsService: any,
    config: DebrosFrameworkConfig = {},
  ): Promise<DebrosFramework> {
    const framework = new DebrosFramework(config);
    await framework.initialize(orbitDBService, ipfsService);
    return framework;
  }
}
// Export the main framework class as default
export default DebrosFramework;

View File

@ -0,0 +1,197 @@
import { FrameworkConfig, CacheConfig, PinningConfig } from '../types/framework';
/** Tunables for the framework's database layer (shard counts and cache size). */
export interface DatabaseConfig {
  userDirectoryShards?: number; // number of shards for the global user directory
  defaultGlobalShards?: number; // default shard count for global model databases
  cacheSize?: number; // database handle cache size
}
/** FrameworkConfig extended with database tuning and logging options. */
export interface ExtendedFrameworkConfig extends FrameworkConfig {
  database?: DatabaseConfig; // shard/cache tuning, see DatabaseConfig
  debug?: boolean; // enables debug behavior (surfaced via ConfigManager.debug)
  logLevel?: 'error' | 'warn' | 'info' | 'debug';
}
/**
 * ConfigManager — merges user-supplied framework configuration over built-in
 * defaults, validates the result, and exposes typed accessors plus ready-made
 * presets for development, production and test environments.
 */
export class ConfigManager {
  private config: ExtendedFrameworkConfig;
  private defaults: ExtendedFrameworkConfig = {
    cache: {
      enabled: true,
      maxSize: 1000,
      ttl: 300000, // 5 minutes
    },
    defaultPinning: {
      strategy: 'fixed' as const,
      factor: 2,
    },
    database: {
      userDirectoryShards: 4,
      defaultGlobalShards: 8,
      cacheSize: 100,
    },
    autoMigration: true,
    debug: false,
    logLevel: 'info',
  };

  /** Merge the given overrides over the defaults and validate immediately. */
  constructor(config: ExtendedFrameworkConfig = {}) {
    this.config = this.applyDefaults(config);
    this.assertValid();
  }

  /**
   * Shallow-merge overrides over defaults, then deep-merge the three nested
   * sections (cache, defaultPinning, database) so partial overrides keep the
   * remaining default fields.
   */
  private applyDefaults(overrides: ExtendedFrameworkConfig): ExtendedFrameworkConfig {
    const merged: ExtendedFrameworkConfig = { ...this.defaults, ...overrides };
    merged.cache = { ...this.defaults.cache, ...overrides.cache };
    merged.defaultPinning = {
      ...this.defaults.defaultPinning,
      ...(overrides.defaultPinning || {}),
    };
    merged.database = { ...this.defaults.database, ...overrides.database };
    return merged;
  }

  /** Throws if any merged configuration value is out of its allowed range. */
  private assertValid(): void {
    const { cache, defaultPinning, database } = this.config;
    // Cache: size and TTL lower bounds (only checked when set to a truthy value)
    if (cache) {
      if (cache.maxSize && cache.maxSize < 1) {
        throw new Error('Cache maxSize must be at least 1');
      }
      if (cache.ttl && cache.ttl < 1000) {
        throw new Error('Cache TTL must be at least 1000ms');
      }
    }
    // Pinning: replication factor lower bound
    if (defaultPinning && defaultPinning.factor && defaultPinning.factor < 1) {
      throw new Error('Pinning factor must be at least 1');
    }
    // Database: shard counts lower bounds
    if (database) {
      if (database.userDirectoryShards && database.userDirectoryShards < 1) {
        throw new Error('User directory shards must be at least 1');
      }
      if (database.defaultGlobalShards && database.defaultGlobalShards < 1) {
        throw new Error('Default global shards must be at least 1');
      }
    }
  }

  // Getters for configuration values
  get cacheConfig(): CacheConfig | undefined {
    return this.config.cache;
  }

  get defaultPinningConfig(): PinningConfig | undefined {
    return this.config.defaultPinning;
  }

  get databaseConfig(): DatabaseConfig | undefined {
    return this.config.database;
  }

  get autoMigration(): boolean {
    return this.config.autoMigration ?? false;
  }

  get debug(): boolean {
    return this.config.debug ?? false;
  }

  get logLevel(): string {
    return this.config.logLevel ?? 'info';
  }

  /** Merge additional overrides into the active config and re-validate. */
  updateConfig(newConfig: Partial<ExtendedFrameworkConfig>): void {
    this.config = this.applyDefaults({ ...this.config, ...newConfig });
    this.assertValid();
  }

  /** Returns a shallow copy of the fully-merged configuration. */
  getConfig(): ExtendedFrameworkConfig {
    return { ...this.config };
  }

  // Configuration presets
  /** Small caches, minimal pinning, verbose logging — for local development. */
  static developmentConfig(): ExtendedFrameworkConfig {
    return {
      debug: true,
      logLevel: 'debug',
      cache: {
        enabled: true,
        maxSize: 100,
        ttl: 60000, // 1 minute for development
      },
      database: {
        userDirectoryShards: 2,
        defaultGlobalShards: 2,
        cacheSize: 50,
      },
      defaultPinning: {
        strategy: 'fixed' as const,
        factor: 1, // Minimal pinning for development
      },
    };
  }

  /** Large caches, many shards, high pinning redundancy — for production. */
  static productionConfig(): ExtendedFrameworkConfig {
    return {
      debug: false,
      logLevel: 'warn',
      cache: {
        enabled: true,
        maxSize: 10000,
        ttl: 600000, // 10 minutes
      },
      database: {
        userDirectoryShards: 16,
        defaultGlobalShards: 32,
        cacheSize: 1000,
      },
      defaultPinning: {
        strategy: 'popularity' as const,
        factor: 5, // Higher redundancy for production
      },
    };
  }

  /** Caching off, single shards, manual migrations — for deterministic tests. */
  static testConfig(): ExtendedFrameworkConfig {
    return {
      debug: true,
      logLevel: 'error', // Minimal logging during tests
      cache: {
        enabled: false, // Disable caching for predictable tests
      },
      database: {
        userDirectoryShards: 1,
        defaultGlobalShards: 1,
        cacheSize: 10,
      },
      defaultPinning: {
        strategy: 'fixed',
        factor: 1,
      },
      autoMigration: false, // Manual migration control in tests
    };
  }
}

View File

@ -0,0 +1,368 @@
import { ModelRegistry } from './ModelRegistry';
import { FrameworkOrbitDBService } from '../services/OrbitDBService';
import { StoreType } from '../types/framework';
import { UserMappings } from '../types/models';
/**
 * Plain data holder pairing a user id with the addresses of that user's
 * databases (model-key -> OrbitDB address string).
 */
export class UserMappingsData implements UserMappings {
  public userId: string;
  public databases: Record<string, string>;

  constructor(userId: string, databases: Record<string, string>) {
    this.userId = userId;
    this.databases = databases;
  }
}
/**
 * DatabaseManager — creates, opens and caches OrbitDB databases for the
 * framework: global per-model databases, per-user databases (discovered via
 * hash-sharded directory databases) and system databases. Also provides
 * type-aware CRUD helpers for the supported store types.
 */
export class DatabaseManager {
  private orbitDBService: FrameworkOrbitDBService;
  private databases: Map<string, any> = new Map(); // cache keyed by name, address, or `${userId}-${modelName}`
  private userMappings: Map<string, any> = new Map(); // userId -> UserMappingsData cache
  private globalDatabases: Map<string, any> = new Map(); // modelName -> global database
  private globalDirectoryShards: any[] = []; // hash-sharded user directory databases
  private initialized: boolean = false;

  constructor(orbitDBService: FrameworkOrbitDBService) {
    this.orbitDBService = orbitDBService;
  }

  /**
   * Creates all global model databases and the system databases (user
   * directory shards). Idempotent: subsequent calls return immediately.
   */
  async initializeAllDatabases(): Promise<void> {
    if (this.initialized) {
      return;
    }
    console.log('🚀 Initializing DebrosFramework databases...');
    // Initialize global databases first
    await this.initializeGlobalDatabases();
    // Initialize system databases (user directory, etc.)
    await this.initializeSystemDatabases();
    this.initialized = true;
    console.log('✅ Database initialization complete');
  }

  /** Creates one `global-<model>` database per registered global model. */
  private async initializeGlobalDatabases(): Promise<void> {
    const globalModels = ModelRegistry.getGlobalModels();
    console.log(`📊 Creating ${globalModels.length} global databases...`);
    for (const model of globalModels) {
      const dbName = `global-${model.modelName.toLowerCase()}`;
      try {
        const db = await this.createDatabase(dbName, model.dbType, 'global');
        this.globalDatabases.set(model.modelName, db);
        console.log(`✓ Created global database: ${dbName} (${model.dbType})`);
      } catch (error) {
        console.error(`❌ Failed to create global database ${dbName}:`, error);
        throw error;
      }
    }
  }

  /** Creates the fixed set of keyvalue shards backing the user directory. */
  private async initializeSystemDatabases(): Promise<void> {
    console.log('🔧 Creating system databases...');
    // Create global user directory shards
    const DIRECTORY_SHARD_COUNT = 4; // Configurable
    for (let i = 0; i < DIRECTORY_SHARD_COUNT; i++) {
      const shardName = `global-user-directory-shard-${i}`;
      try {
        const shard = await this.createDatabase(shardName, 'keyvalue', 'system');
        this.globalDirectoryShards.push(shard);
        console.log(`✓ Created directory shard: ${shardName}`);
      } catch (error) {
        console.error(`❌ Failed to create directory shard ${shardName}:`, error);
        throw error;
      }
    }
    console.log(`✅ Created ${this.globalDirectoryShards.length} directory shards`);
  }

  /**
   * Creates the full database set for a user: a `<userId>-mappings` keyvalue
   * database plus one database per user-scoped model, records the address
   * mapping, registers the user in the sharded directory, and caches the
   * resulting UserMappingsData.
   */
  async createUserDatabases(userId: string): Promise<UserMappingsData> {
    console.log(`👤 Creating databases for user: ${userId}`);
    const userScopedModels = ModelRegistry.getUserScopedModels();
    const databases: Record<string, string> = {};
    // Create mappings database first
    const mappingsDBName = `${userId}-mappings`;
    const mappingsDB = await this.createDatabase(mappingsDBName, 'keyvalue', 'user');
    // Create database for each user-scoped model
    for (const model of userScopedModels) {
      const dbName = `${userId}-${model.modelName.toLowerCase()}`;
      try {
        const db = await this.createDatabase(dbName, model.dbType, 'user');
        // Keyed as `<modelname>DB` -> OrbitDB address string
        databases[`${model.modelName.toLowerCase()}DB`] = db.address.toString();
        console.log(`✓ Created user database: ${dbName} (${model.dbType})`);
      } catch (error) {
        console.error(`❌ Failed to create user database ${dbName}:`, error);
        throw error;
      }
    }
    // Store mappings in the mappings database
    await mappingsDB.set('mappings', databases);
    console.log(`✓ Stored database mappings for user ${userId}`);
    // Register in global directory
    await this.registerUserInDirectory(userId, mappingsDB.address.toString());
    const userMappings = new UserMappingsData(userId, databases);
    // Cache for future use
    this.userMappings.set(userId, userMappings);
    console.log(`✅ User databases created successfully for ${userId}`);
    return userMappings;
  }

  /**
   * Opens (or returns the cached handle for) the database backing `modelName`
   * for `userId`, resolved through the user's stored mappings.
   * @throws Error when the user has no database for that model.
   */
  async getUserDatabase(userId: string, modelName: string): Promise<any> {
    const mappings = await this.getUserMappings(userId);
    const dbKey = `${modelName.toLowerCase()}DB`;
    const dbAddress = mappings.databases[dbKey];
    if (!dbAddress) {
      throw new Error(`Database not found for user ${userId} and model ${modelName}`);
    }
    // Check if we have this database cached
    const cacheKey = `${userId}-${modelName}`;
    if (this.databases.has(cacheKey)) {
      return this.databases.get(cacheKey);
    }
    // Open the database
    const db = await this.openDatabase(dbAddress);
    this.databases.set(cacheKey, db);
    return db;
  }

  /**
   * Resolves a user's database mappings: from the in-memory cache if present,
   * otherwise via the user's directory shard -> mappings database.
   * @throws Error when the user is unknown or has no stored mappings.
   */
  async getUserMappings(userId: string): Promise<UserMappingsData> {
    // Check cache first
    if (this.userMappings.has(userId)) {
      return this.userMappings.get(userId);
    }
    // Get from global directory
    const shardIndex = this.getShardIndex(userId, this.globalDirectoryShards.length);
    const shard = this.globalDirectoryShards[shardIndex];
    if (!shard) {
      throw new Error('Global directory not initialized');
    }
    const mappingsAddress = await shard.get(userId);
    if (!mappingsAddress) {
      throw new Error(`User ${userId} not found in directory`);
    }
    const mappingsDB = await this.openDatabase(mappingsAddress);
    const mappings = await mappingsDB.get('mappings');
    if (!mappings) {
      throw new Error(`No database mappings found for user ${userId}`);
    }
    const userMappings = new UserMappingsData(userId, mappings);
    // Cache for future use
    this.userMappings.set(userId, userMappings);
    return userMappings;
  }

  /** Returns the already-created global database for a model; throws if absent. */
  async getGlobalDatabase(modelName: string): Promise<any> {
    const db = this.globalDatabases.get(modelName);
    if (!db) {
      throw new Error(`Global database not found for model: ${modelName}`);
    }
    return db;
  }

  /** Returns the live directory shard list (not a copy). */
  async getGlobalDirectoryShards(): Promise<any[]> {
    return this.globalDirectoryShards;
  }

  /** Creates/opens a database by name via the OrbitDB service and caches it. */
  private async createDatabase(name: string, type: StoreType, _scope: string): Promise<any> {
    try {
      const db = await this.orbitDBService.openDatabase(name, type);
      // Store database reference
      this.databases.set(name, db);
      return db;
    } catch (error) {
      console.error(`Failed to create database ${name}:`, error);
      throw new Error(`Database creation failed for ${name}: ${error}`);
    }
  }

  /** Opens a database by its OrbitDB address, with address-keyed caching. */
  private async openDatabase(address: string): Promise<any> {
    try {
      // Check if we already have this database cached by address
      if (this.databases.has(address)) {
        return this.databases.get(address);
      }
      // Open database by address (implementation may vary based on OrbitDB version)
      const orbitdb = this.orbitDBService.getOrbitDB();
      const db = await orbitdb.open(address);
      // Cache the database
      this.databases.set(address, db);
      return db;
    } catch (error) {
      console.error(`Failed to open database at address ${address}:`, error);
      throw new Error(`Database opening failed: ${error}`);
    }
  }

  /** Writes userId -> mappings-DB-address into the user's directory shard. */
  private async registerUserInDirectory(userId: string, mappingsAddress: string): Promise<void> {
    const shardIndex = this.getShardIndex(userId, this.globalDirectoryShards.length);
    const shard = this.globalDirectoryShards[shardIndex];
    if (!shard) {
      throw new Error('Global directory shards not initialized');
    }
    try {
      await shard.set(userId, mappingsAddress);
      console.log(`✓ Registered user ${userId} in directory shard ${shardIndex}`);
    } catch (error) {
      console.error(`Failed to register user ${userId} in directory:`, error);
      throw error;
    }
  }

  /**
   * Deterministically maps a key to a shard index in [0, shardCount) using a
   * 32-bit rolling hash of the key's characters.
   */
  private getShardIndex(key: string, shardCount: number): number {
    // Simple hash-based sharding
    let hash = 0;
    for (let i = 0; i < key.length; i++) {
      hash = ((hash << 5) - hash + key.charCodeAt(i)) & 0xffffffff;
    }
    return Math.abs(hash) % shardCount;
  }

  // Database operation helpers
  /** Reads all entries from a database, using the access pattern of its store type. */
  async getAllDocuments(database: any, dbType: StoreType): Promise<any[]> {
    try {
      switch (dbType) {
        case 'eventlog':
          const iterator = database.iterator();
          return iterator.collect();
        case 'keyvalue':
          return Object.values(database.all());
        case 'docstore':
          return database.query(() => true);
        case 'feed':
          const feedIterator = database.iterator();
          return feedIterator.collect();
        case 'counter':
          // Counters have no documents; expose value + id as a single entry.
          return [{ value: database.value, id: database.id }];
        default:
          throw new Error(`Unsupported database type: ${dbType}`);
      }
    } catch (error) {
      console.error(`Error fetching documents from ${dbType} database:`, error);
      throw error;
    }
  }

  /**
   * Adds a document using the store-type-appropriate write operation and
   * returns its id/hash.
   * NOTE(review): the keyvalue branch assumes `data.id` is set — confirm
   * callers always provide it.
   */
  async addDocument(database: any, dbType: StoreType, data: any): Promise<string> {
    try {
      switch (dbType) {
        case 'eventlog':
          return await database.add(data);
        case 'keyvalue':
          await database.set(data.id, data);
          return data.id;
        case 'docstore':
          return await database.put(data);
        case 'feed':
          return await database.add(data);
        case 'counter':
          await database.inc(data.amount || 1);
          return database.id;
        default:
          throw new Error(`Unsupported database type: ${dbType}`);
      }
    } catch (error) {
      console.error(`Error adding document to ${dbType} database:`, error);
      throw error;
    }
  }

  /**
   * Updates a document in place for keyvalue/docstore; for append-only store
   * types the "update" is appended as a new entry instead.
   */
  async updateDocument(database: any, dbType: StoreType, id: string, data: any): Promise<void> {
    try {
      switch (dbType) {
        case 'keyvalue':
          await database.set(id, data);
          break;
        case 'docstore':
          await database.put(data);
          break;
        default:
          // For append-only stores, we add a new entry
          await this.addDocument(database, dbType, data);
      }
    } catch (error) {
      console.error(`Error updating document in ${dbType} database:`, error);
      throw error;
    }
  }

  /**
   * Deletes a document for keyvalue/docstore; append-only store types get a
   * tombstone entry ({ _deleted: true, ... }) appended instead.
   */
  async deleteDocument(database: any, dbType: StoreType, id: string): Promise<void> {
    try {
      switch (dbType) {
        case 'keyvalue':
          await database.del(id);
          break;
        case 'docstore':
          await database.del(id);
          break;
        default:
          // For append-only stores, we might add a deletion marker
          await this.addDocument(database, dbType, { _deleted: true, id, deletedAt: Date.now() });
      }
    } catch (error) {
      console.error(`Error deleting document from ${dbType} database:`, error);
      throw error;
    }
  }

  // Cleanup methods
  /**
   * Drops all caches and resets the initialized flag. Note: only clears
   * in-memory references — it does not close the underlying databases.
   */
  async stop(): Promise<void> {
    console.log('🛑 Stopping DatabaseManager...');
    // Clear caches
    this.databases.clear();
    this.userMappings.clear();
    this.globalDatabases.clear();
    this.globalDirectoryShards = [];
    this.initialized = false;
    console.log('✅ DatabaseManager stopped');
  }
}

View File

@ -0,0 +1,104 @@
import { BaseModel } from '../models/BaseModel';
import { ModelConfig } from '../types/models';
import { StoreType } from '../types/framework';
export class ModelRegistry {
private static models: Map<string, typeof BaseModel> = new Map();
private static configs: Map<string, ModelConfig> = new Map();
static register(name: string, modelClass: typeof BaseModel, config: ModelConfig): void {
this.models.set(name, modelClass);
this.configs.set(name, config);
// Validate model configuration
this.validateModel(modelClass, config);
console.log(`Registered model: ${name} with scope: ${config.scope || 'global'}`);
}
static get(name: string): typeof BaseModel | undefined {
return this.models.get(name);
}
static getConfig(name: string): ModelConfig | undefined {
return this.configs.get(name);
}
static getAllModels(): Map<string, typeof BaseModel> {
return new Map(this.models);
}
static getUserScopedModels(): Array<typeof BaseModel> {
return Array.from(this.models.values()).filter((model) => model.scope === 'user');
}
static getGlobalModels(): Array<typeof BaseModel> {
return Array.from(this.models.values()).filter((model) => model.scope === 'global');
}
static getModelNames(): string[] {
return Array.from(this.models.keys());
}
static clear(): void {
this.models.clear();
this.configs.clear();
}
private static validateModel(modelClass: typeof BaseModel, config: ModelConfig): void {
// Validate model name
if (!modelClass.name) {
throw new Error('Model class must have a name');
}
// Validate database type
if (config.type && !this.isValidStoreType(config.type)) {
throw new Error(`Invalid store type: ${config.type}`);
}
// Validate scope
if (config.scope && !['user', 'global'].includes(config.scope)) {
throw new Error(`Invalid scope: ${config.scope}. Must be 'user' or 'global'`);
}
// Validate sharding configuration
if (config.sharding) {
this.validateShardingConfig(config.sharding);
}
// Validate pinning configuration
if (config.pinning) {
this.validatePinningConfig(config.pinning);
}
console.log(`✓ Model ${modelClass.name} configuration validated`);
}
private static isValidStoreType(type: StoreType): boolean {
return ['eventlog', 'keyvalue', 'docstore', 'counter', 'feed'].includes(type);
}
private static validateShardingConfig(config: any): void {
if (!config.strategy || !['hash', 'range', 'user'].includes(config.strategy)) {
throw new Error('Sharding strategy must be one of: hash, range, user');
}
if (!config.count || config.count < 1) {
throw new Error('Sharding count must be a positive number');
}
if (!config.key) {
throw new Error('Sharding key is required');
}
}
// Validates a pinning sub-configuration; both fields are optional, but when
// present they must be well-formed. Throws on the first problem found.
private static validatePinningConfig(config: any): void {
  if (config.strategy && !['fixed', 'popularity', 'tiered'].includes(config.strategy)) {
    throw new Error('Pinning strategy must be one of: fixed, popularity, tiered');
  }
  // Fix: the old `config.factor && ...` guard let factor === 0 bypass
  // validation entirely because 0 is falsy; check presence explicitly.
  if (config.factor !== undefined && !(typeof config.factor === 'number' && config.factor >= 1)) {
    throw new Error('Pinning factor must be a positive number');
  }
}
}

169
src/framework/index.ts Normal file
View File

@ -0,0 +1,169 @@
/**
 * DebrosFramework - Main Export File
 *
 * This file exports all framework components for easy import and usage.
 * It provides a clean API surface for consumers of the framework.
 */
// Main framework class (also the default export of the package entry point)
export { DebrosFramework as default, DebrosFramework } from './DebrosFramework';
export type { DebrosFrameworkConfig, FrameworkMetrics, FrameworkStatus } from './DebrosFramework';
// Core model system: base class all models extend, plus the global registry
export { BaseModel } from './models/BaseModel';
export { ModelRegistry } from './core/ModelRegistry';
// Decorators for declaring models, fields, relationships and lifecycle hooks
export { Model } from './models/decorators/Model';
export { Field } from './models/decorators/Field';
export { BelongsTo, HasMany, HasOne, ManyToMany } from './models/decorators/relationships';
export {
  BeforeCreate,
  AfterCreate,
  BeforeUpdate,
  AfterUpdate,
  BeforeDelete,
  AfterDelete,
} from './models/decorators/hooks';
// Core services: database/shard management, configuration, OrbitDB/IPFS wrappers
export { DatabaseManager } from './core/DatabaseManager';
export { ShardManager } from './sharding/ShardManager';
export { ConfigManager } from './core/ConfigManager';
export { FrameworkOrbitDBService, FrameworkIPFSService } from './services/OrbitDBService';
// Query system: builder, executor, optimizer, and result cache
export { QueryBuilder } from './query/QueryBuilder';
export { QueryExecutor } from './query/QueryExecutor';
export { QueryOptimizer } from './query/QueryOptimizer';
export { QueryCache } from './query/QueryCache';
// Relationship system: eager/lazy loading and relationship-level caching
export { RelationshipManager } from './relationships/RelationshipManager';
export { RelationshipCache } from './relationships/RelationshipCache';
export { LazyLoader } from './relationships/LazyLoader';
export type { RelationshipLoadOptions, EagerLoadPlan } from './relationships/RelationshipManager';
// Automatic features: smart pinning and PubSub event publishing
export { PinningManager } from './pinning/PinningManager';
export { PubSubManager } from './pubsub/PubSubManager';
// Migration system: manager plus fluent builder helpers
export { MigrationManager } from './migrations/MigrationManager';
export { MigrationBuilder, createMigration } from './migrations/MigrationBuilder';
export type {
  Migration,
  MigrationOperation,
  MigrationValidator,
  MigrationContext,
  MigrationProgress,
  MigrationResult,
} from './migrations/MigrationManager';
// Type definitions shared across the framework surface
export type {
  StoreType,
  FrameworkConfig,
  CacheConfig,
  PinningConfig,
  PinningStrategy,
  PinningStats,
  ShardingConfig,
  ValidationResult,
} from './types/framework';
export type { FieldConfig, RelationshipConfig, ModelConfig, ValidationError } from './types/models';
// Utility functions and helpers
// export { ValidationError } from './types/models'; // Already exported above
// Version information
export const FRAMEWORK_VERSION = '1.0.0';
export const API_VERSION = '1.0';
// Feature flags for conditional exports
export const FEATURES = {
  MODELS: true,
  RELATIONSHIPS: true,
  QUERIES: true,
  MIGRATIONS: true,
  PINNING: true,
  PUBSUB: true,
  CACHING: true,
  SHARDING: true,
} as const;
// Quick setup helpers
import { DebrosFramework, DebrosFrameworkConfig } from './DebrosFramework';

/** Convenience factory: build a framework instance from an optional config. */
export function createFramework(config?: DebrosFrameworkConfig) {
  const framework = DebrosFramework.create(config);
  return framework;
}

/** Convenience factory: build a framework wired to existing OrbitDB/IPFS services. */
export async function createFrameworkWithServices(
  orbitDBService: any,
  ipfsService: any,
  config?: DebrosFrameworkConfig,
) {
  const framework = await DebrosFramework.createWithServices(orbitDBService, ipfsService, config);
  return framework;
}
// Export default configuration presets
// Development: permissive and verbose — auto-migrate, debug logging, no pinning.
export const DEVELOPMENT_CONFIG: Partial<DebrosFrameworkConfig> = {
  environment: 'development',
  features: {
    autoMigration: true,
    automaticPinning: false,
    pubsub: true,
    queryCache: true,
    relationshipCache: true,
  },
  performance: {
    queryTimeout: 30000,
    batchSize: 50,
  },
  monitoring: {
    enableMetrics: true,
    logLevel: 'debug',
  },
};
// Production: conservative — no auto-migration, pinning on, tighter timeouts.
export const PRODUCTION_CONFIG: Partial<DebrosFrameworkConfig> = {
  environment: 'production',
  features: {
    autoMigration: false, // Require manual migration in production
    automaticPinning: true,
    pubsub: true,
    queryCache: true,
    relationshipCache: true,
  },
  performance: {
    queryTimeout: 10000,
    batchSize: 200,
    maxConcurrentOperations: 500,
  },
  monitoring: {
    enableMetrics: true,
    logLevel: 'warn',
    metricsInterval: 30000,
  },
};
// Test: everything nondeterministic or slow is off; only errors are logged.
export const TEST_CONFIG: Partial<DebrosFrameworkConfig> = {
  environment: 'test',
  features: {
    autoMigration: true,
    automaticPinning: false,
    pubsub: false,
    queryCache: false,
    relationshipCache: false,
  },
  performance: {
    queryTimeout: 5000,
    batchSize: 10,
  },
  monitoring: {
    enableMetrics: false,
    logLevel: 'error',
  },
};

View File

@ -0,0 +1,460 @@
/**
* MigrationBuilder - Fluent API for Creating Migrations
*
* This class provides a convenient fluent interface for creating migration objects
* with built-in validation and common operation patterns.
*/
import { Migration, MigrationOperation, MigrationValidator } from './MigrationManager';
import { FieldConfig } from '../types/models';
/**
 * Fluent builder for {@link Migration} objects.
 *
 * Accumulates forward (`up`) and reverse (`down`) operations; reverse
 * operations are unshifted so rollback runs in the opposite order of the
 * forward operations. Call {@link build} to produce an immutable snapshot.
 */
export class MigrationBuilder {
  private migration: Partial<Migration>;
  private upOperations: MigrationOperation[] = [];
  private downOperations: MigrationOperation[] = [];
  private validators: MigrationValidator[] = [];

  constructor(id: string, version: string, name: string) {
    this.migration = {
      id,
      version,
      name,
      description: '',
      targetModels: [],
      createdAt: Date.now(),
      tags: [],
    };
  }

  // Basic migration metadata

  /** Set the human-readable description of the migration. */
  description(desc: string): this {
    this.migration.description = desc;
    return this;
  }

  /** Record who authored the migration. */
  author(author: string): this {
    this.migration.author = author;
    return this;
  }

  /** Replace the migration's tag list. */
  tags(...tags: string[]): this {
    this.migration.tags = tags;
    return this;
  }

  /** Replace the list of models this migration targets. */
  targetModels(...models: string[]): this {
    this.migration.targetModels = models;
    return this;
  }

  /** Declare migration IDs that must run before this one. */
  dependencies(...migrationIds: string[]): this {
    this.migration.dependencies = migrationIds;
    return this;
  }

  // Field operations

  /** Add a field to a model; the reverse operation removes it again. */
  addField(modelName: string, fieldName: string, fieldConfig: FieldConfig): this {
    this.upOperations.push({
      type: 'add_field',
      modelName,
      fieldName,
      fieldConfig,
    });
    // Auto-generate reverse operation
    this.downOperations.unshift({
      type: 'remove_field',
      modelName,
      fieldName,
    });
    this.ensureTargetModel(modelName);
    return this;
  }

  /**
   * Remove a field from a model. Without the original field config a true
   * reverse is impossible, so by default rollback only logs a warning.
   */
  removeField(modelName: string, fieldName: string, preserveData: boolean = false): this {
    this.upOperations.push({
      type: 'remove_field',
      modelName,
      fieldName,
    });
    if (!preserveData) {
      // Cannot auto-reverse field removal without knowing the original config
      this.downOperations.unshift({
        type: 'custom',
        modelName,
        customOperation: async (context) => {
          context.logger.warn(`Cannot reverse removal of field ${fieldName} - data may be lost`);
        },
      });
    }
    this.ensureTargetModel(modelName);
    return this;
  }

  /**
   * Change a field's configuration. Pass the old config to make the
   * migration reversible; otherwise no reverse operation is generated.
   */
  modifyField(
    modelName: string,
    fieldName: string,
    newFieldConfig: FieldConfig,
    oldFieldConfig?: FieldConfig,
  ): this {
    this.upOperations.push({
      type: 'modify_field',
      modelName,
      fieldName,
      fieldConfig: newFieldConfig,
    });
    if (oldFieldConfig) {
      this.downOperations.unshift({
        type: 'modify_field',
        modelName,
        fieldName,
        fieldConfig: oldFieldConfig,
      });
    }
    this.ensureTargetModel(modelName);
    return this;
  }

  /** Rename a field; the reverse operation renames it back. */
  renameField(modelName: string, oldFieldName: string, newFieldName: string): this {
    this.upOperations.push({
      type: 'rename_field',
      modelName,
      fieldName: oldFieldName,
      newFieldName,
    });
    // Auto-generate reverse operation
    this.downOperations.unshift({
      type: 'rename_field',
      modelName,
      fieldName: newFieldName,
      newFieldName: oldFieldName,
    });
    this.ensureTargetModel(modelName);
    return this;
  }

  // Data transformation operations

  /**
   * Apply a record-level transformation. Reversible only when a
   * reverseTransformer is supplied.
   */
  transformData(
    modelName: string,
    transformer: (data: any) => any,
    reverseTransformer?: (data: any) => any,
  ): this {
    this.upOperations.push({
      type: 'transform_data',
      modelName,
      transformer,
    });
    if (reverseTransformer) {
      this.downOperations.unshift({
        type: 'transform_data',
        modelName,
        transformer: reverseTransformer,
      });
    }
    this.ensureTargetModel(modelName);
    return this;
  }

  // Custom operations

  /** Register an arbitrary operation (and optional rollback) for a model. */
  customOperation(
    modelName: string,
    operation: (context: any) => Promise<void>,
    rollbackOperation?: (context: any) => Promise<void>,
  ): this {
    this.upOperations.push({
      type: 'custom',
      modelName,
      customOperation: operation,
    });
    if (rollbackOperation) {
      this.downOperations.unshift({
        type: 'custom',
        modelName,
        customOperation: rollbackOperation,
      });
    }
    this.ensureTargetModel(modelName);
    return this;
  }

  // Common patterns

  /** Add createdAt/updatedAt number fields to a model. */
  addTimestamps(modelName: string): this {
    // Fix: use function defaults (as addUuid already does) so the timestamp
    // is computed when the migration backfills each record, instead of being
    // frozen to the moment the builder was invoked.
    this.addField(modelName, 'createdAt', {
      type: 'number',
      required: false,
      default: () => Date.now(),
    });
    this.addField(modelName, 'updatedAt', {
      type: 'number',
      required: false,
      default: () => Date.now(),
    });
    return this;
  }

  /** Add a nullable deletedAt field used for soft deletion. */
  addSoftDeletes(modelName: string): this {
    this.addField(modelName, 'deletedAt', {
      type: 'number',
      required: false,
      default: null,
    });
    return this;
  }

  /** Add a unique UUID field; the default generates a fresh v4-style UUID. */
  addUuid(modelName: string, fieldName: string = 'uuid'): this {
    this.addField(modelName, fieldName, {
      type: 'string',
      required: true,
      unique: true,
      default: () => this.generateUuid(),
    });
    return this;
  }

  /**
   * Rename a model. Placeholder implementation — a real rename would touch
   * the model registry, database names, etc.
   */
  renameModel(oldModelName: string, newModelName: string): this {
    // This would require more complex operations across the entire system
    this.customOperation(
      oldModelName,
      async (context) => {
        context.logger.info(`Renaming model ${oldModelName} to ${newModelName}`);
        // Implementation would involve updating model registry, database names, etc.
      },
      async (context) => {
        context.logger.info(`Reverting model rename ${newModelName} to ${oldModelName}`);
      },
    );
    return this;
  }

  // Migration patterns for common scenarios

  /** Create an index over the given fields; reverse removes it. */
  createIndex(modelName: string, fieldNames: string[], options: any = {}): this {
    this.upOperations.push({
      type: 'add_index',
      modelName,
      indexConfig: {
        fields: fieldNames,
        ...options,
      },
    });
    this.downOperations.unshift({
      type: 'remove_index',
      modelName,
      indexConfig: {
        fields: fieldNames,
        ...options,
      },
    });
    this.ensureTargetModel(modelName);
    return this;
  }

  // Data migration helpers

  /**
   * Copy records from one model to another, mapping field names and
   * optionally filtering/transforming each record. Processes in batches.
   * Note: not reversible — no down operation is generated.
   */
  migrateData(
    fromModel: string,
    toModel: string,
    fieldMapping: Record<string, string>,
    options: {
      batchSize?: number;
      condition?: (data: any) => boolean;
      transform?: (data: any) => any;
    } = {},
  ): this {
    this.customOperation(fromModel, async (context) => {
      context.logger.info(`Migrating data from ${fromModel} to ${toModel}`);
      const records = await context.databaseManager.getAllRecords(fromModel);
      const batchSize = options.batchSize || 100;
      for (let i = 0; i < records.length; i += batchSize) {
        const batch = records.slice(i, i + batchSize);
        for (const record of batch) {
          if (options.condition && !options.condition(record)) {
            continue;
          }
          const newRecord: any = {};
          // Map fields
          for (const [oldField, newField] of Object.entries(fieldMapping)) {
            if (oldField in record) {
              newRecord[newField] = record[oldField];
            }
          }
          // Apply transformation if provided
          if (options.transform) {
            Object.assign(newRecord, options.transform(newRecord));
          }
          await context.databaseManager.createRecord(toModel, newRecord);
        }
      }
    });
    this.ensureTargetModel(fromModel);
    this.ensureTargetModel(toModel);
    return this;
  }

  // Validation

  /** Attach a custom pre/post-migration validator. */
  addValidator(
    name: string,
    description: string,
    validateFn: (context: any) => Promise<any>,
  ): this {
    this.validators.push({
      name,
      description,
      validate: validateFn,
    });
    return this;
  }

  /** Validator asserting a field exists (placeholder implementation). */
  validateFieldExists(modelName: string, fieldName: string): this {
    return this.addValidator(
      `validate_${fieldName}_exists`,
      `Ensure field ${fieldName} exists in ${modelName}`,
      async (_context) => {
        // Implementation would check if field exists
        return { valid: true, errors: [], warnings: [] };
      },
    );
  }

  /** Validator running a caller-supplied integrity check over all records. */
  validateDataIntegrity(modelName: string, checkFn: (records: any[]) => any): this {
    return this.addValidator(
      `validate_${modelName}_integrity`,
      `Validate data integrity for ${modelName}`,
      async (context) => {
        const records = await context.databaseManager.getAllRecords(modelName);
        return checkFn(records);
      },
    );
  }

  // Build the final migration

  /**
   * Produce the Migration object.
   * @throws Error when no target models or no up operations were declared.
   */
  build(): Migration {
    if (!this.migration.targetModels || this.migration.targetModels.length === 0) {
      throw new Error('Migration must have at least one target model');
    }
    if (this.upOperations.length === 0) {
      throw new Error('Migration must have at least one operation');
    }
    return {
      id: this.migration.id!,
      version: this.migration.version!,
      name: this.migration.name!,
      description: this.migration.description!,
      // Fix: return copies of the accumulated arrays so continued use of the
      // builder cannot mutate an already-built Migration.
      targetModels: [...this.migration.targetModels!],
      up: [...this.upOperations],
      down: [...this.downOperations],
      dependencies: this.migration.dependencies,
      validators: this.validators.length > 0 ? [...this.validators] : undefined,
      createdAt: this.migration.createdAt!,
      author: this.migration.author,
      tags: this.migration.tags,
    };
  }

  // Helper methods

  /** Ensure a model is listed in targetModels exactly once. */
  private ensureTargetModel(modelName: string): void {
    if (!this.migration.targetModels!.includes(modelName)) {
      this.migration.targetModels!.push(modelName);
    }
  }

  /** Generate a v4-style UUID from Math.random (not cryptographically secure). */
  private generateUuid(): string {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
      const r = (Math.random() * 16) | 0;
      const v = c === 'x' ? r : (r & 0x3) | 0x8;
      return v.toString(16);
    });
  }

  // Static factory methods for common migration types

  /** Start a fresh builder. */
  static create(id: string, version: string, name: string): MigrationBuilder {
    return new MigrationBuilder(id, version, name);
  }

  /** One-shot migration adding a single field. */
  static addFieldMigration(
    id: string,
    version: string,
    modelName: string,
    fieldName: string,
    fieldConfig: FieldConfig,
  ): Migration {
    return new MigrationBuilder(id, version, `Add ${fieldName} to ${modelName}`)
      .description(`Add new field ${fieldName} to ${modelName} model`)
      .addField(modelName, fieldName, fieldConfig)
      .build();
  }

  /** One-shot migration removing a single field. */
  static removeFieldMigration(
    id: string,
    version: string,
    modelName: string,
    fieldName: string,
  ): Migration {
    return new MigrationBuilder(id, version, `Remove ${fieldName} from ${modelName}`)
      .description(`Remove field ${fieldName} from ${modelName} model`)
      .removeField(modelName, fieldName)
      .build();
  }

  /** One-shot migration renaming a single field. */
  static renameFieldMigration(
    id: string,
    version: string,
    modelName: string,
    oldFieldName: string,
    newFieldName: string,
  ): Migration {
    return new MigrationBuilder(
      id,
      version,
      `Rename ${oldFieldName} to ${newFieldName} in ${modelName}`,
    )
      .description(`Rename field ${oldFieldName} to ${newFieldName} in ${modelName} model`)
      .renameField(modelName, oldFieldName, newFieldName)
      .build();
  }

  /** One-shot data-transformation migration. */
  static dataTransformMigration(
    id: string,
    version: string,
    modelName: string,
    description: string,
    transformer: (data: any) => any,
    reverseTransformer?: (data: any) => any,
  ): Migration {
    return new MigrationBuilder(id, version, `Transform data in ${modelName}`)
      .description(description)
      .transformData(modelName, transformer, reverseTransformer)
      .build();
  }
}
// Export convenience function for creating migrations
/** Shorthand for starting a new MigrationBuilder. */
export function createMigration(id: string, version: string, name: string): MigrationBuilder {
  return new MigrationBuilder(id, version, name);
}

View File

@ -0,0 +1,972 @@
/**
* MigrationManager - Schema Migration and Data Transformation System
*
* This class handles:
* - Schema version management across distributed databases
* - Automatic data migration and transformation
* - Rollback capabilities for failed migrations
* - Conflict resolution during migration
* - Migration validation and integrity checks
* - Cross-shard migration coordination
*/
import { FieldConfig } from '../types/models';
/** A complete, versioned schema migration with forward and reverse operations. */
export interface Migration {
  id: string;
  version: string;
  name: string;
  description: string;
  targetModels: string[];
  up: MigrationOperation[];
  down: MigrationOperation[];
  dependencies?: string[]; // Migration IDs that must run before this one
  validators?: MigrationValidator[];
  createdAt: number;
  author?: string;
  tags?: string[];
}
/** A single step of a migration; which optional fields apply depends on `type`. */
export interface MigrationOperation {
  type:
    | 'add_field'
    | 'remove_field'
    | 'modify_field'
    | 'rename_field'
    | 'add_index'
    | 'remove_index'
    | 'transform_data'
    | 'custom';
  modelName: string;
  fieldName?: string;
  newFieldName?: string;
  fieldConfig?: FieldConfig;
  indexConfig?: any;
  transformer?: (data: any) => any;
  customOperation?: (context: MigrationContext) => Promise<void>;
  rollbackOperation?: (context: MigrationContext) => Promise<void>;
  options?: {
    batchSize?: number;
    parallel?: boolean;
    skipValidation?: boolean;
  };
}
/** Named check run before/after a migration to verify preconditions or results. */
export interface MigrationValidator {
  name: string;
  description: string;
  validate: (context: MigrationContext) => Promise<ValidationResult>;
}
/** Everything an operation, validator or custom hook needs while running. */
export interface MigrationContext {
  migration: Migration;
  modelName: string;
  databaseManager: any;
  shardManager: any;
  currentData?: any[];
  operation: MigrationOperation;
  progress: MigrationProgress;
  logger: MigrationLogger;
}
/** Mutable live status of a running (or finished) migration. */
export interface MigrationProgress {
  migrationId: string;
  status: 'pending' | 'running' | 'completed' | 'failed' | 'rolled_back';
  startedAt?: number;
  completedAt?: number;
  totalRecords: number;
  processedRecords: number;
  errorCount: number;
  warnings: string[];
  errors: string[];
  currentOperation?: string;
  estimatedTimeRemaining?: number;
}
/** Immutable summary recorded in migration history after a run. */
export interface MigrationResult {
  migrationId: string;
  success: boolean;
  duration: number;
  recordsProcessed: number;
  recordsModified: number;
  warnings: string[];
  errors: string[];
  rollbackAvailable: boolean;
}
/** Minimal structured logger contract used throughout the migration system. */
export interface MigrationLogger {
  info: (message: string, meta?: any) => void;
  warn: (message: string, meta?: any) => void;
  error: (message: string, meta?: any) => void;
  debug: (message: string, meta?: any) => void;
}
/** Outcome of a MigrationValidator run. */
export interface ValidationResult {
  valid: boolean;
  errors: string[];
  warnings: string[];
}
export class MigrationManager {
private databaseManager: any; // injected data-access facade (untyped here)
private shardManager: any; // injected shard coordinator (untyped here)
private migrations: Map<string, Migration> = new Map(); // registry keyed by migration id
private migrationHistory: Map<string, MigrationResult[]> = new Map(); // past run results
private activeMigrations: Map<string, MigrationProgress> = new Map(); // currently running, keyed by id
private migrationOrder: string[] = []; // execution order, maintained by updateMigrationOrder()
private logger: MigrationLogger;
constructor(databaseManager: any, shardManager: any, logger?: MigrationLogger) {
  this.databaseManager = databaseManager;
  this.shardManager = shardManager;
  // Fall back to a console-backed logger when none is injected.
  this.logger = logger || this.createDefaultLogger();
}
// Register a new migration. Validates its structure, rejects a version that
// is already claimed by a different migration id (re-registering the same id
// is allowed and overwrites), then refreshes the execution order.
registerMigration(migration: Migration): void {
  // Validate migration structure
  this.validateMigrationStructure(migration);
  // Check for version conflicts
  const existingMigration = Array.from(this.migrations.values()).find(
    (m) => m.version === migration.version,
  );
  if (existingMigration && existingMigration.id !== migration.id) {
    throw new Error(`Migration version ${migration.version} already exists with different ID`);
  }
  this.migrations.set(migration.id, migration);
  this.updateMigrationOrder();
  this.logger.info(`Registered migration: ${migration.name} (${migration.version})`, {
    migrationId: migration.id,
    targetModels: migration.targetModels,
  });
}
// Get all registered migrations
getMigrations(): Migration[] {
  // Snapshot the registry, then order by semantic version.
  const all = [...this.migrations.values()];
  all.sort((a, b) => this.compareVersions(a.version, b.version));
  return all;
}
// Get migration by ID
getMigration(migrationId: string): Migration | null {
  // Normalize a miss to null (rather than undefined) for a stable API.
  const found = this.migrations.get(migrationId);
  return found ?? null;
}
// Get pending migrations for a model or all models
getPendingMigrations(modelName?: string): Migration[] {
  // A migration is pending when it has never been applied and, when a model
  // name is given, it targets that model.
  const appliedIds = new Set(this.getAppliedMigrations(modelName).map((m) => m.migrationId));
  const pending: Migration[] = [];
  for (const migration of this.getMigrations()) {
    if (appliedIds.has(migration.id)) continue;
    if (modelName && !migration.targetModels.includes(modelName)) continue;
    pending.push(migration);
  }
  return pending;
}
// Run a specific migration end-to-end: dependency check, optional dry run,
// pre-validation, execution, post-validation, and history recording. On any
// failure it attempts a rollback, records the failed result, and rethrows.
// Throws if the migration is unknown or already running.
async runMigration(
  migrationId: string,
  options: {
    dryRun?: boolean;
    batchSize?: number;
    parallelShards?: boolean;
    skipValidation?: boolean;
  } = {},
): Promise<MigrationResult> {
  const migration = this.migrations.get(migrationId);
  if (!migration) {
    throw new Error(`Migration ${migrationId} not found`);
  }
  // Check if migration is already running
  if (this.activeMigrations.has(migrationId)) {
    throw new Error(`Migration ${migrationId} is already running`);
  }
  // Check dependencies
  await this.validateDependencies(migration);
  const startTime = Date.now();
  const progress: MigrationProgress = {
    migrationId,
    status: 'running',
    startedAt: startTime,
    totalRecords: 0,
    processedRecords: 0,
    errorCount: 0,
    warnings: [],
    errors: [],
  };
  this.activeMigrations.set(migrationId, progress);
  try {
    this.logger.info(`Starting migration: ${migration.name}`, {
      migrationId,
      dryRun: options.dryRun,
      options,
    });
    // Dry runs short-circuit here and never touch real data.
    if (options.dryRun) {
      return await this.performDryRun(migration, options);
    }
    // Pre-migration validation
    if (!options.skipValidation) {
      await this.runPreMigrationValidation(migration);
    }
    // Execute migration operations
    const result = await this.executeMigration(migration, options, progress);
    // Post-migration validation
    if (!options.skipValidation) {
      await this.runPostMigrationValidation(migration);
    }
    // Record successful migration
    progress.status = 'completed';
    progress.completedAt = Date.now();
    await this.recordMigrationResult(result);
    this.logger.info(`Migration completed: ${migration.name}`, {
      migrationId,
      duration: result.duration,
      recordsProcessed: result.recordsProcessed,
    });
    return result;
  } catch (error: any) {
    progress.status = 'failed';
    progress.errors.push(error.message);
    this.logger.error(`Migration failed: ${migration.name}`, {
      migrationId,
      error: error.message,
      stack: error.stack,
    });
    // Attempt rollback if possible; record the failure either way.
    const rollbackResult = await this.attemptRollback(migration, progress);
    const result: MigrationResult = {
      migrationId,
      success: false,
      duration: Date.now() - startTime,
      recordsProcessed: progress.processedRecords,
      recordsModified: 0,
      warnings: progress.warnings,
      errors: progress.errors,
      rollbackAvailable: rollbackResult.success,
    };
    await this.recordMigrationResult(result);
    throw error;
  } finally {
    // Always release the "running" slot, even on failure or dry run.
    this.activeMigrations.delete(migrationId);
  }
}
// Run all pending migrations in order, collecting per-migration results.
// Failures are skipped (logged) unless stopOnError is set, in which case the
// thrown error propagates to the caller.
async runPendingMigrations(
  options: {
    modelName?: string;
    dryRun?: boolean;
    stopOnError?: boolean;
    batchSize?: number;
  } = {},
): Promise<MigrationResult[]> {
  const pendingMigrations = this.getPendingMigrations(options.modelName);
  const results: MigrationResult[] = [];
  this.logger.info(`Running ${pendingMigrations.length} pending migrations`, {
    modelName: options.modelName,
    dryRun: options.dryRun,
  });
  for (const migration of pendingMigrations) {
    try {
      const result = await this.runMigration(migration.id, {
        dryRun: options.dryRun,
        batchSize: options.batchSize,
      });
      results.push(result);
      if (!result.success && options.stopOnError) {
        this.logger.warn('Stopping migration run due to error', {
          failedMigration: migration.id,
          stopOnError: options.stopOnError,
        });
        break;
      }
    } catch (error) {
      // runMigration throws on failure; rethrow or skip per stopOnError.
      if (options.stopOnError) {
        throw error;
      }
      this.logger.error(`Skipping failed migration: ${migration.id}`, { error });
    }
  }
  return results;
}
// Roll back a previously applied migration by executing its down operations.
// Throws when the migration is unknown or was never successfully applied.
async rollbackMigration(migrationId: string): Promise<MigrationResult> {
  const migration = this.migrations.get(migrationId);
  if (!migration) {
    throw new Error(`Migration ${migrationId} not found`);
  }
  // Only migrations with a recorded successful run can be rolled back.
  const appliedMigrations = this.getAppliedMigrations();
  const isApplied = appliedMigrations.some((m) => m.migrationId === migrationId && m.success);
  if (!isApplied) {
    throw new Error(`Migration ${migrationId} has not been applied`);
  }
  const startTime = Date.now();
  const progress: MigrationProgress = {
    migrationId,
    status: 'running',
    startedAt: startTime,
    totalRecords: 0,
    processedRecords: 0,
    errorCount: 0,
    warnings: [],
    errors: [],
  };
  try {
    this.logger.info(`Starting rollback: ${migration.name}`, { migrationId });
    const result = await this.executeRollback(migration, progress);
    // A rollback cannot itself be rolled back.
    result.rollbackAvailable = false;
    await this.recordMigrationResult(result);
    this.logger.info(`Rollback completed: ${migration.name}`, {
      migrationId,
      duration: result.duration,
    });
    return result;
  } catch (error: any) {
    this.logger.error(`Rollback failed: ${migration.name}`, {
      migrationId,
      error: error.message,
    });
    throw error;
  }
}
// Execute all up operations of a migration, model by model, accumulating
// processed/modified counts into both the result and the shared progress.
private async executeMigration(
  migration: Migration,
  options: any,
  progress: MigrationProgress,
): Promise<MigrationResult> {
  const startTime = Date.now();
  let totalProcessed = 0;
  let totalModified = 0;
  // Outer loop fixes the model; inner loop picks only the operations that
  // target it, preserving the declared operation order per model.
  for (const modelName of migration.targetModels) {
    for (const operation of migration.up) {
      if (operation.modelName !== modelName) continue;
      progress.currentOperation = `${operation.type} on ${operation.modelName}.${operation.fieldName || 'N/A'}`;
      this.logger.debug(`Executing operation: ${progress.currentOperation}`, {
        migrationId: migration.id,
        operation: operation.type,
      });
      const context: MigrationContext = {
        migration,
        modelName,
        databaseManager: this.databaseManager,
        shardManager: this.shardManager,
        operation,
        progress,
        logger: this.logger,
      };
      const operationResult = await this.executeOperation(context, options);
      totalProcessed += operationResult.processed;
      totalModified += operationResult.modified;
      progress.processedRecords = totalProcessed;
    }
  }
  return {
    migrationId: migration.id,
    success: true,
    duration: Date.now() - startTime,
    recordsProcessed: totalProcessed,
    recordsModified: totalModified,
    warnings: progress.warnings,
    errors: progress.errors,
    // Rollback is only offered when reverse operations exist.
    rollbackAvailable: migration.down.length > 0,
  };
}
// Execute a single migration operation
private async executeOperation(
context: MigrationContext,
options: any,
): Promise<{ processed: number; modified: number }> {
const { operation } = context;
switch (operation.type) {
case 'add_field':
return await this.executeAddField(context, options);
case 'remove_field':
return await this.executeRemoveField(context, options);
case 'modify_field':
return await this.executeModifyField(context, options);
case 'rename_field':
return await this.executeRenameField(context, options);
case 'transform_data':
return await this.executeDataTransformation(context, options);
case 'custom':
return await this.executeCustomOperation(context, options);
default:
throw new Error(`Unsupported operation type: ${operation.type}`);
}
}
// Execute add field operation: backfill the new field, with its configured
// default, into every existing record that does not already have it.
private async executeAddField(
  context: MigrationContext,
  options: any,
): Promise<{ processed: number; modified: number }> {
  const { operation } = context;
  if (!operation.fieldName || !operation.fieldConfig) {
    throw new Error('Add field operation requires fieldName and fieldConfig');
  }
  // Update model metadata (in a real implementation, this would update the model registry)
  this.logger.info(`Adding field ${operation.fieldName} to ${operation.modelName}`, {
    fieldConfig: operation.fieldConfig,
  });
  // Get all records for this model
  const records = await this.getAllRecordsForModel(operation.modelName);
  let modified = 0;
  // Add default value to existing records, in batches.
  const batchSize = options.batchSize || 100;
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    for (const record of batch) {
      if (!(operation.fieldName in record)) {
        // Fix: use ?? instead of || so legitimate falsy defaults (0, false,
        // '') are kept rather than being silently replaced with null.
        // NOTE(review): function-valued defaults (see MigrationBuilder.addUuid)
        // are stored as-is here — confirm whether they should be invoked per record.
        record[operation.fieldName] = operation.fieldConfig.default ?? null;
        await this.updateRecord(operation.modelName, record);
        modified++;
      }
    }
    context.progress.processedRecords += batch.length;
  }
  return { processed: records.length, modified };
}
// Execute remove field operation
private async executeRemoveField(
  context: MigrationContext,
  options: any,
): Promise<{ processed: number; modified: number }> {
  // Strip the field from every stored record of the model, in batches.
  const { operation } = context;
  if (!operation.fieldName) {
    throw new Error('Remove field operation requires fieldName');
  }

  this.logger.info(`Removing field ${operation.fieldName} from ${operation.modelName}`);

  const records = await this.getAllRecordsForModel(operation.modelName);
  const batchSize = options.batchSize || 100;
  let modified = 0;

  for (let start = 0; start < records.length; start += batchSize) {
    const batch = records.slice(start, start + batchSize);
    for (const record of batch) {
      if (!(operation.fieldName in record)) continue;
      delete record[operation.fieldName];
      await this.updateRecord(operation.modelName, record);
      modified++;
    }
    context.progress.processedRecords += batch.length;
  }

  return { processed: records.length, modified };
}
// Execute modify field operation: re-coerce each record's existing value to
// the new field type via convertFieldValue, writing back only real changes.
private async executeModifyField(
  context: MigrationContext,
  options: any,
): Promise<{ processed: number; modified: number }> {
  const { operation } = context;
  if (!operation.fieldName || !operation.fieldConfig) {
    throw new Error('Modify field operation requires fieldName and fieldConfig');
  }
  this.logger.info(`Modifying field ${operation.fieldName} in ${operation.modelName}`, {
    newConfig: operation.fieldConfig,
  });
  const records = await this.getAllRecordsForModel(operation.modelName);
  let modified = 0;
  const batchSize = options.batchSize || 100;
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    for (const record of batch) {
      if (operation.fieldName in record) {
        // Apply type conversion if needed; skip the write when unchanged.
        const oldValue = record[operation.fieldName];
        const newValue = this.convertFieldValue(oldValue, operation.fieldConfig);
        if (newValue !== oldValue) {
          record[operation.fieldName] = newValue;
          await this.updateRecord(operation.modelName, record);
          modified++;
        }
      }
    }
    context.progress.processedRecords += batch.length;
  }
  return { processed: records.length, modified };
}
// Execute rename field operation: copy the value to the new key and delete
// the old key on every record that carries the field.
private async executeRenameField(
  context: MigrationContext,
  options: any,
): Promise<{ processed: number; modified: number }> {
  const { operation } = context;
  if (!operation.fieldName || !operation.newFieldName) {
    throw new Error('Rename field operation requires fieldName and newFieldName');
  }
  this.logger.info(
    `Renaming field ${operation.fieldName} to ${operation.newFieldName} in ${operation.modelName}`,
  );
  const records = await this.getAllRecordsForModel(operation.modelName);
  let modified = 0;
  const batchSize = options.batchSize || 100;
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    for (const record of batch) {
      if (operation.fieldName in record) {
        record[operation.newFieldName] = record[operation.fieldName];
        delete record[operation.fieldName];
        await this.updateRecord(operation.modelName, record);
        modified++;
      }
    }
    context.progress.processedRecords += batch.length;
  }
  return { processed: records.length, modified };
}
// Execute data transformation operation: run the user-supplied transformer on
// every record, persisting only records whose JSON serialization changed.
// Per-record failures are collected into progress instead of aborting the run.
private async executeDataTransformation(
  context: MigrationContext,
  options: any,
): Promise<{ processed: number; modified: number }> {
  const { operation } = context;
  if (!operation.transformer) {
    throw new Error('Transform data operation requires transformer function');
  }
  this.logger.info(`Transforming data for ${operation.modelName}`);
  const records = await this.getAllRecordsForModel(operation.modelName);
  let modified = 0;
  const batchSize = options.batchSize || 100;
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    for (const record of batch) {
      try {
        // Snapshot-then-compare detects whether the transformer changed anything.
        const originalRecord = JSON.stringify(record);
        const transformedRecord = await operation.transformer(record);
        if (JSON.stringify(transformedRecord) !== originalRecord) {
          Object.assign(record, transformedRecord);
          await this.updateRecord(operation.modelName, record);
          modified++;
        }
      } catch (error: any) {
        context.progress.errors.push(`Transform error for record ${record.id}: ${error.message}`);
        context.progress.errorCount++;
      }
    }
    context.progress.processedRecords += batch.length;
  }
  return { processed: records.length, modified };
}
// Execute custom operation
private async executeCustomOperation(
  context: MigrationContext,
  _options: any,
): Promise<{ processed: number; modified: number }> {
  // Custom operations own their own side effects and record counting; we
  // only surface errors into progress and report a nominal count.
  const { operation } = context;
  if (!operation.customOperation) {
    throw new Error('Custom operation requires customOperation function');
  }

  this.logger.info(`Executing custom operation for ${operation.modelName}`);

  try {
    await operation.customOperation(context);
  } catch (error: any) {
    context.progress.errors.push(`Custom operation error: ${error.message}`);
    throw error;
  }
  return { processed: 1, modified: 1 }; // Custom operations handle their own counting
}
// Helper methods for data access
// Placeholder: returns no records. A real implementation would query every
// shard of the model via shardManager/databaseManager.
private async getAllRecordsForModel(modelName: string): Promise<any[]> {
  // In a real implementation, this would query all shards for the model
  // For now, return empty array as placeholder
  this.logger.debug(`Getting all records for model: ${modelName}`);
  return [];
}
/**
 * Persists a modified record back to its home database.
 *
 * Placeholder: currently logs only; no write is performed yet.
 */
private async updateRecord(modelName: string, record: any): Promise<void> {
  // In a real implementation, this would update the record in the appropriate database
  this.logger.debug(`Updating record in ${modelName}:`, { id: record.id });
}
/**
 * Coerces a raw value to match a field's declared type.
 *
 * Absent values (null/undefined) are normalized to null for every field
 * type so callers can distinguish "missing" from a converted falsy value.
 * (Fix: the previous implementation wrapped null into `[null]` for array
 * fields, inconsistent with every other branch.)
 *
 * Unknown types are passed through unchanged.
 */
private convertFieldValue(value: any, fieldConfig: FieldConfig): any {
  // Uniform absent-value handling across all field types.
  if (value == null) {
    return null;
  }
  switch (fieldConfig.type) {
    case 'string':
      return String(value);
    case 'number':
      return Number(value);
    case 'boolean':
      return Boolean(value);
    case 'array':
      // Wrap scalars so the result is always an array.
      return Array.isArray(value) ? value : [value];
    default:
      return value;
  }
}
// Validation methods
private validateMigrationStructure(migration: Migration): void {
if (!migration.id || !migration.version || !migration.name) {
throw new Error('Migration must have id, version, and name');
}
if (!migration.targetModels || migration.targetModels.length === 0) {
throw new Error('Migration must specify target models');
}
if (!migration.up || migration.up.length === 0) {
throw new Error('Migration must have at least one up operation');
}
// Validate operations
for (const operation of migration.up) {
this.validateOperation(operation);
}
if (migration.down) {
for (const operation of migration.down) {
this.validateOperation(operation);
}
}
}
/**
 * Validates a single migration operation: its type must be one of the
 * supported kinds and it must name the model it applies to.
 */
private validateOperation(operation: MigrationOperation): void {
  const validTypes = new Set([
    'add_field',
    'remove_field',
    'modify_field',
    'rename_field',
    'add_index',
    'remove_index',
    'transform_data',
    'custom',
  ]);
  if (!validTypes.has(operation.type)) {
    throw new Error(`Invalid operation type: ${operation.type}`);
  }
  if (!operation.modelName) {
    throw new Error('Operation must specify modelName');
  }
}
private async validateDependencies(migration: Migration): Promise<void> {
if (!migration.dependencies) return;
const appliedMigrations = this.getAppliedMigrations();
const appliedIds = new Set(appliedMigrations.map((m) => m.migrationId));
for (const dependencyId of migration.dependencies) {
if (!appliedIds.has(dependencyId)) {
throw new Error(`Migration dependency not satisfied: ${dependencyId}`);
}
}
}
/**
 * Runs each declared validator before any operation executes.
 *
 * Throws if any validator reports failure; warnings are accumulated onto
 * the active migration's progress record.
 *
 * NOTE(review): the context handed to validators carries an empty
 * modelName and only the first `up` operation, and the non-null assertion
 * assumes the migration is already registered in `activeMigrations` —
 * verify callers always create the progress entry first.
 */
private async runPreMigrationValidation(migration: Migration): Promise<void> {
  if (!migration.validators) return;
  for (const validator of migration.validators) {
    this.logger.debug(`Running pre-migration validator: ${validator.name}`);
    const context: MigrationContext = {
      migration,
      modelName: '', // Will be set per model
      databaseManager: this.databaseManager,
      shardManager: this.shardManager,
      operation: migration.up[0], // First operation for context
      progress: this.activeMigrations.get(migration.id)!,
      logger: this.logger,
    };
    const result = await validator.validate(context);
    if (!result.valid) {
      throw new Error(`Pre-migration validation failed: ${result.errors.join(', ')}`);
    }
    if (result.warnings.length > 0) {
      context.progress.warnings.push(...result.warnings);
    }
  }
}
/**
 * Hook for validation after a migration completes.
 * Currently a stub: it only logs; no validators are executed yet.
 */
private async runPostMigrationValidation(_migration: Migration): Promise<void> {
  // Similar to pre-migration validation but runs after
  this.logger.debug('Running post-migration validation');
}
// Rollback operations
/**
 * Applies a migration's `down` operations for every target model, in
 * reverse declaration order, and reports aggregate counts.
 *
 * Fix: the previous implementation called `migration.down.reverse()`
 * inside the per-model loop, mutating the migration object in place —
 * with more than one target model the operation order flipped on every
 * iteration. The reversed order is now computed once, on a copy.
 *
 * @throws if the migration defines no rollback operations.
 */
private async executeRollback(
  migration: Migration,
  progress: MigrationProgress,
): Promise<MigrationResult> {
  if (!migration.down || migration.down.length === 0) {
    throw new Error('Migration has no rollback operations defined');
  }
  const startTime = Date.now();
  let totalProcessed = 0;
  let totalModified = 0;
  // Reverse once, on a copy, so migration.down itself is never mutated.
  const rollbackOperations = [...migration.down].reverse();
  // Execute rollback operations in reverse order
  for (const modelName of migration.targetModels) {
    for (const operation of rollbackOperations) {
      if (operation.modelName !== modelName) continue;
      const context: MigrationContext = {
        migration,
        modelName,
        databaseManager: this.databaseManager,
        shardManager: this.shardManager,
        operation,
        progress,
        logger: this.logger,
      };
      const operationResult = await this.executeOperation(context, {});
      totalProcessed += operationResult.processed;
      totalModified += operationResult.modified;
    }
  }
  return {
    migrationId: migration.id,
    success: true,
    duration: Date.now() - startTime,
    recordsProcessed: totalProcessed,
    recordsModified: totalModified,
    warnings: progress.warnings,
    errors: progress.errors,
    rollbackAvailable: false,
  };
}
/**
 * Best-effort rollback after a failed migration: runs the down operations
 * when they exist and marks the progress record. Never throws — a failed
 * or unavailable rollback is reported as { success: false } (and logged
 * when it failed).
 */
private async attemptRollback(
  migration: Migration,
  progress: MigrationProgress,
): Promise<{ success: boolean }> {
  const canRollback = Boolean(migration.down && migration.down.length > 0);
  if (canRollback) {
    try {
      await this.executeRollback(migration, progress);
      progress.status = 'rolled_back';
      return { success: true };
    } catch (error: any) {
      this.logger.error(`Rollback failed for migration ${migration.id}`, { error });
    }
  }
  return { success: false };
}
// Dry run functionality
/**
 * Simulates a migration without touching data: estimates the number of
 * affected records and logs each operation that would run.
 *
 * Fix: `rollbackAvailable` previously read `migration.down.length`
 * directly and threw for migrations with no `down` operations defined —
 * the field is optional, as executeRollback's own guard shows.
 */
private async performDryRun(migration: Migration, _options: any): Promise<MigrationResult> {
  this.logger.info(`Performing dry run for migration: ${migration.name}`);
  const startTime = Date.now();
  let estimatedRecords = 0;
  // Estimate the number of records that would be affected
  for (const modelName of migration.targetModels) {
    const modelRecords = await this.countRecordsForModel(modelName);
    estimatedRecords += modelRecords;
  }
  // Simulate operations without actually modifying data
  for (const operation of migration.up) {
    this.logger.debug(`Dry run operation: ${operation.type} on ${operation.modelName}`);
  }
  return {
    migrationId: migration.id,
    success: true,
    duration: Date.now() - startTime,
    recordsProcessed: estimatedRecords,
    recordsModified: estimatedRecords, // Estimate
    warnings: ['This was a dry run - no data was actually modified'],
    errors: [],
    rollbackAvailable: (migration.down?.length ?? 0) > 0,
  };
}
/**
 * Counts a model's records across all shards.
 * Placeholder: always returns 0 until shard-wide counting is implemented,
 * so dry-run estimates are currently always zero.
 */
private async countRecordsForModel(_modelName: string): Promise<number> {
  // In a real implementation, this would count records across all shards
  return 0;
}
// Migration history and state management
/**
 * Returns every successful migration result recorded so far.
 * The _modelName parameter is accepted for API symmetry but is not yet
 * used for filtering.
 */
private getAppliedMigrations(_modelName?: string): MigrationResult[] {
  return [...this.migrationHistory.values()].flatMap((results) =>
    results.filter((result) => result.success),
  );
}
private async recordMigrationResult(result: MigrationResult): Promise<void> {
if (!this.migrationHistory.has(result.migrationId)) {
this.migrationHistory.set(result.migrationId, []);
}
this.migrationHistory.get(result.migrationId)!.push(result);
// In a real implementation, this would persist to database
this.logger.debug('Recorded migration result', { result });
}
// Version comparison
/**
 * Compares two dotted version strings numerically, segment by segment.
 * Missing or non-numeric segments are treated as 0 (so "1.2" == "1.2.0").
 * @returns -1, 0, or 1 for less-than, equal, greater-than.
 */
private compareVersions(version1: string, version2: string): number {
  const left = version1.split('.').map(Number);
  const right = version2.split('.').map(Number);
  const segments = Math.max(left.length, right.length);
  for (let i = 0; i < segments; i++) {
    // `|| 0` also normalizes NaN from non-numeric segments, as before.
    const diff = (left[i] || 0) - (right[i] || 0);
    if (diff !== 0) {
      return diff < 0 ? -1 : 1;
    }
  }
  return 0;
}
/**
 * Recomputes the execution order of all registered migrations by
 * ascending version.
 */
private updateMigrationOrder(): void {
  this.migrationOrder = [...this.migrations.values()]
    .sort((a, b) => this.compareVersions(a.version, b.version))
    .map((migration) => migration.id);
}
// Utility methods
/**
 * Builds the console-backed fallback logger used when no logger is
 * injected. Output format is `[MIGRATION <LEVEL>] <message>` with
 * optional metadata.
 */
private createDefaultLogger(): MigrationLogger {
  const emit =
    (write: (...args: any[]) => void, tag: string) =>
    (message: string, meta?: any) =>
      write(`[MIGRATION ${tag}] ${message}`, meta || '');
  return {
    info: emit(console.log, 'INFO'),
    warn: emit(console.warn, 'WARN'),
    error: emit(console.error, 'ERROR'),
    debug: emit(console.log, 'DEBUG'),
  };
}
// Status and monitoring
/** Progress of a currently running migration, or null when not active. */
getMigrationProgress(migrationId: string): MigrationProgress | null {
  return this.activeMigrations.get(migrationId) ?? null;
}
/** Snapshot of every migration currently in flight. */
getActiveMigrations(): MigrationProgress[] {
  return [...this.activeMigrations.values()];
}
/**
 * Recorded results, optionally restricted to a single migration id.
 * The aggregate view is sorted by duration, longest first.
 */
getMigrationHistory(migrationId?: string): MigrationResult[] {
  if (migrationId) {
    return this.migrationHistory.get(migrationId) ?? [];
  }
  return [...this.migrationHistory.values()]
    .flat()
    .sort((a, b) => b.duration - a.duration);
}
// Cleanup and maintenance
/**
 * Drops all in-flight progress tracking. Recorded history is retained.
 */
async cleanup(): Promise<void> {
  this.logger.info('Cleaning up migration manager');
  this.activeMigrations.clear();
}
}

View File

@ -0,0 +1,529 @@
import { StoreType, ValidationResult, ShardingConfig, PinningConfig } from '../types/framework';
import { FieldConfig, RelationshipConfig, ValidationError } from '../types/models';
import { QueryBuilder } from '../query/QueryBuilder';
/**
 * Abstract active-record base class for all framework models.
 *
 * Provides CRUD (save/update/delete), static query entry points that hand
 * off to QueryBuilder, relationship loading via the framework's
 * RelationshipManager, JSON (de)serialization, field validation driven by
 * the static `fields` map, and lifecycle hooks.
 *
 * Static metadata (modelName, dbType, scope, sharding, pinning, fields,
 * relationships, hooks) is populated by the @Model/@Field/relationship/
 * hook decorators.
 */
export abstract class BaseModel {
  // Instance properties
  public id: string = '';
  public createdAt: number = 0;
  public updatedAt: number = 0;
  public _loadedRelations: Map<string, any> = new Map();
  protected _isDirty: boolean = false; // set when the instance changes after load
  protected _isNew: boolean = true; // false once persisted (or hydrated with an id)
  // Static properties for model configuration
  static modelName: string;
  static dbType: StoreType = 'docstore';
  static scope: 'user' | 'global' = 'global';
  static sharding?: ShardingConfig;
  static pinning?: PinningConfig;
  static fields: Map<string, FieldConfig> = new Map();
  static relationships: Map<string, RelationshipConfig> = new Map();
  static hooks: Map<string, Function[]> = new Map();
  constructor(data: any = {}) {
    this.fromJSON(data);
  }
  // Core CRUD operations
  /**
   * Validates and persists the instance: inserts when new (generating an
   * id and timestamps), updates when dirty, no-ops otherwise. Runs the
   * matching before/after create/update hooks around the write.
   * @throws ValidationError when field validation fails.
   */
  async save(): Promise<this> {
    await this.validate();
    if (this._isNew) {
      await this.beforeCreate();
      // Generate ID if not provided
      if (!this.id) {
        this.id = this.generateId();
      }
      this.createdAt = Date.now();
      this.updatedAt = this.createdAt;
      // Save to database (will be implemented when database manager is ready)
      await this._saveToDatabase();
      this._isNew = false;
      this._isDirty = false;
      await this.afterCreate();
    } else if (this._isDirty) {
      await this.beforeUpdate();
      this.updatedAt = Date.now();
      // Update in database
      await this._updateInDatabase();
      this._isDirty = false;
      await this.afterUpdate();
    }
    return this;
  }
  /** Convenience: constructs an instance from `data` and saves it. */
  static async create<T extends BaseModel>(this: new (data?: any) => T, data: any): Promise<T> {
    const instance = new this(data);
    return await instance.save();
  }
  /**
   * Fetches a single instance by id, or null when not found.
   * Not yet implemented — always throws until the query system exists.
   */
  static async get<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    _id: string,
  ): Promise<T | null> {
    // Will be implemented when query system is ready
    throw new Error('get method not yet implemented - requires query system');
  }
  /** Like get(), but throws instead of returning null when missing. */
  static async find<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    id: string,
  ): Promise<T> {
    const result = await this.get(id);
    if (!result) {
      throw new Error(`${this.name} with id ${id} not found`);
    }
    return result;
  }
  /** Applies a partial patch, marks the instance dirty, and saves. */
  async update(data: Partial<this>): Promise<this> {
    Object.assign(this, data);
    this._isDirty = true;
    return await this.save();
  }
  /**
   * Deletes the record from its database, running before/after delete
   * hooks. Returns true when the delete succeeded.
   */
  async delete(): Promise<boolean> {
    await this.beforeDelete();
    // Delete from database (will be implemented when database manager is ready)
    const success = await this._deleteFromDatabase();
    if (success) {
      await this.afterDelete();
    }
    return success;
  }
  // Query operations (return QueryBuilder instances)
  static where<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    field: string,
    operator: string,
    value: any,
  ): QueryBuilder<T> {
    return new QueryBuilder<T>(this as any).where(field, operator, value);
  }
  static whereIn<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    field: string,
    values: any[],
  ): QueryBuilder<T> {
    return new QueryBuilder<T>(this as any).whereIn(field, values);
  }
  static orderBy<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    field: string,
    direction: 'asc' | 'desc' = 'asc',
  ): QueryBuilder<T> {
    return new QueryBuilder<T>(this as any).orderBy(field, direction);
  }
  static limit<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
    count: number,
  ): QueryBuilder<T> {
    return new QueryBuilder<T>(this as any).limit(count);
  }
  /** Executes an unconstrained query and returns every instance. */
  static async all<T extends BaseModel>(
    this: typeof BaseModel & (new (data?: any) => T),
  ): Promise<T[]> {
    return await new QueryBuilder<T>(this as any).exec();
  }
  // Relationship operations
  /**
   * Eager-loads the named relationships onto this instance via the
   * framework's RelationshipManager (no-op with a warning when the
   * framework is not initialized).
   */
  async load(relationships: string[]): Promise<this> {
    const framework = this.getFrameworkInstance();
    if (!framework?.relationshipManager) {
      console.warn('RelationshipManager not available, skipping relationship loading');
      return this;
    }
    await framework.relationshipManager.eagerLoadRelationships([this], relationships);
    return this;
  }
  /** Loads one relationship, returning the cached value when present. */
  async loadRelation(relationName: string): Promise<any> {
    // Check if already loaded
    if (this._loadedRelations.has(relationName)) {
      return this._loadedRelations.get(relationName);
    }
    const framework = this.getFrameworkInstance();
    if (!framework?.relationshipManager) {
      console.warn('RelationshipManager not available, cannot load relationship');
      return null;
    }
    return await framework.relationshipManager.loadRelationship(this, relationName);
  }
  // Advanced relationship loading methods
  /** Loads one relationship with caller-supplied query constraints. */
  async loadRelationWithConstraints(
    relationName: string,
    constraints: (query: any) => any,
  ): Promise<any> {
    const framework = this.getFrameworkInstance();
    if (!framework?.relationshipManager) {
      console.warn('RelationshipManager not available, cannot load relationship');
      return null;
    }
    return await framework.relationshipManager.loadRelationship(this, relationName, {
      constraints,
    });
  }
  /** Drops the local and manager-side cache for a relation, then reloads it. */
  async reloadRelation(relationName: string): Promise<any> {
    // Clear cached relationship
    this._loadedRelations.delete(relationName);
    const framework = this.getFrameworkInstance();
    if (framework?.relationshipManager) {
      framework.relationshipManager.invalidateRelationshipCache(this, relationName);
    }
    return await this.loadRelation(relationName);
  }
  getLoadedRelations(): string[] {
    return Array.from(this._loadedRelations.keys());
  }
  isRelationLoaded(relationName: string): boolean {
    return this._loadedRelations.has(relationName);
  }
  getRelation(relationName: string): any {
    return this._loadedRelations.get(relationName);
  }
  setRelation(relationName: string, value: any): void {
    this._loadedRelations.set(relationName, value);
  }
  clearRelation(relationName: string): void {
    this._loadedRelations.delete(relationName);
  }
  // Serialization
  /**
   * Serializes own, non-underscore-prefixed properties plus any loaded
   * relations.
   *
   * NOTE(review): fields declared via the @Field decorator store their
   * values on `_`-prefixed backing keys behind prototype accessors; this
   * own-property loop skips both, so decorated field values may be missing
   * from the output — verify against the Field decorator's storage scheme.
   */
  toJSON(): any {
    const result: any = {};
    // Include all enumerable properties
    for (const key in this) {
      if (this.hasOwnProperty(key) && !key.startsWith('_')) {
        result[key] = (this as any)[key];
      }
    }
    // Include loaded relations
    this._loadedRelations.forEach((value, key) => {
      result[key] = value;
    });
    return result;
  }
  /**
   * Hydrates the instance from a plain object, skipping internal state
   * keys. An incoming id marks the instance as already persisted.
   */
  fromJSON(data: any): this {
    if (!data) return this;
    // Set basic properties
    Object.keys(data).forEach((key) => {
      if (key !== '_loadedRelations' && key !== '_isDirty' && key !== '_isNew') {
        (this as any)[key] = data[key];
      }
    });
    // Mark as existing if it has an ID
    if (this.id) {
      this._isNew = false;
    }
    return this;
  }
  // Validation
  /**
   * Validates every declared field against its FieldConfig.
   * Returns a passing ValidationResult, or throws ValidationError with the
   * collected messages — callers never receive a failing result object.
   */
  async validate(): Promise<ValidationResult> {
    const errors: string[] = [];
    const modelClass = this.constructor as typeof BaseModel;
    // Validate each field
    for (const [fieldName, fieldConfig] of modelClass.fields) {
      const value = (this as any)[fieldName];
      const fieldErrors = this.validateField(fieldName, value, fieldConfig);
      errors.push(...fieldErrors);
    }
    const result = { valid: errors.length === 0, errors };
    if (!result.valid) {
      throw new ValidationError(errors);
    }
    return result;
  }
  /** Collects validation messages for one field value (empty = valid). */
  private validateField(fieldName: string, value: any, config: FieldConfig): string[] {
    const errors: string[] = [];
    // Required validation
    if (config.required && (value === undefined || value === null || value === '')) {
      errors.push(`${fieldName} is required`);
      return errors; // No point in further validation if required field is missing
    }
    // Skip further validation if value is empty and not required
    if (value === undefined || value === null) {
      return errors;
    }
    // Type validation
    if (!this.isValidType(value, config.type)) {
      errors.push(`${fieldName} must be of type ${config.type}`);
    }
    // Custom validation
    if (config.validate) {
      const customResult = config.validate(value);
      if (customResult === false) {
        errors.push(`${fieldName} failed custom validation`);
      } else if (typeof customResult === 'string') {
        errors.push(customResult);
      }
    }
    return errors;
  }
  /** Runtime type check matching FieldConfig's type vocabulary. */
  private isValidType(value: any, expectedType: FieldConfig['type']): boolean {
    switch (expectedType) {
      case 'string':
        return typeof value === 'string';
      case 'number':
        return typeof value === 'number' && !isNaN(value);
      case 'boolean':
        return typeof value === 'boolean';
      case 'array':
        return Array.isArray(value);
      case 'object':
        // Note: null also has typeof 'object' and would pass here.
        return typeof value === 'object' && !Array.isArray(value);
      case 'date':
        return value instanceof Date || (typeof value === 'number' && !isNaN(value));
      default:
        return true;
    }
  }
  // Hook methods (can be overridden by subclasses)
  async beforeCreate(): Promise<void> {
    await this.runHooks('beforeCreate');
  }
  async afterCreate(): Promise<void> {
    await this.runHooks('afterCreate');
  }
  async beforeUpdate(): Promise<void> {
    await this.runHooks('beforeUpdate');
  }
  async afterUpdate(): Promise<void> {
    await this.runHooks('afterUpdate');
  }
  async beforeDelete(): Promise<void> {
    await this.runHooks('beforeDelete');
  }
  async afterDelete(): Promise<void> {
    await this.runHooks('afterDelete');
  }
  /** Runs all decorator-registered hooks for an event, bound to this instance. */
  private async runHooks(hookName: string): Promise<void> {
    const modelClass = this.constructor as typeof BaseModel;
    const hooks = modelClass.hooks.get(hookName) || [];
    for (const hook of hooks) {
      await hook.call(this);
    }
  }
  // Utility methods
  // NOTE(review): timestamp + Math.random is not collision-proof across
  // nodes, and String.prototype.substr is deprecated — consider a UUID.
  private generateId(): string {
    return Date.now().toString(36) + Math.random().toString(36).substr(2);
  }
  // Database operations integrated with DatabaseManager
  /**
   * Inserts this record into the appropriate database: the owner's
   * user-scoped store (requires a userId field), a shard selected by id,
   * or the single global store for the model.
   */
  private async _saveToDatabase(): Promise<void> {
    const framework = this.getFrameworkInstance();
    if (!framework) {
      console.warn('Framework not initialized, skipping database save');
      return;
    }
    const modelClass = this.constructor as typeof BaseModel;
    try {
      if (modelClass.scope === 'user') {
        // For user-scoped models, we need a userId
        const userId = (this as any).userId;
        if (!userId) {
          throw new Error('User-scoped models must have a userId field');
        }
        const database = await framework.databaseManager.getUserDatabase(
          userId,
          modelClass.modelName,
        );
        await framework.databaseManager.addDocument(database, modelClass.dbType, this.toJSON());
      } else {
        // For global models
        if (modelClass.sharding) {
          // Use sharded database
          const shard = framework.shardManager.getShardForKey(modelClass.modelName, this.id);
          await framework.databaseManager.addDocument(
            shard.database,
            modelClass.dbType,
            this.toJSON(),
          );
        } else {
          // Use single global database
          const database = await framework.databaseManager.getGlobalDatabase(modelClass.modelName);
          await framework.databaseManager.addDocument(database, modelClass.dbType, this.toJSON());
        }
      }
    } catch (error) {
      console.error('Failed to save to database:', error);
      throw error;
    }
  }
  /** Updates this record in place; same routing rules as _saveToDatabase. */
  private async _updateInDatabase(): Promise<void> {
    const framework = this.getFrameworkInstance();
    if (!framework) {
      console.warn('Framework not initialized, skipping database update');
      return;
    }
    const modelClass = this.constructor as typeof BaseModel;
    try {
      if (modelClass.scope === 'user') {
        const userId = (this as any).userId;
        if (!userId) {
          throw new Error('User-scoped models must have a userId field');
        }
        const database = await framework.databaseManager.getUserDatabase(
          userId,
          modelClass.modelName,
        );
        await framework.databaseManager.updateDocument(
          database,
          modelClass.dbType,
          this.id,
          this.toJSON(),
        );
      } else {
        if (modelClass.sharding) {
          const shard = framework.shardManager.getShardForKey(modelClass.modelName, this.id);
          await framework.databaseManager.updateDocument(
            shard.database,
            modelClass.dbType,
            this.id,
            this.toJSON(),
          );
        } else {
          const database = await framework.databaseManager.getGlobalDatabase(modelClass.modelName);
          await framework.databaseManager.updateDocument(
            database,
            modelClass.dbType,
            this.id,
            this.toJSON(),
          );
        }
      }
    } catch (error) {
      console.error('Failed to update in database:', error);
      throw error;
    }
  }
  /**
   * Removes this record; same routing rules as _saveToDatabase.
   * Returns false (with a warning) when the framework is unavailable.
   */
  private async _deleteFromDatabase(): Promise<boolean> {
    const framework = this.getFrameworkInstance();
    if (!framework) {
      console.warn('Framework not initialized, skipping database delete');
      return false;
    }
    const modelClass = this.constructor as typeof BaseModel;
    try {
      if (modelClass.scope === 'user') {
        const userId = (this as any).userId;
        if (!userId) {
          throw new Error('User-scoped models must have a userId field');
        }
        const database = await framework.databaseManager.getUserDatabase(
          userId,
          modelClass.modelName,
        );
        await framework.databaseManager.deleteDocument(database, modelClass.dbType, this.id);
      } else {
        if (modelClass.sharding) {
          const shard = framework.shardManager.getShardForKey(modelClass.modelName, this.id);
          await framework.databaseManager.deleteDocument(
            shard.database,
            modelClass.dbType,
            this.id,
          );
        } else {
          const database = await framework.databaseManager.getGlobalDatabase(modelClass.modelName);
          await framework.databaseManager.deleteDocument(database, modelClass.dbType, this.id);
        }
      }
      return true;
    } catch (error) {
      console.error('Failed to delete from database:', error);
      throw error;
    }
  }
  // The framework registers itself on globalThis at startup; this returns
  // that singleton (untyped until DebrosFramework is defined).
  private getFrameworkInstance(): any {
    // This will be properly typed when DebrosFramework is created
    return (globalThis as any).__debrosFramework;
  }
  // Static methods for framework integration
  static setStore(store: any): void {
    (this as any)._store = store;
  }
  static setShards(shards: any[]): void {
    (this as any)._shards = shards;
  }
  static getStore(): any {
    return (this as any)._store;
  }
  static getShards(): any[] {
    return (this as any)._shards || [];
  }
}

View File

@ -0,0 +1,119 @@
import { FieldConfig, ValidationError } from '../../types/models';
/**
 * Property decorator that registers a field's configuration on the model
 * class and installs a validating/transforming accessor pair.
 *
 * On assignment the value first passes through `config.transform` (if
 * any), then is validated; invalid values throw a ValidationError.
 * Successful writes set the instance's `_isDirty` flag when it exists.
 *
 * NOTE(review): when `config.default` is given it is defined on the
 * prototype's backing key, so a mutable default (e.g. an array) is shared
 * by every instance until the field is first assigned — verify this is
 * intended.
 */
export function Field(config: FieldConfig) {
  return function (target: any, propertyKey: string) {
    // Initialize fields map if it doesn't exist
    if (!target.constructor.fields) {
      target.constructor.fields = new Map();
    }
    // Store field configuration
    target.constructor.fields.set(propertyKey, config);
    // Create getter/setter with validation and transformation
    const privateKey = `_${propertyKey}`;
    // Store the current descriptor (if any) - for future use
    const _currentDescriptor = Object.getOwnPropertyDescriptor(target, propertyKey);
    Object.defineProperty(target, propertyKey, {
      get() {
        return this[privateKey];
      },
      set(value) {
        // Apply transformation first
        const transformedValue = config.transform ? config.transform(value) : value;
        // Validate the field value
        const validationResult = validateFieldValue(transformedValue, config, propertyKey);
        if (!validationResult.valid) {
          throw new ValidationError(validationResult.errors);
        }
        // Set the value and mark as dirty
        this[privateKey] = transformedValue;
        if (this._isDirty !== undefined) {
          this._isDirty = true;
        }
      },
      enumerable: true,
      configurable: true,
    });
    // Set default value if provided (defined on the prototype — the
    // assignment in the setter later shadows it with an own property).
    if (config.default !== undefined) {
      Object.defineProperty(target, privateKey, {
        value: config.default,
        writable: true,
        enumerable: false,
        configurable: true,
      });
    }
  };
}
/**
 * Validates one field value against its configuration.
 *
 * Order of checks: required/presence first (short-circuits), then type,
 * then the optional custom validator, which may veto with `false` or
 * supply its own error message string.
 */
function validateFieldValue(
  value: any,
  config: FieldConfig,
  fieldName: string,
): { valid: boolean; errors: string[] } {
  const absent = value === undefined || value === null;
  if (config.required && (absent || value === '')) {
    return { valid: false, errors: [`${fieldName} is required`] };
  }
  // Optional and absent: nothing further to check.
  if (absent) {
    return { valid: true, errors: [] };
  }
  const errors: string[] = [];
  if (!isValidType(value, config.type)) {
    errors.push(`${fieldName} must be of type ${config.type}`);
  }
  if (config.validate) {
    const outcome = config.validate(value);
    if (outcome === false) {
      errors.push(`${fieldName} failed custom validation`);
    } else if (typeof outcome === 'string') {
      errors.push(outcome);
    }
  }
  return { valid: errors.length === 0, errors };
}
/**
 * Runtime type check for a field value. Unknown expected types are
 * accepted. NaN fails 'number'; arrays fail 'object'; 'date' accepts a
 * Date instance or a finite numeric timestamp.
 */
function isValidType(value: any, expectedType: FieldConfig['type']): boolean {
  switch (expectedType) {
    case 'string':
    case 'boolean':
      return typeof value === expectedType;
    case 'number':
      return typeof value === 'number' && !isNaN(value);
    case 'array':
      return Array.isArray(value);
    case 'object':
      return typeof value === 'object' && !Array.isArray(value);
    case 'date':
      if (value instanceof Date) return true;
      return typeof value === 'number' && !isNaN(value);
    default:
      return true;
  }
}
// Utility function to get field configuration
/**
 * Looks up the @Field configuration registered for a property on the
 * target's class; undefined when the class has no fields map or the
 * property was not decorated.
 */
export function getFieldConfig(target: any, propertyKey: string): FieldConfig | undefined {
  return target.constructor.fields?.get(propertyKey);
}
// Export the decorator type for TypeScript
// (factory signature: config in, legacy property decorator out)
export type FieldDecorator = (config: FieldConfig) => (target: any, propertyKey: string) => void;

View File

@ -0,0 +1,55 @@
import { BaseModel } from '../BaseModel';
import { ModelConfig } from '../../types/models';
import { StoreType } from '../../types/framework';
import { ModelRegistry } from '../../core/ModelRegistry';
/**
 * Class decorator that wires a model into the framework: stamps the
 * declarative configuration onto the class's static metadata and
 * registers the class with the ModelRegistry. Database creation is
 * deferred until the DatabaseManager schedules it (TODO).
 */
export function Model(config: ModelConfig = {}) {
  return function <T extends typeof BaseModel>(target: T): T {
    Object.assign(target, {
      modelName: config.tableName || target.name,
      dbType: config.type || autoDetectType(target),
      scope: config.scope || 'global',
      sharding: config.sharding,
      pinning: config.pinning,
    });
    ModelRegistry.register(target.name, target, config);
    return target;
  };
}
/**
 * Suggests a store type by inspecting the model's declared fields.
 *
 * Every current path resolves to 'docstore': models without fields or
 * with complex (object/array) fields need a document store, and even
 * simple-field models default to it for flexibility. The scan is kept so
 * the heuristic can later return 'keyvalue' for simple-only models.
 *
 * (Cleanup: removed the `_hasSimpleFields` local from the original,
 * which was assigned but never read.)
 */
function autoDetectType(modelClass: typeof BaseModel): StoreType {
  const fields = modelClass.fields;
  if (!fields || fields.size === 0) {
    return 'docstore'; // Default for complex objects
  }
  for (const fieldConfig of fields.values()) {
    if (fieldConfig.type === 'object' || fieldConfig.type === 'array') {
      // Complex fields require a document store.
      return 'docstore';
    }
  }
  // Only simple fields: keyvalue would work, but docstore is more flexible.
  return 'docstore';
}
// Export the decorator type for TypeScript
// (factory signature: optional config in, class decorator out)
export type ModelDecorator = (config?: ModelConfig) => <T extends typeof BaseModel>(target: T) => T;

View File

@ -0,0 +1,64 @@
/**
 * Lifecycle hook decorators.
 *
 * Each decorator registers the decorated method under the corresponding
 * hook name on the declaring class; BaseModel dispatches the create/
 * update/delete hooks via its runHooks().
 *
 * NOTE(review): BaseModel.save() as currently written does not dispatch
 * 'beforeSave'/'afterSave' — verify those hooks are invoked somewhere
 * before relying on BeforeSave/AfterSave.
 */
export function BeforeCreate(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'beforeCreate', descriptor.value);
}
export function AfterCreate(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'afterCreate', descriptor.value);
}
export function BeforeUpdate(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'beforeUpdate', descriptor.value);
}
export function AfterUpdate(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'afterUpdate', descriptor.value);
}
export function BeforeDelete(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'beforeDelete', descriptor.value);
}
export function AfterDelete(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'afterDelete', descriptor.value);
}
export function BeforeSave(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'beforeSave', descriptor.value);
}
export function AfterSave(target: any, propertyKey: string, descriptor: PropertyDescriptor) {
  registerHook(target, 'afterSave', descriptor.value);
}
/**
 * Appends a hook function to the declaring class's hook registry under
 * the given lifecycle name, creating the registry on first use.
 */
function registerHook(target: any, hookName: string, hookFunction: Function): void {
  const ctor = target.constructor;
  if (!ctor.hooks) {
    ctor.hooks = new Map();
  }
  const registered = ctor.hooks.get(hookName) ?? [];
  registered.push(hookFunction);
  ctor.hooks.set(hookName, registered);
  console.log(`Registered ${hookName} hook for ${target.constructor.name}`);
}
// Utility function to get hooks for a specific event
/**
 * Returns the hook functions registered under `hookName` on the target's
 * class, or an empty array when none exist.
 */
export function getHooks(target: any, hookName: string): Function[] {
  return target.constructor.hooks?.get(hookName) ?? [];
}
// Export decorator types for TypeScript
export type HookDecorator = (
target: any,
propertyKey: string,
descriptor: PropertyDescriptor,
) => void;

View File

@ -0,0 +1,35 @@
// Model decorator
export { Model } from './Model';
// Field decorator
export { Field, getFieldConfig } from './Field';
// Relationship decorators
export { BelongsTo, HasMany, HasOne, ManyToMany, getRelationshipConfig } from './relationships';
// Hook decorators
export {
BeforeCreate,
AfterCreate,
BeforeUpdate,
AfterUpdate,
BeforeDelete,
AfterDelete,
BeforeSave,
AfterSave,
getHooks,
} from './hooks';
// Type exports
export type { ModelDecorator } from './Model';
export type { FieldDecorator } from './Field';
export type {
BelongsToDecorator,
HasManyDecorator,
HasOneDecorator,
ManyToManyDecorator,
} from './relationships';
export type { HookDecorator } from './hooks';

View File

@ -0,0 +1,167 @@
import { BaseModel } from '../BaseModel';
import { RelationshipConfig } from '../../types/models';
/**
 * Declares a belongs-to relation: this model stores `foreignKey`
 * referencing the parent model's local key (default 'id'). Resolution is
 * lazy by default.
 */
export function BelongsTo(
  model: typeof BaseModel,
  foreignKey: string,
  options: { localKey?: string } = {},
) {
  return (target: any, propertyKey: string): void => {
    const config: RelationshipConfig = {
      type: 'belongsTo',
      model,
      foreignKey,
      localKey: options.localKey || 'id',
      lazy: true,
    };
    registerRelationship(target, propertyKey, config);
    createRelationshipProperty(target, propertyKey, config);
  };
}
/**
 * Declares a one-to-many relation: the related model holds `foreignKey`
 * pointing back at this model's local key (default 'id'). An optional
 * `through` model routes the association via an intermediate table.
 * Resolution is lazy by default.
 */
export function HasMany(
  model: typeof BaseModel,
  foreignKey: string,
  options: { localKey?: string; through?: typeof BaseModel } = {},
) {
  return (target: any, propertyKey: string): void => {
    const config: RelationshipConfig = {
      type: 'hasMany',
      model,
      foreignKey,
      localKey: options.localKey || 'id',
      through: options.through,
      lazy: true,
    };
    registerRelationship(target, propertyKey, config);
    createRelationshipProperty(target, propertyKey, config);
  };
}
/**
 * Declares a one-to-one relation: the related model holds `foreignKey`
 * pointing back at this model's local key (default 'id'). Resolution is
 * lazy by default.
 */
export function HasOne(
  model: typeof BaseModel,
  foreignKey: string,
  options: { localKey?: string } = {},
) {
  return (target: any, propertyKey: string): void => {
    const config: RelationshipConfig = {
      type: 'hasOne',
      model,
      foreignKey,
      localKey: options.localKey || 'id',
      lazy: true,
    };
    registerRelationship(target, propertyKey, config);
    createRelationshipProperty(target, propertyKey, config);
  };
}
/**
 * Declares a many-to-many relation resolved through a join model.
 *
 * NOTE(review): `options.throughForeignKey` is accepted but never copied
 * into the RelationshipConfig below, so the join model's far-side key is
 * effectively ignored here — confirm whether the relationship loader
 * derives it, or whether this option needs to be wired through.
 */
export function ManyToMany(
  model: typeof BaseModel,
  through: typeof BaseModel,
  foreignKey: string,
  options: { localKey?: string; throughForeignKey?: string } = {},
) {
  return function (target: any, propertyKey: string) {
    const config: RelationshipConfig = {
      type: 'manyToMany',
      model,
      foreignKey,
      localKey: options.localKey || 'id',
      through,
      lazy: true,
    };
    registerRelationship(target, propertyKey, config);
    createRelationshipProperty(target, propertyKey, config);
  };
}
/**
 * Records a relationship configuration in the declaring class's static
 * registry, creating the map on first use.
 */
function registerRelationship(target: any, propertyKey: string, config: RelationshipConfig): void {
  const ctor = target.constructor;
  if (!ctor.relationships) {
    ctor.relationships = new Map();
  }
  ctor.relationships.set(propertyKey, config);
  console.log(
    `Registered ${config.type} relationship: ${target.constructor.name}.${propertyKey} -> ${config.model.name}`,
  );
}
/**
 * Installs an accessor for a relationship on the model prototype.
 *
 * The getter returns the cached value when loaded; otherwise, for lazy
 * relations, it triggers loadRelation and therefore returns a Promise —
 * callers must await property access for unloaded lazy relations. Eager
 * (non-lazy) relations throw until explicitly loaded. The setter caches
 * a value directly into `_loadedRelations`.
 */
function createRelationshipProperty(
  target: any,
  propertyKey: string,
  config: RelationshipConfig,
): void {
  const _relationshipKey = `_relationship_${propertyKey}`; // For future use
  Object.defineProperty(target, propertyKey, {
    get() {
      // Check if relationship is already loaded
      if (this._loadedRelations && this._loadedRelations.has(propertyKey)) {
        return this._loadedRelations.get(propertyKey);
      }
      if (config.lazy) {
        // Return a promise for lazy loading
        return this.loadRelation(propertyKey);
      } else {
        throw new Error(
          `Relationship '${propertyKey}' not loaded. Use .load(['${propertyKey}']) first.`,
        );
      }
    },
    set(value) {
      // Allow manual setting of relationship values
      if (!this._loadedRelations) {
        this._loadedRelations = new Map();
      }
      this._loadedRelations.set(propertyKey, value);
    },
    enumerable: true,
    configurable: true,
  });
}
/**
 * Looks up the relationship configuration registered for a property on the
 * target's class, or `undefined` when the class has no relationships or the
 * property was never declared as one.
 */
export function getRelationshipConfig(
  target: any,
  propertyKey: string,
): RelationshipConfig | undefined {
  // Optional chaining covers both "no registry yet" and "unknown property".
  return target.constructor.relationships?.get(propertyKey);
}
// Type definitions for decorators

/** Signature of the `BelongsTo` decorator factory: relation where THIS model holds the foreign key. */
export type BelongsToDecorator = (
  model: typeof BaseModel,
  foreignKey: string,
  options?: { localKey?: string },
) => (target: any, propertyKey: string) => void;

/** Signature of the `HasMany` decorator factory: one-to-many, optionally via a `through` model. */
export type HasManyDecorator = (
  model: typeof BaseModel,
  foreignKey: string,
  options?: { localKey?: string; through?: typeof BaseModel },
) => (target: any, propertyKey: string) => void;

/** Signature of the `HasOne` decorator factory: one-to-one where the related model holds the foreign key. */
export type HasOneDecorator = (
  model: typeof BaseModel,
  foreignKey: string,
  options?: { localKey?: string },
) => (target: any, propertyKey: string) => void;

/** Signature of the `ManyToMany` decorator factory: many-to-many through a join model. */
export type ManyToManyDecorator = (
  model: typeof BaseModel,
  through: typeof BaseModel,
  foreignKey: string,
  options?: { localKey?: string; throughForeignKey?: string },
) => (target: any, propertyKey: string) => void;

View File

@ -0,0 +1,598 @@
/**
* PinningManager - Automatic IPFS Pinning with Smart Strategies
*
* This class implements intelligent pinning strategies for IPFS content:
* - Fixed: Pin a fixed number of most important items
* - Popularity: Pin based on access frequency and recency
* - Size-based: Pin smaller items preferentially
* - Custom: User-defined pinning logic
* - Automatic cleanup of unpinned content
*/
import { PinningStrategy, PinningStats } from '../types/framework';
// Node.js types for compatibility
declare global {
namespace NodeJS {
interface Timeout {}
}
}
/** Per-model pinning policy consumed by {@link PinningManager}. */
export interface PinningRule {
  modelName: string;
  strategy?: PinningStrategy; // defaults to 'popularity' when omitted
  factor?: number; // multiplier applied to the computed priority score
  maxPins?: number; // cap on simultaneously pinned items for this model
  minAccessCount?: number; // items accessed fewer times are cleanup candidates
  maxAge?: number; // in milliseconds; older pins become cleanup candidates
  customLogic?: (item: any, stats: any) => number; // returns priority score
}

/** Bookkeeping record for one pinned IPFS object. */
export interface PinnedItem {
  hash: string;
  modelName: string;
  itemId: string;
  pinnedAt: number; // epoch ms when the pin was created
  lastAccessed: number; // epoch ms of the most recent recorded access
  accessCount: number;
  size: number; // bytes, as reported by IPFS object stat
  priority: number; // strategy-derived score; higher = more worth keeping
  metadata?: any;
}

/** Aggregate snapshot returned by {@link PinningManager.getMetrics}. */
export interface PinningMetrics {
  totalPinned: number;
  totalSize: number; // bytes
  averageSize: number; // bytes
  oldestPin: number; // epoch ms (0 when nothing is pinned)
  newestPin: number; // epoch ms (0 when nothing is pinned)
  mostAccessed: PinnedItem | null;
  leastAccessed: PinnedItem | null;
  strategyBreakdown: Map<PinningStrategy, number>; // pinned-item count per strategy
}
/**
 * Manages automatic IPFS pinning with per-model strategies (fixed, popularity,
 * size, age, custom), global pin-count/size limits, access tracking, and a
 * periodic cleanup loop that evicts stale or low-priority pins.
 */
export class PinningManager {
  private ipfsService: any;
  // hash -> tracked pin record
  private pinnedItems: Map<string, PinnedItem> = new Map();
  // modelName -> pinning policy
  private pinningRules: Map<string, PinningRule> = new Map();
  // hash -> access bookkeeping (survives for the lifetime of this manager)
  private accessLog: Map<string, { count: number; lastAccess: number }> = new Map();
  private cleanupInterval: NodeJS.Timeout | null = null;
  private maxTotalPins: number = 10000;
  private maxTotalSize: number = 10 * 1024 * 1024 * 1024; // 10GB
  private cleanupIntervalMs: number = 60000; // 1 minute

  constructor(
    ipfsService: any,
    options: {
      maxTotalPins?: number;
      maxTotalSize?: number;
      cleanupIntervalMs?: number;
    } = {},
  ) {
    this.ipfsService = ipfsService;
    this.maxTotalPins = options.maxTotalPins || this.maxTotalPins;
    this.maxTotalSize = options.maxTotalSize || this.maxTotalSize;
    this.cleanupIntervalMs = options.cleanupIntervalMs || this.cleanupIntervalMs;
    // Start automatic cleanup
    this.startAutoCleanup();
  }

  /**
   * Configures (or merges into) the pinning rule for a model.
   * Defaults to the 'popularity' strategy with factor 1.
   */
  setPinningRule(modelName: string, rule: Partial<PinningRule>): void {
    const existingRule = this.pinningRules.get(modelName);
    const newRule: PinningRule = {
      modelName,
      strategy: 'popularity' as const,
      factor: 1,
      ...existingRule,
      ...rule,
    };
    this.pinningRules.set(modelName, newRule);
    console.log(
      `📌 Set pinning rule for ${modelName}: ${newRule.strategy} (factor: ${newRule.factor})`,
    );
  }

  /**
   * Pins `hash` for `modelName:itemId` if the model's rule and the global
   * limits allow it. Returns true when the content ends up pinned (including
   * the already-pinned case), false when skipped or on error.
   */
  async pinContent(
    hash: string,
    modelName: string,
    itemId: string,
    metadata: any = {},
  ): Promise<boolean> {
    try {
      // Already pinned: just record the access.
      if (this.pinnedItems.has(hash)) {
        await this.recordAccess(hash);
        return true;
      }
      const rule = this.pinningRules.get(modelName);
      if (!rule) {
        console.warn(`No pinning rule found for model ${modelName}, skipping pin`);
        return false;
      }
      // Get content size
      const size = await this.getContentSize(hash);
      // Calculate priority based on strategy
      const priority = this.calculatePinningPriority(rule, metadata, size);
      // Check if we should pin based on priority and limits (may evict
      // lower-priority pins to make room).
      const shouldPin = await this.shouldPinContent(rule, priority, size);
      if (!shouldPin) {
        console.log(
          `⏭️ Skipping pin for ${hash} (${modelName}): priority too low or limits exceeded`,
        );
        return false;
      }
      // Perform the actual pinning
      await this.ipfsService.pin(hash);
      // Record the pinned item.
      const pinnedItem: PinnedItem = {
        hash,
        modelName,
        itemId,
        pinnedAt: Date.now(),
        lastAccessed: Date.now(),
        accessCount: 0, // recordAccess below brings this to 1
        size,
        priority,
        metadata,
      };
      this.pinnedItems.set(hash, pinnedItem);
      // BUG FIX: the original initialized accessCount to 1 AND called
      // recordAccess() un-awaited, double-counting the first access and
      // leaving the access log out of sync with the item counter.
      await this.recordAccess(hash);
      console.log(
        `📌 Pinned ${hash} (${modelName}:${itemId}) with priority ${priority.toFixed(2)}`,
      );
      // Cleanup if we've exceeded limits
      await this.enforceGlobalLimits();
      return true;
    } catch (error) {
      console.error(`Failed to pin ${hash}:`, error);
      return false;
    }
  }

  /**
   * Unpins tracked content. Unless `force` is set, recently-accessed,
   * high-priority, or frequently-accessed content is protected and kept.
   */
  async unpinContent(hash: string, force: boolean = false): Promise<boolean> {
    try {
      const pinnedItem = this.pinnedItems.get(hash);
      if (!pinnedItem) {
        console.warn(`Hash ${hash} is not tracked as pinned`);
        return false;
      }
      // Check if content should be protected from unpinning
      if (!force && (await this.isProtectedFromUnpinning(pinnedItem))) {
        console.log(`🔒 Content ${hash} is protected from unpinning`);
        return false;
      }
      await this.ipfsService.unpin(hash);
      this.pinnedItems.delete(hash);
      this.accessLog.delete(hash);
      console.log(`📌❌ Unpinned ${hash} (${pinnedItem.modelName}:${pinnedItem.itemId})`);
      return true;
    } catch (error) {
      console.error(`Failed to unpin ${hash}:`, error);
      return false;
    }
  }

  /** Records one access to `hash` in both the pin record and the access log. */
  async recordAccess(hash: string): Promise<void> {
    const pinnedItem = this.pinnedItems.get(hash);
    if (pinnedItem) {
      pinnedItem.lastAccessed = Date.now();
      pinnedItem.accessCount++;
    }
    // Update the access log (also tracks content that is not currently pinned).
    const accessInfo = this.accessLog.get(hash) || { count: 0, lastAccess: 0 };
    accessInfo.count++;
    accessInfo.lastAccess = Date.now();
    this.accessLog.set(hash, accessInfo);
  }

  /**
   * Computes the priority score for a prospective pin according to the rule's
   * strategy. Higher scores win when pins compete for limited slots/space.
   */
  private calculatePinningPriority(rule: PinningRule, metadata: any, size: number): number {
    const now = Date.now();
    let priority = 0;
    switch (rule.strategy || 'popularity') {
      case 'fixed':
        // Fixed strategy: all items have equal priority.
        priority = rule.factor || 1;
        break;
      case 'popularity': {
        // Popularity-based: recency of last access (60%) + total access count (40%).
        // NOTE(review): relies on `metadata.hash` being supplied by the caller;
        // when absent the access lookup always misses — confirm call sites.
        const accessInfo = this.accessLog.get(metadata.hash) || { count: 0, lastAccess: 0 };
        const recencyScore = Math.max(0, 1 - (now - accessInfo.lastAccess) / (24 * 60 * 60 * 1000)); // 24h decay
        const accessScore = Math.min(1, accessInfo.count / 100); // Cap at 100 accesses
        priority = (recencyScore * 0.6 + accessScore * 0.4) * (rule.factor || 1);
        break;
      }
      case 'size': {
        // Size-based: prefer smaller content (inverse relationship).
        const maxSize = 100 * 1024 * 1024; // 100MB
        const sizeScore = Math.max(0.1, 1 - size / maxSize);
        priority = sizeScore * (rule.factor || 1);
        break;
      }
      case 'age': {
        // Age-based: prefer newer content.
        const maxAge = 30 * 24 * 60 * 60 * 1000; // 30 days
        const age = now - (metadata.createdAt || now);
        const ageScore = Math.max(0.1, 1 - age / maxAge);
        priority = ageScore * (rule.factor || 1);
        break;
      }
      case 'custom':
        // Custom logic provided by user.
        if (rule.customLogic) {
          priority =
            rule.customLogic(metadata, {
              size,
              accessInfo: this.accessLog.get(metadata.hash),
              now,
            }) * (rule.factor || 1);
        } else {
          priority = rule.factor || 1;
        }
        break;
      default:
        priority = rule.factor || 1;
    }
    return Math.max(0, priority);
  }

  /**
   * Decides whether a new pin with the given priority/size fits under the
   * model rule and global limits, evicting lower-priority pins when needed.
   */
  private async shouldPinContent(
    rule: PinningRule,
    priority: number,
    size: number,
  ): Promise<boolean> {
    // Check rule-specific (per-model) pin cap.
    if (rule.maxPins) {
      const currentPinsForModel = Array.from(this.pinnedItems.values()).filter(
        (item) => item.modelName === rule.modelName,
      ).length;
      if (currentPinsForModel >= rule.maxPins) {
        // Find lowest priority item for this model to potentially replace.
        const lowestPriorityItem = Array.from(this.pinnedItems.values())
          .filter((item) => item.modelName === rule.modelName)
          .sort((a, b) => a.priority - b.priority)[0];
        if (!lowestPriorityItem || priority <= lowestPriorityItem.priority) {
          return false;
        }
        // Unpin the lowest priority item to make room.
        await this.unpinContent(lowestPriorityItem.hash, true);
      }
    }
    // Check global pin-count limit.
    const metrics = this.getMetrics();
    if (metrics.totalPinned >= this.maxTotalPins) {
      // Find globally lowest priority item to replace.
      const lowestPriorityItem = Array.from(this.pinnedItems.values()).sort(
        (a, b) => a.priority - b.priority,
      )[0];
      if (!lowestPriorityItem || priority <= lowestPriorityItem.priority) {
        return false;
      }
      await this.unpinContent(lowestPriorityItem.hash, true);
    }
    // Check global size limit.
    if (metrics.totalSize + size > this.maxTotalSize) {
      const spaceNeeded = metrics.totalSize + size - this.maxTotalSize;
      await this.freeUpSpace(spaceNeeded);
    }
    return true;
  }

  /**
   * True when a pin should not be evicted: accessed within the last hour,
   * priority above 0.8, or more than 50 recorded accesses.
   */
  private async isProtectedFromUnpinning(pinnedItem: PinnedItem): Promise<boolean> {
    const rule = this.pinningRules.get(pinnedItem.modelName);
    if (!rule) return false;
    // Recently accessed content is protected.
    const timeSinceAccess = Date.now() - pinnedItem.lastAccessed;
    if (timeSinceAccess < 60 * 60 * 1000) {
      // 1 hour
      return true;
    }
    // High-priority content is protected.
    if (pinnedItem.priority > 0.8) {
      return true;
    }
    // Content with high access count is protected.
    if (pinnedItem.accessCount > 50) {
      return true;
    }
    return false;
  }

  /** Frees at least `spaceNeeded` bytes by unpinning least-important content. */
  private async freeUpSpace(spaceNeeded: number): Promise<void> {
    let freedSpace = 0;
    // Sort by priority (lowest first).
    const sortedItems = Array.from(this.pinnedItems.values()).sort(
      (a, b) => a.priority - b.priority,
    );
    for (const item of sortedItems) {
      if (freedSpace >= spaceNeeded) break;
      // BUG FIX: the original filtered with the async predicate
      // `!this.isProtectedFromUnpinning(item)`; a Promise is always truthy,
      // so the filter dropped every item and no space was ever freed.
      if (await this.isProtectedFromUnpinning(item)) continue;
      await this.unpinContent(item.hash, true);
      freedSpace += item.size;
    }
    console.log(`🧹 Freed up ${(freedSpace / 1024 / 1024).toFixed(2)} MB of space`);
  }

  /** Evicts lowest-priority pins until global count and size limits hold. */
  private async enforceGlobalLimits(): Promise<void> {
    const metrics = this.getMetrics();
    // Check total pins limit.
    if (metrics.totalPinned > this.maxTotalPins) {
      const excess = metrics.totalPinned - this.maxTotalPins;
      const itemsToUnpin = Array.from(this.pinnedItems.values())
        .sort((a, b) => a.priority - b.priority)
        .slice(0, excess);
      for (const item of itemsToUnpin) {
        await this.unpinContent(item.hash, true);
      }
    }
    // Check total size limit.
    if (metrics.totalSize > this.maxTotalSize) {
      const excessSize = metrics.totalSize - this.maxTotalSize;
      await this.freeUpSpace(excessSize);
    }
  }

  /**
   * Periodic sweep: unpins items that exceed the rule's maxAge, fall below
   * minAccessCount, or are both low-priority and idle for 7+ days — unless
   * protected.
   */
  private async performCleanup(): Promise<void> {
    const now = Date.now();
    const itemsToCleanup: PinnedItem[] = [];
    for (const item of this.pinnedItems.values()) {
      const rule = this.pinningRules.get(item.modelName);
      if (!rule) continue;
      let shouldCleanup = false;
      // Age-based cleanup.
      if (rule.maxAge) {
        const age = now - item.pinnedAt;
        if (age > rule.maxAge) {
          shouldCleanup = true;
        }
      }
      // Access-based cleanup.
      if (rule.minAccessCount) {
        if (item.accessCount < rule.minAccessCount) {
          shouldCleanup = true;
        }
      }
      // Inactivity-based cleanup (not accessed for 7 days, low priority).
      const inactivityThreshold = 7 * 24 * 60 * 60 * 1000;
      if (now - item.lastAccessed > inactivityThreshold && item.priority < 0.3) {
        shouldCleanup = true;
      }
      if (shouldCleanup && !(await this.isProtectedFromUnpinning(item))) {
        itemsToCleanup.push(item);
      }
    }
    // Unpin items marked for cleanup.
    for (const item of itemsToCleanup) {
      await this.unpinContent(item.hash, true);
    }
    if (itemsToCleanup.length > 0) {
      console.log(`🧹 Cleaned up ${itemsToCleanup.length} old/unused pins`);
    }
  }

  /** Starts the periodic cleanup timer (called from the constructor). */
  private startAutoCleanup(): void {
    this.cleanupInterval = setInterval(() => {
      this.performCleanup().catch((error) => {
        console.error('Cleanup failed:', error);
      });
    }, this.cleanupIntervalMs);
  }

  /** Stops the periodic cleanup timer. */
  stopAutoCleanup(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval as any);
      this.cleanupInterval = null;
    }
  }

  /** Asks IPFS for the object's size; falls back to 1024 bytes on failure. */
  private async getContentSize(hash: string): Promise<number> {
    try {
      const stats = await this.ipfsService.object.stat(hash);
      return stats.CumulativeSize || stats.BlockSize || 0;
    } catch (error) {
      console.warn(`Could not get size for ${hash}:`, error);
      return 1024; // Default size
    }
  }

  /** Builds an aggregate snapshot of everything currently pinned. */
  getMetrics(): PinningMetrics {
    const items = Array.from(this.pinnedItems.values());
    const totalSize = items.reduce((sum, item) => sum + item.size, 0);
    const strategyBreakdown = new Map<PinningStrategy, number>();
    // Count pinned items per strategy.
    for (const item of items) {
      const rule = this.pinningRules.get(item.modelName);
      if (rule) {
        const strategy = rule.strategy || 'popularity';
        const count = strategyBreakdown.get(strategy) || 0;
        strategyBreakdown.set(strategy, count + 1);
      }
    }
    // Find most/least accessed.
    const sortedByAccess = items.sort((a, b) => b.accessCount - a.accessCount);
    return {
      totalPinned: items.length,
      totalSize,
      averageSize: items.length > 0 ? totalSize / items.length : 0,
      oldestPin: items.length > 0 ? Math.min(...items.map((i) => i.pinnedAt)) : 0,
      newestPin: items.length > 0 ? Math.max(...items.map((i) => i.pinnedAt)) : 0,
      mostAccessed: sortedByAccess[0] || null,
      leastAccessed: sortedByAccess[sortedByAccess.length - 1] || null,
      strategyBreakdown,
    };
  }

  /** Public statistics view (plain-object strategies map for serialization). */
  getStats(): PinningStats {
    const metrics = this.getMetrics();
    return {
      totalPinned: metrics.totalPinned,
      totalSize: metrics.totalSize,
      averageSize: metrics.averageSize,
      strategies: Object.fromEntries(metrics.strategyBreakdown),
      oldestPin: metrics.oldestPin,
      recentActivity: this.getRecentActivity(),
    };
  }

  /** Returns up to 10 pins created within the last 24 hours, newest first. */
  private getRecentActivity(): Array<{ action: string; hash: string; timestamp: number }> {
    // This would typically be implemented with a proper activity log.
    // For now, we return recent pins only.
    const recentItems = Array.from(this.pinnedItems.values())
      .filter((item) => Date.now() - item.pinnedAt < 24 * 60 * 60 * 1000) // Last 24 hours
      .sort((a, b) => b.pinnedAt - a.pinnedAt)
      .slice(0, 10)
      .map((item) => ({
        action: 'pinned',
        hash: item.hash,
        timestamp: item.pinnedAt,
      }));
    return recentItems;
  }

  /** Derives hit-rate, utilization, and storage-efficiency indicators. */
  analyzePerformance(): any {
    const metrics = this.getMetrics();
    const now = Date.now();
    // Hit rate: fraction of pins accessed within the last hour.
    const recentlyAccessedCount = Array.from(this.pinnedItems.values()).filter(
      (item) => now - item.lastAccessed < 60 * 60 * 1000,
    ).length;
    const hitRate = metrics.totalPinned > 0 ? recentlyAccessedCount / metrics.totalPinned : 0;
    // Average priority across all pins (0 when nothing is pinned).
    const averagePriority =
      Array.from(this.pinnedItems.values()).reduce((sum, item) => sum + item.priority, 0) /
        metrics.totalPinned || 0;
    // Storage efficiency: fraction of the size budget still free.
    const storageEfficiency =
      this.maxTotalSize > 0 ? (this.maxTotalSize - metrics.totalSize) / this.maxTotalSize : 0;
    return {
      hitRate,
      averagePriority,
      storageEfficiency,
      utilizationRate: metrics.totalPinned / this.maxTotalPins,
      averageItemAge: now - (metrics.oldestPin + metrics.newestPin) / 2,
      totalRules: this.pinningRules.size,
      accessDistribution: this.getAccessDistribution(),
    };
  }

  /** Quartile statistics over per-item access counts. */
  private getAccessDistribution(): any {
    const items = Array.from(this.pinnedItems.values());
    const accessCounts = items.map((item) => item.accessCount).sort((a, b) => a - b);
    if (accessCounts.length === 0) {
      return { min: 0, max: 0, median: 0, q1: 0, q3: 0 };
    }
    const min = accessCounts[0];
    const max = accessCounts[accessCounts.length - 1];
    const median = accessCounts[Math.floor(accessCounts.length / 2)];
    const q1 = accessCounts[Math.floor(accessCounts.length / 4)];
    const q3 = accessCounts[Math.floor((accessCounts.length * 3) / 4)];
    return { min, max, median, q1, q3 };
  }

  /** All pin records belonging to one model. */
  getPinnedItemsForModel(modelName: string): PinnedItem[] {
    return Array.from(this.pinnedItems.values()).filter((item) => item.modelName === modelName);
  }

  /** Whether `hash` is currently tracked as pinned by this manager. */
  isPinned(hash: string): boolean {
    return this.pinnedItems.has(hash);
  }

  /** Force-unpins everything and clears all bookkeeping (testing/reset). */
  async clearAllPins(): Promise<void> {
    const hashes = Array.from(this.pinnedItems.keys());
    for (const hash of hashes) {
      await this.unpinContent(hash, true);
    }
    this.pinnedItems.clear();
    this.accessLog.clear();
    console.log(`🧹 Cleared all ${hashes.length} pins`);
  }

  /** Stops background work; existing pins remain pinned in IPFS. */
  async shutdown(): Promise<void> {
    this.stopAutoCleanup();
    console.log('📌 PinningManager shut down');
  }
}

View File

@ -0,0 +1,712 @@
/**
* PubSubManager - Automatic Event Publishing and Subscription
*
* This class handles automatic publishing of model changes and database events
* to IPFS PubSub topics, enabling real-time synchronization across nodes:
* - Model-level events (create, update, delete)
* - Database-level events (replication, sync)
* - Custom application events
* - Topic management and subscription handling
* - Event filtering and routing
*/
import { BaseModel } from '../models/BaseModel';
// Node.js types for compatibility
declare global {
namespace NodeJS {
interface Timeout {}
}
}
/** Runtime configuration for {@link PubSubManager}. */
export interface PubSubConfig {
  enabled: boolean; // master switch; when false publish/subscribe are no-ops
  autoPublishModelEvents: boolean; // re-publish model create/update/delete/save events
  autoPublishDatabaseEvents: boolean; // re-publish replication/sync/conflict/error events
  topicPrefix: string; // prepended as `${prefix}.` to every topic name
  maxRetries: number; // default retry count for publish and handlers
  retryDelay: number; // base backoff in ms (multiplied by attempt number)
  eventBuffer: {
    enabled: boolean;
    maxSize: number; // buffered events before a forced flush
    flushInterval: number; // periodic flush interval in ms
  };
  compression: {
    enabled: boolean;
    threshold: number; // bytes; payloads above this are compression candidates
  };
  encryption: {
    enabled: boolean;
    publicKey?: string;
    privateKey?: string;
  };
}

/** Envelope wrapped around every payload published through the manager. */
export interface PubSubEvent {
  id: string; // unique per-event id generated by the publisher
  type: string; // last dot-segment of the topic name
  topic: string; // fully prefixed topic
  data: any;
  timestamp: number; // epoch ms at publish time (used for latency stats)
  source: string; // publishing node id (own messages are ignored on receipt)
  metadata?: any;
}

/** One registered handler for a topic, with its delivery options. */
export interface TopicSubscription {
  topic: string; // fully prefixed topic
  handler: (event: PubSubEvent) => void | Promise<void>;
  filter?: (event: PubSubEvent) => boolean; // events failing the filter are skipped
  options: {
    autoAck: boolean;
    maxRetries: number; // handler retry attempts before giving up
    deadLetterTopic?: string; // where failed events are forwarded, if set
  };
}

/** Counters maintained by {@link PubSubManager.getStats}. */
export interface PubSubStats {
  totalPublished: number;
  totalReceived: number;
  totalSubscriptions: number;
  publishErrors: number;
  receiveErrors: number;
  averageLatency: number; // rolling average over the last 100 received events (ms)
  topicStats: Map<
    string,
    {
      published: number;
      received: number;
      subscribers: number;
      lastActivity: number; // epoch ms of last publish/receive/subscribe on the topic
    }
  >;
}
export class PubSubManager {
private ipfsService: any;
private config: PubSubConfig;
private subscriptions: Map<string, TopicSubscription[]> = new Map();
private eventBuffer: PubSubEvent[] = [];
private bufferFlushInterval: any = null;
private stats: PubSubStats;
private latencyMeasurements: number[] = [];
private nodeId: string;
private isInitialized: boolean = false;
private eventListeners: Map<string, Function[]> = new Map();
constructor(ipfsService: any, config: Partial<PubSubConfig> = {}) {
this.ipfsService = ipfsService;
this.nodeId = `node-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
this.config = {
enabled: true,
autoPublishModelEvents: true,
autoPublishDatabaseEvents: true,
topicPrefix: 'debros',
maxRetries: 3,
retryDelay: 1000,
eventBuffer: {
enabled: true,
maxSize: 100,
flushInterval: 5000,
},
compression: {
enabled: true,
threshold: 1024,
},
encryption: {
enabled: false,
},
...config,
};
this.stats = {
totalPublished: 0,
totalReceived: 0,
totalSubscriptions: 0,
publishErrors: 0,
receiveErrors: 0,
averageLatency: 0,
topicStats: new Map(),
};
}
// Simple event emitter functionality
emit(event: string, ...args: any[]): boolean {
const listeners = this.eventListeners.get(event) || [];
listeners.forEach((listener) => {
try {
listener(...args);
} catch (error) {
console.error(`Error in event listener for ${event}:`, error);
}
});
return listeners.length > 0;
}
on(event: string, listener: Function): this {
if (!this.eventListeners.has(event)) {
this.eventListeners.set(event, []);
}
this.eventListeners.get(event)!.push(listener);
return this;
}
off(event: string, listener?: Function): this {
if (!listener) {
this.eventListeners.delete(event);
} else {
const listeners = this.eventListeners.get(event) || [];
const index = listeners.indexOf(listener);
if (index >= 0) {
listeners.splice(index, 1);
}
}
return this;
}
// Initialize PubSub system
async initialize(): Promise<void> {
if (!this.config.enabled) {
console.log('📡 PubSub disabled in configuration');
return;
}
try {
console.log('📡 Initializing PubSubManager...');
// Start event buffer flushing if enabled
if (this.config.eventBuffer.enabled) {
this.startEventBuffering();
}
// Subscribe to model events if auto-publishing is enabled
if (this.config.autoPublishModelEvents) {
this.setupModelEventPublishing();
}
// Subscribe to database events if auto-publishing is enabled
if (this.config.autoPublishDatabaseEvents) {
this.setupDatabaseEventPublishing();
}
this.isInitialized = true;
console.log('✅ PubSubManager initialized successfully');
} catch (error) {
console.error('❌ Failed to initialize PubSubManager:', error);
throw error;
}
}
// Publish event to a topic
async publish(
topic: string,
data: any,
options: {
priority?: 'low' | 'normal' | 'high';
retries?: number;
compress?: boolean;
encrypt?: boolean;
metadata?: any;
} = {},
): Promise<boolean> {
if (!this.config.enabled || !this.isInitialized) {
return false;
}
const event: PubSubEvent = {
id: this.generateEventId(),
type: this.extractEventType(topic),
topic: this.prefixTopic(topic),
data,
timestamp: Date.now(),
source: this.nodeId,
metadata: options.metadata,
};
try {
// Process event (compression, encryption, etc.)
const processedData = await this.processEventForPublishing(event, options);
// Publish with buffering or directly
if (this.config.eventBuffer.enabled && options.priority !== 'high') {
return this.bufferEvent(event, processedData);
} else {
return await this.publishDirect(event.topic, processedData, options.retries);
}
} catch (error) {
this.stats.publishErrors++;
console.error(`❌ Failed to publish to ${topic}:`, error);
this.emit('publishError', { topic, error, event });
return false;
}
}
// Subscribe to a topic
async subscribe(
topic: string,
handler: (event: PubSubEvent) => void | Promise<void>,
options: {
filter?: (event: PubSubEvent) => boolean;
autoAck?: boolean;
maxRetries?: number;
deadLetterTopic?: string;
} = {},
): Promise<boolean> {
if (!this.config.enabled || !this.isInitialized) {
return false;
}
const fullTopic = this.prefixTopic(topic);
try {
const subscription: TopicSubscription = {
topic: fullTopic,
handler,
filter: options.filter,
options: {
autoAck: options.autoAck !== false,
maxRetries: options.maxRetries || this.config.maxRetries,
deadLetterTopic: options.deadLetterTopic,
},
};
// Add to subscriptions map
if (!this.subscriptions.has(fullTopic)) {
this.subscriptions.set(fullTopic, []);
// Subscribe to IPFS PubSub topic
await this.ipfsService.pubsub.subscribe(fullTopic, (message: any) => {
this.handleIncomingMessage(fullTopic, message);
});
}
this.subscriptions.get(fullTopic)!.push(subscription);
this.stats.totalSubscriptions++;
// Update topic stats
this.updateTopicStats(fullTopic, 'subscribers', 1);
console.log(`📡 Subscribed to topic: ${fullTopic}`);
this.emit('subscribed', { topic: fullTopic, subscription });
return true;
} catch (error) {
console.error(`❌ Failed to subscribe to ${topic}:`, error);
this.emit('subscribeError', { topic, error });
return false;
}
}
// Unsubscribe from a topic
async unsubscribe(topic: string, handler?: Function): Promise<boolean> {
const fullTopic = this.prefixTopic(topic);
const subscriptions = this.subscriptions.get(fullTopic);
if (!subscriptions) {
return false;
}
try {
if (handler) {
// Remove specific handler
const index = subscriptions.findIndex((sub) => sub.handler === handler);
if (index >= 0) {
subscriptions.splice(index, 1);
this.stats.totalSubscriptions--;
}
} else {
// Remove all handlers for this topic
this.stats.totalSubscriptions -= subscriptions.length;
subscriptions.length = 0;
}
// If no more subscriptions, unsubscribe from IPFS
if (subscriptions.length === 0) {
await this.ipfsService.pubsub.unsubscribe(fullTopic);
this.subscriptions.delete(fullTopic);
this.stats.topicStats.delete(fullTopic);
}
console.log(`📡 Unsubscribed from topic: ${fullTopic}`);
this.emit('unsubscribed', { topic: fullTopic });
return true;
} catch (error) {
console.error(`❌ Failed to unsubscribe from ${topic}:`, error);
return false;
}
}
// Setup automatic model event publishing
private setupModelEventPublishing(): void {
const topics = {
create: 'model.created',
update: 'model.updated',
delete: 'model.deleted',
save: 'model.saved',
};
// Listen for model events on the global framework instance
this.on('modelEvent', async (eventType: string, model: BaseModel, changes?: any) => {
const topic = topics[eventType as keyof typeof topics];
if (!topic) return;
const eventData = {
modelName: model.constructor.name,
modelId: model.id,
userId: (model as any).userId,
changes,
timestamp: Date.now(),
};
await this.publish(topic, eventData, {
priority: eventType === 'delete' ? 'high' : 'normal',
metadata: {
modelType: model.constructor.name,
scope: (model.constructor as any).scope,
},
});
});
}
// Setup automatic database event publishing
private setupDatabaseEventPublishing(): void {
const databaseTopics = {
replication: 'database.replicated',
sync: 'database.synced',
conflict: 'database.conflict',
error: 'database.error',
};
// Listen for database events
this.on('databaseEvent', async (eventType: string, data: any) => {
const topic = databaseTopics[eventType as keyof typeof databaseTopics];
if (!topic) return;
await this.publish(topic, data, {
priority: eventType === 'error' ? 'high' : 'normal',
metadata: {
eventType,
source: 'database',
},
});
});
}
// Handle incoming PubSub messages
private async handleIncomingMessage(topic: string, message: any): Promise<void> {
try {
const startTime = Date.now();
// Parse and validate message
const event = await this.processIncomingMessage(message);
if (!event) return;
// Update stats
this.stats.totalReceived++;
this.updateTopicStats(topic, 'received', 1);
// Calculate latency
const latency = Date.now() - event.timestamp;
this.latencyMeasurements.push(latency);
if (this.latencyMeasurements.length > 100) {
this.latencyMeasurements.shift();
}
this.stats.averageLatency =
this.latencyMeasurements.reduce((a, b) => a + b, 0) / this.latencyMeasurements.length;
// Route to subscribers
const subscriptions = this.subscriptions.get(topic) || [];
for (const subscription of subscriptions) {
try {
// Apply filter if present
if (subscription.filter && !subscription.filter(event)) {
continue;
}
// Call handler
await this.callHandlerWithRetry(subscription, event);
} catch (error: any) {
this.stats.receiveErrors++;
console.error(`❌ Handler error for ${topic}:`, error);
// Send to dead letter topic if configured
if (subscription.options.deadLetterTopic) {
await this.publish(subscription.options.deadLetterTopic, {
originalTopic: topic,
originalEvent: event,
error: error?.message || String(error),
timestamp: Date.now(),
});
}
}
}
this.emit('messageReceived', { topic, event, processingTime: Date.now() - startTime });
} catch (error) {
this.stats.receiveErrors++;
console.error(`❌ Failed to handle message from ${topic}:`, error);
this.emit('messageError', { topic, error });
}
}
// Call handler with retry logic
private async callHandlerWithRetry(
subscription: TopicSubscription,
event: PubSubEvent,
attempt: number = 1,
): Promise<void> {
try {
await subscription.handler(event);
} catch (error) {
if (attempt < subscription.options.maxRetries) {
console.warn(
`🔄 Retrying handler (attempt ${attempt + 1}/${subscription.options.maxRetries})`,
);
await new Promise((resolve) => setTimeout(resolve, this.config.retryDelay * attempt));
return this.callHandlerWithRetry(subscription, event, attempt + 1);
}
throw error;
}
}
// Process event for publishing (compression, encryption, etc.)
private async processEventForPublishing(event: PubSubEvent, options: any): Promise<string> {
let data = JSON.stringify(event);
// Compression
if (
options.compress !== false &&
this.config.compression.enabled &&
data.length > this.config.compression.threshold
) {
// In a real implementation, you'd use a compression library like zlib
// data = await compress(data);
}
// Encryption
if (
options.encrypt !== false &&
this.config.encryption.enabled &&
this.config.encryption.publicKey
) {
// In a real implementation, you'd encrypt with the public key
// data = await encrypt(data, this.config.encryption.publicKey);
}
return data;
}
// Process incoming message
private async processIncomingMessage(message: any): Promise<PubSubEvent | null> {
try {
let data = message.data.toString();
// Decryption
if (this.config.encryption.enabled && this.config.encryption.privateKey) {
// In a real implementation, you'd decrypt with the private key
// data = await decrypt(data, this.config.encryption.privateKey);
}
// Decompression
if (this.config.compression.enabled) {
// In a real implementation, you'd detect and decompress
// data = await decompress(data);
}
const event = JSON.parse(data) as PubSubEvent;
// Validate event structure
if (!event.id || !event.topic || !event.timestamp) {
console.warn('❌ Invalid event structure received');
return null;
}
// Ignore our own messages
if (event.source === this.nodeId) {
return null;
}
return event;
} catch (error) {
console.error('❌ Failed to process incoming message:', error);
return null;
}
}
// Direct publish without buffering
private async publishDirect(
topic: string,
data: string,
retries: number = this.config.maxRetries,
): Promise<boolean> {
for (let attempt = 1; attempt <= retries; attempt++) {
try {
await this.ipfsService.pubsub.publish(topic, data);
this.stats.totalPublished++;
this.updateTopicStats(topic, 'published', 1);
return true;
} catch (error) {
if (attempt === retries) {
throw error;
}
console.warn(`🔄 Retrying publish (attempt ${attempt + 1}/${retries})`);
await new Promise((resolve) => setTimeout(resolve, this.config.retryDelay * attempt));
}
}
return false;
}
// Buffer event for batch publishing
private bufferEvent(event: PubSubEvent, _data: string): boolean {
if (this.eventBuffer.length >= this.config.eventBuffer.maxSize) {
// Buffer is full, flush immediately
this.flushEventBuffer();
}
this.eventBuffer.push(event);
return true;
}
// Start event buffering
private startEventBuffering(): void {
this.bufferFlushInterval = setInterval(() => {
this.flushEventBuffer();
}, this.config.eventBuffer.flushInterval);
}
// Flush event buffer
private async flushEventBuffer(): Promise<void> {
if (this.eventBuffer.length === 0) return;
const events = [...this.eventBuffer];
this.eventBuffer.length = 0;
console.log(`📡 Flushing ${events.length} buffered events`);
// Group events by topic for efficiency
const eventsByTopic = new Map<string, PubSubEvent[]>();
for (const event of events) {
if (!eventsByTopic.has(event.topic)) {
eventsByTopic.set(event.topic, []);
}
eventsByTopic.get(event.topic)!.push(event);
}
// Publish batches
for (const [topic, topicEvents] of eventsByTopic) {
try {
for (const event of topicEvents) {
const data = await this.processEventForPublishing(event, {});
await this.publishDirect(topic, data);
}
} catch (error) {
console.error(`❌ Failed to flush events for ${topic}:`, error);
this.stats.publishErrors += topicEvents.length;
}
}
}
// Bump a per-topic counter and refresh its last-activity timestamp,
// creating the stats record on first use for that topic.
private updateTopicStats(
  topic: string,
  metric: 'published' | 'received' | 'subscribers',
  delta: number,
): void {
  let topicStats = this.stats.topicStats.get(topic);
  if (topicStats === undefined) {
    topicStats = {
      published: 0,
      received: 0,
      subscribers: 0,
      lastActivity: Date.now(),
    };
    this.stats.topicStats.set(topic, topicStats);
  }
  topicStats[metric] += delta;
  topicStats.lastActivity = Date.now();
}
// Utility methods
// Build a reasonably unique event id: millisecond timestamp plus a
// 9-character base-36 random suffix. Uses slice() because
// String.prototype.substr() is deprecated (slice(2, 11) is equivalent
// to substr(2, 9)).
private generateEventId(): string {
  return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
}
// Return the final dot-separated segment of a topic name
// (e.g. "app.models.created" -> "created"); a topic with no dot is
// returned unchanged (lastIndexOf yields -1, slice(0) is the whole string).
private extractEventType(topic: string): string {
  return topic.slice(topic.lastIndexOf('.') + 1);
}
// Namespace a topic under the configured prefix ("<prefix>.<topic>").
private prefixTopic(topic: string): string {
  return [this.config.topicPrefix, topic].join('.');
}
// Get PubSub statistics.
// Returns a defensive copy: the topicStats Map is cloned as well, so
// callers cannot mutate internal counters (the previous shallow spread
// leaked a live reference to the internal Map).
getStats(): PubSubStats {
  return { ...this.stats, topicStats: new Map(this.stats.topicStats) };
}
// List the (prefixed) topics this node currently holds subscriptions for.
getActiveTopics(): string[] {
  return [...this.subscriptions.keys()];
}
// Number of handlers registered for a topic (0 when the topic is unknown).
getTopicSubscribers(topic: string): number {
  const handlers = this.subscriptions.get(this.prefixTopic(topic));
  return handlers ? handlers.length : 0;
}
// True when the topic has at least one active subscription handler.
hasSubscriptions(topic: string): boolean {
  const handlers = this.subscriptions.get(this.prefixTopic(topic));
  return handlers !== undefined && handlers.length > 0;
}
// Unsubscribe from every active topic and reset all subscription state.
async clearAllSubscriptions(): Promise<void> {
  const topics = [...this.subscriptions.keys()];
  // Sequential on purpose: a failure on one topic must not stop the rest.
  for (const topic of topics) {
    try {
      await this.ipfsService.pubsub.unsubscribe(topic);
    } catch (error) {
      console.error(`Failed to unsubscribe from ${topic}:`, error);
    }
  }
  this.subscriptions.clear();
  this.stats.topicStats.clear();
  this.stats.totalSubscriptions = 0;
  console.log(`📡 Cleared all ${topics.length} subscriptions`);
}
// Gracefully stop the manager: halt the flush timer, drain buffered
// events, drop every subscription, and clear registered listeners.
async shutdown(): Promise<void> {
  console.log('📡 Shutting down PubSubManager...');
  // Stop the periodic flush first so no new flush races the final one.
  if (this.bufferFlushInterval) {
    clearInterval(this.bufferFlushInterval as any);
    this.bufferFlushInterval = null;
  }
  // Publish whatever is still buffered before tearing anything down.
  await this.flushEventBuffer();
  // Remove all topic subscriptions.
  await this.clearAllSubscriptions();
  // Drop listeners and mark the manager inactive.
  this.eventListeners.clear();
  this.isInitialized = false;
  console.log('✅ PubSubManager shut down successfully');
}
}

View File

@ -0,0 +1,447 @@
import { BaseModel } from '../models/BaseModel';
import { QueryCondition, SortConfig } from '../types/queries';
import { QueryExecutor } from './QueryExecutor';
/**
 * Fluent, chainable query builder for BaseModel subclasses.
 *
 * Collects conditions, sorting, pagination, relations, and aggregation
 * settings, then delegates execution to QueryExecutor. Chainable methods
 * mutate this builder and return `this`; use clone() to branch a query
 * without shared state.
 *
 * Fixes vs previous version:
 * - first() now executes on a clone, so it no longer leaves a permanent
 *   limit(1) on the builder.
 * - toSQL() renders an explicit limit/offset of 0 (the old truthiness
 *   check silently dropped them).
 */
export class QueryBuilder<T extends BaseModel> {
  private model: typeof BaseModel;
  private conditions: QueryCondition[] = [];
  private relations: string[] = [];
  private sorting: SortConfig[] = [];
  private limitation?: number;
  private offsetValue?: number;
  private groupByFields: string[] = [];
  private havingConditions: QueryCondition[] = [];
  private distinctFields: string[] = [];

  constructor(model: typeof BaseModel) {
    this.model = model;
  }

  // --- Basic filtering (AND semantics) -------------------------------

  /** Add a raw field/operator/value condition. */
  where(field: string, operator: string, value: any): this {
    this.conditions.push({ field, operator, value });
    return this;
  }

  whereIn(field: string, values: any[]): this {
    return this.where(field, 'in', values);
  }

  whereNotIn(field: string, values: any[]): this {
    return this.where(field, 'not_in', values);
  }

  whereNull(field: string): this {
    return this.where(field, 'is_null', null);
  }

  whereNotNull(field: string): this {
    return this.where(field, 'is_not_null', null);
  }

  whereBetween(field: string, min: any, max: any): this {
    return this.where(field, 'between', [min, max]);
  }

  /** Negated operator variant, e.g. whereNot(f, 'in', v) -> 'not_in'. */
  whereNot(field: string, operator: string, value: any): this {
    return this.where(field, `not_${operator}`, value);
  }

  whereLike(field: string, pattern: string): this {
    return this.where(field, 'like', pattern);
  }

  whereILike(field: string, pattern: string): this {
    return this.where(field, 'ilike', pattern);
  }

  // --- Date filtering ------------------------------------------------

  whereDate(field: string, operator: string, date: Date | string | number): this {
    return this.where(field, `date_${operator}`, date);
  }

  whereDateBetween(
    field: string,
    startDate: Date | string | number,
    endDate: Date | string | number,
  ): this {
    return this.where(field, 'date_between', [startDate, endDate]);
  }

  whereYear(field: string, year: number): this {
    return this.where(field, 'year', year);
  }

  /** month is 1-based (January = 1). */
  whereMonth(field: string, month: number): this {
    return this.where(field, 'month', month);
  }

  whereDay(field: string, day: number): this {
    return this.where(field, 'day', day);
  }

  // --- User-specific filtering (for user-scoped models) --------------

  whereUser(userId: string): this {
    return this.where('userId', '=', userId);
  }

  whereUserIn(userIds: string[]): this {
    this.conditions.push({
      field: 'userId',
      operator: 'userIn',
      value: userIds,
    });
    return this;
  }

  // --- OR groups -----------------------------------------------------

  /** Group the conditions built in the callback under one OR condition. */
  orWhere(callback: (query: QueryBuilder<T>) => void): this {
    const subQuery = new QueryBuilder<T>(this.model);
    callback(subQuery);
    this.conditions.push({
      field: '__or__',
      operator: 'or',
      value: subQuery.getConditions(),
    });
    return this;
  }

  // --- Array and object field queries --------------------------------

  whereArrayContains(field: string, value: any): this {
    return this.where(field, 'array_contains', value);
  }

  whereArrayLength(field: string, operator: string, length: number): this {
    return this.where(field, `array_length_${operator}`, length);
  }

  whereObjectHasKey(field: string, key: string): this {
    return this.where(field, 'object_has_key', key);
  }

  whereObjectPath(field: string, path: string, operator: string, value: any): this {
    return this.where(field, `object_path_${operator}`, { path, value });
  }

  // --- Sorting -------------------------------------------------------

  orderBy(field: string, direction: 'asc' | 'desc' = 'asc'): this {
    this.sorting.push({ field, direction });
    return this;
  }

  orderByDesc(field: string): this {
    return this.orderBy(field, 'desc');
  }

  /** NOTE: the raw expression is stored as a plain field name. */
  orderByRaw(expression: string): this {
    this.sorting.push({ field: expression, direction: 'asc' });
    return this;
  }

  orderByMultiple(sorts: Array<{ field: string; direction: 'asc' | 'desc' }>): this {
    sorts.forEach((sort) => this.orderBy(sort.field, sort.direction));
    return this;
  }

  // --- Pagination ----------------------------------------------------

  limit(count: number): this {
    this.limitation = count;
    return this;
  }

  offset(count: number): this {
    this.offsetValue = count;
    return this;
  }

  skip(count: number): this {
    return this.offset(count);
  }

  take(count: number): this {
    return this.limit(count);
  }

  /** Set limit/offset from a 1-based page number and page size. */
  page(pageNumber: number, pageSize: number): this {
    this.limitation = pageSize;
    this.offsetValue = (pageNumber - 1) * pageSize;
    return this;
  }

  // --- Relationship loading ------------------------------------------

  load(relationships: string[]): this {
    this.relations = [...this.relations, ...relationships];
    return this;
  }

  with(relationships: string[]): this {
    return this.load(relationships);
  }

  loadNested(relationship: string, _callback: (query: QueryBuilder<any>) => void): this {
    // Nested relationship loading with constraints; the callback is not
    // consumed here (constraint handling lives in QueryExecutor).
    this.relations.push(relationship);
    return this;
  }

  // --- Aggregation / distinct ----------------------------------------

  groupBy(...fields: string[]): this {
    this.groupByFields.push(...fields);
    return this;
  }

  having(field: string, operator: string, value: any): this {
    this.havingConditions.push({ field, operator, value });
    return this;
  }

  distinct(...fields: string[]): this {
    this.distinctFields.push(...fields);
    return this;
  }

  // --- Execution -----------------------------------------------------

  async exec(): Promise<T[]> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.execute();
  }

  async get(): Promise<T[]> {
    return await this.exec();
  }

  /**
   * First matching record or null.
   * Executes on a clone so this builder is not left with a permanent
   * limit(1) (the previous implementation mutated the builder, so a
   * later get() on the same builder returned at most one row).
   */
  async first(): Promise<T | null> {
    const results = await this.clone().limit(1).exec();
    return results[0] || null;
  }

  /** Like first(), but throws when nothing matches. */
  async firstOrFail(): Promise<T> {
    const result = await this.first();
    if (!result) {
      throw new Error(`No ${this.model.name} found matching the query`);
    }
    return result;
  }

  async find(id: string): Promise<T | null> {
    return await this.where('id', '=', id).first();
  }

  async findOrFail(id: string): Promise<T> {
    const result = await this.find(id);
    if (!result) {
      throw new Error(`${this.model.name} with id ${id} not found`);
    }
    return result;
  }

  async count(): Promise<number> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.count();
  }

  async exists(): Promise<boolean> {
    const count = await this.count();
    return count > 0;
  }

  async sum(field: string): Promise<number> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.sum(field);
  }

  async avg(field: string): Promise<number> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.avg(field);
  }

  async min(field: string): Promise<any> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.min(field);
  }

  async max(field: string): Promise<any> {
    const executor = new QueryExecutor<T>(this.model, this);
    return await executor.max(field);
  }

  /**
   * Fetch one page plus pagination metadata.
   * Note: sets this builder's limit/offset via page().
   */
  async paginate(
    page: number = 1,
    perPage: number = 15,
  ): Promise<{
    data: T[];
    total: number;
    perPage: number;
    currentPage: number;
    lastPage: number;
    hasNextPage: boolean;
    hasPrevPage: boolean;
  }> {
    const total = await this.count();
    const lastPage = Math.ceil(total / perPage);
    const data = await this.page(page, perPage).exec();
    return {
      data,
      total,
      perPage,
      currentPage: page,
      lastPage,
      hasNextPage: page < lastPage,
      hasPrevPage: page > 1,
    };
  }

  /**
   * Process matching rows page by page. The callback may return false to
   * stop early; iteration also stops at the first short page.
   */
  async chunk(
    size: number,
    callback: (items: T[], page: number) => Promise<void | boolean>,
  ): Promise<void> {
    let page = 1;
    let hasMore = true;
    while (hasMore) {
      const items = await this.page(page, size).exec();
      if (items.length === 0) {
        break;
      }
      const result = await callback(items, page);
      // An explicit false from the callback aborts the remaining pages.
      if (result === false) {
        break;
      }
      hasMore = items.length === size;
      page++;
    }
  }

  // --- Optimizer hints -----------------------------------------------

  useIndex(indexName: string): this {
    // Hint for the query optimizer (consumed by QueryExecutor).
    (this as any)._indexHint = indexName;
    return this;
  }

  preferShard(shardIndex: number): this {
    // Force the query to a specific shard (global sharded models only).
    (this as any)._preferredShard = shardIndex;
    return this;
  }

  // --- Raw conditions (advanced) -------------------------------------

  whereRaw(expression: string, bindings: any[] = []): this {
    this.conditions.push({
      field: '__raw__',
      operator: 'raw',
      value: { expression, bindings },
    });
    return this;
  }

  // --- Accessors used by QueryExecutor (defensive copies) ------------

  getConditions(): QueryCondition[] {
    return [...this.conditions];
  }

  getRelations(): string[] {
    return [...this.relations];
  }

  getSorting(): SortConfig[] {
    return [...this.sorting];
  }

  getLimit(): number | undefined {
    return this.limitation;
  }

  getOffset(): number | undefined {
    return this.offsetValue;
  }

  getGroupBy(): string[] {
    return [...this.groupByFields];
  }

  getHaving(): QueryCondition[] {
    return [...this.havingConditions];
  }

  getDistinct(): string[] {
    return [...this.distinctFields];
  }

  getModel(): typeof BaseModel {
    return this.model;
  }

  /** Copy for branching a query without shared mutable array state. */
  clone(): QueryBuilder<T> {
    const cloned = new QueryBuilder<T>(this.model);
    cloned.conditions = [...this.conditions];
    cloned.relations = [...this.relations];
    cloned.sorting = [...this.sorting];
    cloned.limitation = this.limitation;
    cloned.offsetValue = this.offsetValue;
    cloned.groupByFields = [...this.groupByFields];
    cloned.havingConditions = [...this.havingConditions];
    cloned.distinctFields = [...this.distinctFields];
    return cloned;
  }

  // --- Debugging -----------------------------------------------------

  /** SQL-like rendering of the query, for logs/debugging only. */
  toSQL(): string {
    let sql = `SELECT * FROM ${this.model.name}`;
    if (this.conditions.length > 0) {
      const whereClause = this.conditions
        .map((c) => `${c.field} ${c.operator} ${JSON.stringify(c.value)}`)
        .join(' AND ');
      sql += ` WHERE ${whereClause}`;
    }
    if (this.sorting.length > 0) {
      const orderClause = this.sorting
        .map((s) => `${s.field} ${s.direction.toUpperCase()}`)
        .join(', ');
      sql += ` ORDER BY ${orderClause}`;
    }
    // '!== undefined' so an explicit limit/offset of 0 still renders.
    if (this.limitation !== undefined) {
      sql += ` LIMIT ${this.limitation}`;
    }
    if (this.offsetValue !== undefined) {
      sql += ` OFFSET ${this.offsetValue}`;
    }
    return sql;
  }

  /** Structured debug view of the query configuration. */
  explain(): any {
    return {
      model: this.model.name,
      scope: this.model.scope,
      conditions: this.conditions,
      relations: this.relations,
      sorting: this.sorting,
      limit: this.limitation,
      offset: this.offsetValue,
      sql: this.toSQL(),
    };
  }
}

View File

@ -0,0 +1,315 @@
import { QueryBuilder } from './QueryBuilder';
import { BaseModel } from '../models/BaseModel';
// A single cached query result set.
export interface CacheEntry<T> {
// Deterministic hash-derived key for the query shape.
key: string;
// Result rows serialized to plain objects (via toJSON) for storage.
data: T[];
// Insertion time in ms-epoch; compared against ttl on read.
timestamp: number;
// Time-to-live for this entry, in milliseconds.
ttl: number;
// Number of cache hits; feeds the eviction scoring.
hitCount: number;
}
// Aggregate cache metrics exposed via getStats().
export interface CacheStats {
totalRequests: number;
cacheHits: number;
cacheMisses: number;
// cacheHits / totalRequests (0 until the first request).
hitRate: number;
// Current number of entries.
size: number;
// Configured entry capacity.
maxSize: number;
}
/**
 * In-memory TTL + usage-scored cache for query results.
 *
 * Keys are derived deterministically from the query shape and are
 * prefixed with a hash of the model name so whole models can be
 * invalidated by key prefix. Cached rows are stored as plain objects
 * (toJSON) and rehydrated into model instances on read.
 *
 * Fix vs previous version: generateKey() now produces
 * "<hash(modelName)>:<hash(queryShape)>". The old code hashed the model
 * name together with the rest of the key, so the startsWith() check in
 * invalidateByModel() could never match and model-level invalidation was
 * a no-op.
 */
export class QueryCache {
  private cache: Map<string, CacheEntry<any>> = new Map();
  private maxSize: number;
  private defaultTTL: number;
  private stats: CacheStats;

  constructor(maxSize: number = 1000, defaultTTL: number = 300000) {
    // defaultTTL: 5 minutes
    this.maxSize = maxSize;
    this.defaultTTL = defaultTTL;
    this.stats = {
      totalRequests: 0,
      cacheHits: 0,
      cacheMisses: 0,
      hitRate: 0,
      size: 0,
      maxSize,
    };
  }

  /**
   * Deterministic cache key for a query:
   * "<hash(modelName)>:<hash(scope|conditions|relations|sorting|limit|offset)>".
   * Conditions/relations are sorted first so equivalent queries collide.
   */
  generateKey<T extends BaseModel>(query: QueryBuilder<T>): string {
    const model = query.getModel();
    const conditions = query.getConditions();
    const relations = query.getRelations();
    const sorting = query.getSorting();
    const limit = query.getLimit();
    const offset = query.getOffset();
    const shapeParts = [
      model.scope,
      JSON.stringify(conditions.sort((a, b) => a.field.localeCompare(b.field))),
      JSON.stringify(relations.sort()),
      JSON.stringify(sorting),
      limit?.toString() || 'no-limit',
      offset?.toString() || 'no-offset',
    ];
    // Model-name hash as a separable prefix enables prefix invalidation.
    return `${this.hashString(model.name)}:${this.hashString(shapeParts.join('|'))}`;
  }

  /**
   * Look up cached results for a query. Returns rehydrated model
   * instances, or null on miss/expiry. Updates hit/miss statistics.
   */
  async get<T extends BaseModel>(query: QueryBuilder<T>): Promise<T[] | null> {
    this.stats.totalRequests++;
    const key = this.generateKey(query);
    const entry = this.cache.get(key);
    if (!entry) {
      this.stats.cacheMisses++;
      this.updateHitRate();
      return null;
    }
    // Lazy expiry: drop the entry if its TTL has elapsed.
    if (Date.now() - entry.timestamp > entry.ttl) {
      this.cache.delete(key);
      this.stats.cacheMisses++;
      this.updateHitRate();
      return null;
    }
    entry.hitCount++;
    this.stats.cacheHits++;
    this.updateHitRate();
    // Rehydrate plain objects into model instances.
    const modelClass = query.getModel() as any; // abstract class, constructed dynamically
    return entry.data.map((item) => new modelClass(item));
  }

  /** Store query results (serialized via toJSON), evicting if full. */
  set<T extends BaseModel>(query: QueryBuilder<T>, data: T[], customTTL?: number): void {
    const key = this.generateKey(query);
    const ttl = customTTL || this.defaultTTL;
    const serializedData = data.map((item) => item.toJSON());
    const entry: CacheEntry<any> = {
      key,
      data: serializedData,
      timestamp: Date.now(),
      ttl,
      hitCount: 0,
    };
    if (this.cache.size >= this.maxSize) {
      this.evictLeastUsed();
    }
    this.cache.set(key, entry);
    this.stats.size = this.cache.size;
  }

  /** Remove the entry for one specific query. True if it existed. */
  invalidate<T extends BaseModel>(query: QueryBuilder<T>): boolean {
    const key = this.generateKey(query);
    const deleted = this.cache.delete(key);
    this.stats.size = this.cache.size;
    return deleted;
  }

  /**
   * Remove every cached entry belonging to a model, matched by the
   * hashed-model-name key prefix. Returns the number of entries removed.
   */
  invalidateByModel(modelName: string): number {
    const prefix = `${this.hashString(modelName)}:`;
    let deletedCount = 0;
    for (const key of [...this.cache.keys()]) {
      if (key.startsWith(prefix)) {
        this.cache.delete(key);
        deletedCount++;
      }
    }
    this.stats.size = this.cache.size;
    return deletedCount;
  }

  /**
   * Remove entries whose cached data mentions a user id.
   * NOTE(review): this is a substring scan over serialized data — it can
   * over-match if the id appears in unrelated fields.
   */
  invalidateByUser(userId: string): number {
    let deletedCount = 0;
    for (const [key, entry] of this.cache.entries()) {
      if (this.entryContainsUser(entry, userId)) {
        this.cache.delete(key);
        deletedCount++;
      }
    }
    this.stats.size = this.cache.size;
    return deletedCount;
  }

  /** Drop all entries and reset every statistic. */
  clear(): void {
    this.cache.clear();
    this.stats.size = 0;
    this.stats.totalRequests = 0;
    this.stats.cacheHits = 0;
    this.stats.cacheMisses = 0;
    this.stats.hitRate = 0;
  }

  getStats(): CacheStats {
    return { ...this.stats };
  }

  /** Cache warming — execute and cache a set of frequent queries. */
  async warmup<T extends BaseModel>(queries: QueryBuilder<T>[]): Promise<void> {
    console.log(`🔥 Warming up cache with ${queries.length} queries...`);
    const promises = queries.map(async (query) => {
      try {
        const results = await query.exec();
        this.set(query, results);
        console.log(`✓ Cached query for ${query.getModel().name}`);
      } catch (error) {
        console.warn(`Failed to warm cache for ${query.getModel().name}:`, error);
      }
    });
    await Promise.all(promises);
    console.log(`✅ Cache warmup completed`);
  }

  /** Most-hit entries, with their age, for inspection. */
  getPopularEntries(limit: number = 10): Array<{ key: string; hitCount: number; age: number }> {
    return Array.from(this.cache.entries())
      .map(([key, entry]) => ({
        key,
        hitCount: entry.hitCount,
        age: Date.now() - entry.timestamp,
      }))
      .sort((a, b) => b.hitCount - a.hitCount)
      .slice(0, limit);
  }

  /** Keys whose TTL has elapsed (entries are not removed here). */
  getExpiredEntries(): string[] {
    const now = Date.now();
    const expired: string[] = [];
    for (const [key, entry] of this.cache.entries()) {
      if (now - entry.timestamp > entry.ttl) {
        expired.push(key);
      }
    }
    return expired;
  }

  /** Remove expired entries; returns how many were dropped. */
  cleanup(): number {
    const expired = this.getExpiredEntries();
    for (const key of expired) {
      this.cache.delete(key);
    }
    this.stats.size = this.cache.size;
    return expired.length;
  }

  /** Shrink-to-fit capacity change: evicts until within the new size. */
  setMaxSize(size: number): void {
    this.maxSize = size;
    this.stats.maxSize = size;
    while (this.cache.size > size) {
      this.evictLeastUsed();
    }
  }

  setDefaultTTL(ttl: number): void {
    this.defaultTTL = ttl;
  }

  /** Rough usage metrics: counts, averages, and estimated memory. */
  analyzeUsage(): {
    totalEntries: number;
    averageHitCount: number;
    averageAge: number;
    memoryUsage: number;
  } {
    const entries = Array.from(this.cache.values());
    const now = Date.now();
    const totalHits = entries.reduce((sum, entry) => sum + entry.hitCount, 0);
    const totalAge = entries.reduce((sum, entry) => sum + (now - entry.timestamp), 0);
    // Memory estimated from serialized payload length (approximate).
    const memoryUsage = entries.reduce((sum, entry) => {
      return sum + JSON.stringify(entry.data).length;
    }, 0);
    return {
      totalEntries: entries.length,
      averageHitCount: entries.length > 0 ? totalHits / entries.length : 0,
      averageAge: entries.length > 0 ? totalAge / entries.length : 0,
      memoryUsage,
    };
  }

  /** Evict the entry with the worst (hit count minus age penalty) score. */
  private evictLeastUsed(): void {
    if (this.cache.size === 0) return;
    let leastUsedKey: string | null = null;
    let leastUsedScore = Infinity;
    for (const [key, entry] of this.cache.entries()) {
      const age = Date.now() - entry.timestamp;
      // Lower score = less valuable: few hits and/or old.
      const score = entry.hitCount - age / 1000000;
      if (score < leastUsedScore) {
        leastUsedScore = score;
        leastUsedKey = key;
      }
    }
    if (leastUsedKey) {
      this.cache.delete(leastUsedKey);
      this.stats.size = this.cache.size;
    }
  }

  /** Substring check for a user id in the serialized entry payload. */
  private entryContainsUser(entry: CacheEntry<any>, userId: string): boolean {
    try {
      const dataStr = JSON.stringify(entry.data);
      return dataStr.includes(userId);
    } catch {
      return false;
    }
  }

  private updateHitRate(): void {
    if (this.stats.totalRequests > 0) {
      this.stats.hitRate = this.stats.cacheHits / this.stats.totalRequests;
    }
  }

  /** Non-cryptographic 32-bit string hash rendered in base-36. */
  private hashString(str: string): string {
    let hash = 0;
    if (str.length === 0) return hash.toString();
    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = (hash << 5) - hash + char;
      hash = hash & hash; // keep within 32-bit integer range
    }
    return Math.abs(hash).toString(36);
  }
}

View File

@ -0,0 +1,619 @@
import { BaseModel } from '../models/BaseModel';
import { QueryBuilder } from './QueryBuilder';
import { QueryCondition } from '../types/queries';
import { StoreType } from '../types/framework';
import { QueryOptimizer, QueryPlan } from './QueryOptimizer';
export class QueryExecutor<T extends BaseModel> {
private model: typeof BaseModel;
private query: QueryBuilder<T>;
private framework: any; // Will be properly typed later
private queryPlan?: QueryPlan;
private useCache: boolean = true;
constructor(model: typeof BaseModel, query: QueryBuilder<T>) {
  this.model = model;
  this.query = query;
  // Resolve the shared framework object (database manager, query cache,
  // shard manager) — presumably a singleton; see getFrameworkInstance().
  this.framework = this.getFrameworkInstance();
}
// Run the query end to end: build a plan, consult the cache, dispatch
// by model scope (user vs global), then cache and return the results.
async execute(): Promise<T[]> {
  const startTime = Date.now();
  console.log(`🔍 Executing query for ${this.model.name} (${this.model.scope})`);
  // Generate query plan for optimization
  this.queryPlan = QueryOptimizer.analyzeQuery(this.query);
  console.log(
    `📊 Query plan: ${this.queryPlan.strategy} (cost: ${this.queryPlan.estimatedCost})`,
  );
  // Check cache first if enabled
  if (this.useCache && this.framework.queryCache) {
    const cached = await this.framework.queryCache.get(this.query);
    if (cached) {
      console.log(`⚡ Cache hit for ${this.model.name} query`);
      return cached;
    }
  }
  // Execute query based on scope
  let results: T[];
  if (this.model.scope === 'user') {
    results = await this.executeUserScopedQuery();
  } else {
    results = await this.executeGlobalQuery();
  }
  // Cache results if enabled; note empty result sets are not cached.
  if (this.useCache && this.framework.queryCache && results.length > 0) {
    this.framework.queryCache.set(this.query, results);
  }
  const duration = Date.now() - startTime;
  console.log(`✅ Query completed in ${duration}ms, returned ${results.length} results`);
  return results;
}
// Count matching rows by materializing the result set.
// NOTE(review): this honors limit/offset because it goes through
// execute() — confirm that is the intended counting semantics.
async count(): Promise<number> {
  const matches = await this.execute();
  return matches.length;
}
async sum(field: string): Promise<number> {
const results = await this.execute();
return results.reduce((sum, item) => {
const value = this.getNestedValue(item, field);
return sum + (typeof value === 'number' ? value : 0);
}, 0);
}
// Average of a (possibly nested) numeric field; 0 for an empty result.
// Computed from a single execute() — the previous version ran the query
// twice (once for the count, once again inside sum()).
async avg(field: string): Promise<number> {
  const results = await this.execute();
  if (results.length === 0) return 0;
  const total = results.reduce((sum, item) => {
    const value = this.getNestedValue(item, field);
    return sum + (typeof value === 'number' ? value : 0);
  }, 0);
  return total / results.length;
}
async min(field: string): Promise<any> {
const results = await this.execute();
if (results.length === 0) return null;
return results.reduce((min, item) => {
const value = this.getNestedValue(item, field);
return min === null || value < min ? value : min;
}, null);
}
async max(field: string): Promise<any> {
const results = await this.execute();
if (results.length === 0) return null;
return results.reduce((max, item) => {
const value = this.getNestedValue(item, field);
return max === null || value > max ? value : max;
}, null);
}
// Dispatch a query on a user-scoped model: route to specific user
// databases when a userId filter is present, otherwise go through the
// model's global index.
private async executeUserScopedQuery(): Promise<T[]> {
  const conditions = this.query.getConditions();
  // Check if we have user-specific filters
  const userFilter = conditions.find((c) => c.field === 'userId' || c.operator === 'userIn');
  if (userFilter) {
    return await this.executeUserSpecificQuery(userFilter);
  } else {
    // Global query on user-scoped data - use global index
    return await this.executeGlobalIndexQuery();
  }
}
// Query the per-user databases named by a userId/userIn condition and
// merge the partial results.
private async executeUserSpecificQuery(userFilter: QueryCondition): Promise<T[]> {
  // Normalize to a list of user ids ('=' gives one, 'userIn' gives many).
  const userIds = userFilter.operator === 'userIn' ? userFilter.value : [userFilter.value];
  console.log(`👤 Querying user databases for ${userIds.length} users`);
  // Fan out to each user's database in parallel; a failing user yields [].
  const perUser = await Promise.all(
    userIds.map(async (userId: string) => {
      try {
        const userDB = await this.framework.databaseManager.getUserDatabase(
          userId,
          this.model.modelName,
        );
        return await this.queryDatabase(userDB, this.model.dbType);
      } catch (error) {
        console.warn(`Failed to query user ${userId} database:`, error);
        return [];
      }
    }),
  );
  // Merge, then re-apply sorting and limit/offset across the combined set.
  return this.postProcessResults(perUser.flat());
}
// Answer a user-scoped query with no userId filter via the model's
// global index shards, then resolve index hits to real documents.
private async executeGlobalIndexQuery(): Promise<T[]> {
  console.log(`📇 Querying global index for ${this.model.name}`);
  // Query global index for user-scoped models
  const globalIndexName = `${this.model.modelName}GlobalIndex`;
  const indexShards = this.framework.shardManager.getAllShards(globalIndexName);
  if (!indexShards || indexShards.length === 0) {
    console.warn(`No global index found for ${this.model.name}, falling back to all users query`);
    return await this.executeAllUsersQuery();
  }
  const indexResults: any[] = [];
  // Query all index shards in parallel; shards are keyvalue stores.
  const promises = indexShards.map((shard: any) =>
    this.queryDatabase(shard.database, 'keyvalue'),
  );
  const shardResults = await Promise.all(promises);
  for (const shardResult of shardResults) {
    indexResults.push(...shardResult);
  }
  // Now fetch actual documents from user databases
  return await this.fetchActualDocuments(indexResults);
}
// Fallback when no global index exists. A complete answer would require
// scanning every user database, which is prohibitively expensive and is
// currently not implemented: this returns [] after logging warnings.
private async executeAllUsersQuery(): Promise<T[]> {
  // This is a fallback for when global index is not available
  // It's expensive but ensures completeness
  console.warn(`⚠️ Executing expensive all-users query for ${this.model.name}`);
  // This would require getting all user IDs from the directory
  // For now, return empty array and log warning
  console.warn('All-users query not implemented - please ensure global indexes are set up');
  return [];
}
// Execute a query for a globally scoped model: fan out over shards when
// the model is sharded, otherwise hit its single global database.
private async executeGlobalQuery(): Promise<T[]> {
  if (this.model.sharding) {
    return await this.executeShardedQuery();
  }
  const db = await this.framework.databaseManager.getGlobalDatabase(this.model.modelName);
  return await this.queryDatabase(db, this.model.dbType);
}
// Execute a query against a sharded global model.
// Routing: '=' on the shard key hits exactly one shard; 'in' hits the
// listed shards; any other shape fans out to every shard.
private async executeShardedQuery(): Promise<T[]> {
  console.log(`🔀 Executing sharded query for ${this.model.name}`);
  const conditions = this.query.getConditions();
  const shardingConfig = this.model.sharding!;
  // Check if we can route to specific shard(s)
  const shardKeyCondition = conditions.find((c) => c.field === shardingConfig.key);
  if (shardKeyCondition && shardKeyCondition.operator === '=') {
    // Single shard query
    const shard = this.framework.shardManager.getShardForKey(
      this.model.modelName,
      shardKeyCondition.value,
    );
    return await this.queryDatabase(shard.database, this.model.dbType);
  } else if (shardKeyCondition && shardKeyCondition.operator === 'in') {
    // Multiple specific shards, queried in parallel
    const results: T[] = [];
    const shardKeys = shardKeyCondition.value;
    const shardQueries = shardKeys.map(async (key: string) => {
      const shard = this.framework.shardManager.getShardForKey(this.model.modelName, key);
      return await this.queryDatabase(shard.database, this.model.dbType);
    });
    const shardResults = await Promise.all(shardQueries);
    for (const shardResult of shardResults) {
      results.push(...shardResult);
    }
    // Re-sort and re-window across the merged shard partials
    return this.postProcessResults(results);
  } else {
    // Query all shards
    const results: T[] = [];
    const allShards = this.framework.shardManager.getAllShards(this.model.modelName);
    const promises = allShards.map((shard: any) =>
      this.queryDatabase(shard.database, this.model.dbType),
    );
    const shardResults = await Promise.all(promises);
    for (const shardResult of shardResults) {
      results.push(...shardResult);
    }
    return this.postProcessResults(results);
  }
}
// Load every document from one OrbitDB store, then filter/sort/window in
// memory (the stores themselves are not queryable) and hydrate models.
private async queryDatabase(database: any, dbType: StoreType): Promise<T[]> {
  let documents: any[];
  try {
    documents = await this.framework.databaseManager.getAllDocuments(database, dbType);
  } catch (error) {
    console.error(`Error querying ${dbType} database:`, error);
    return [];
  }
  // Filter, sort, and apply limit/offset, in that order.
  const narrowed = this.applyLimitOffset(this.applySorting(this.applyFilters(documents)));
  // Hydrate plain documents into model instances.
  const ModelClass = this.model as any; // abstract class, constructed dynamically
  return narrowed.map((doc) => new ModelClass(doc) as T);
}
// Resolve global-index hits to real documents: group hits by userId,
// open each user's database once, and fetch each referenced document by
// id. Per-document and per-user failures are logged and skipped.
private async fetchActualDocuments(indexResults: any[]): Promise<T[]> {
  console.log(`📄 Fetching ${indexResults.length} documents from user databases`);
  const results: T[] = [];
  // Group by userId for efficient database access
  const userGroups = new Map<string, any[]>();
  for (const indexEntry of indexResults) {
    const userId = indexEntry.userId;
    if (!userGroups.has(userId)) {
      userGroups.set(userId, []);
    }
    userGroups.get(userId)!.push(indexEntry);
  }
  // Fetch documents from each user's database, users in parallel
  const promises = Array.from(userGroups.entries()).map(async ([userId, entries]) => {
    try {
      const userDB = await this.framework.databaseManager.getUserDatabase(
        userId,
        this.model.modelName,
      );
      const userResults: T[] = [];
      // Fetch specific documents by ID (sequential within one user DB)
      for (const entry of entries) {
        try {
          const doc = await this.getDocumentById(userDB, this.model.dbType, entry.id);
          if (doc) {
            const ModelClass = this.model as any; // Type assertion for abstract class
            userResults.push(new ModelClass(doc) as T);
          }
        } catch (error) {
          console.warn(`Failed to fetch document ${entry.id} from user ${userId}:`, error);
        }
      }
      return userResults;
    } catch (error) {
      console.warn(`Failed to access user ${userId} database:`, error);
      return [];
    }
  });
  const userResults = await Promise.all(promises);
  // Flatten results
  for (const userResult of userResults) {
    results.push(...userResult);
  }
  // Re-apply sorting and limit/offset across the combined set
  return this.postProcessResults(results);
}
private async getDocumentById(database: any, dbType: StoreType, id: string): Promise<any | null> {
try {
switch (dbType) {
case 'keyvalue':
return await database.get(id);
case 'docstore':
return await database.get(id);
case 'eventlog':
case 'feed':
// For append-only stores, we need to search through entries
const iterator = database.iterator();
const entries = iterator.collect();
return (
entries.find((entry: any) => entry.payload?.value?.id === id)?.payload?.value || null
);
default:
return null;
}
} catch (error) {
console.warn(`Error fetching document ${id} from ${dbType}:`, error);
return null;
}
}
// Keep only the documents that satisfy every condition (AND semantics).
private applyFilters(documents: any[]): any[] {
  const conditions = this.query.getConditions();
  return documents.filter((doc) =>
    conditions.every((condition) => this.evaluateCondition(doc, condition)),
  );
}
// Evaluate one query condition against one document.
// Special cases: 'or' groups recurse over sub-conditions; '__raw__'
// conditions are not implemented and pass. Unknown operators log a
// warning and pass (treated as true), so bad queries over-match rather
// than drop rows.
private evaluateCondition(doc: any, condition: QueryCondition): boolean {
  const { field, operator, value } = condition;
  // Handle special operators
  if (operator === 'or') {
    // OR group: true if any sub-condition matches
    return value.some((subCondition: QueryCondition) =>
      this.evaluateCondition(doc, subCondition),
    );
  }
  if (field === '__raw__') {
    // Raw conditions would need custom evaluation
    console.warn('Raw conditions not fully implemented');
    return true;
  }
  const docValue = this.getNestedValue(doc, field);
  switch (operator) {
    case '=':
    case '==':
      return docValue === value;
    case '!=':
    case '<>':
      return docValue !== value;
    case '>':
      return docValue > value;
    case '>=':
    case 'gte':
      return docValue >= value;
    case '<':
      return docValue < value;
    case '<=':
    case 'lte':
      return docValue <= value;
    case 'in':
      return Array.isArray(value) && value.includes(docValue);
    case 'not_in':
      return Array.isArray(value) && !value.includes(docValue);
    case 'contains':
      return Array.isArray(docValue) && docValue.includes(value);
    // NOTE(review): 'like' is case-insensitive here, making it identical
    // to 'ilike' — confirm whether 'like' was meant to be case-sensitive.
    case 'like':
      return String(docValue).toLowerCase().includes(String(value).toLowerCase());
    case 'ilike':
      return String(docValue).toLowerCase().includes(String(value).toLowerCase());
    case 'is_null':
      return docValue === null || docValue === undefined;
    case 'is_not_null':
      return docValue !== null && docValue !== undefined;
    case 'between':
      // value is [min, max], inclusive on both ends
      return Array.isArray(value) && docValue >= value[0] && docValue <= value[1];
    case 'array_contains':
      return Array.isArray(docValue) && docValue.includes(value);
    case 'array_length_=':
      return Array.isArray(docValue) && docValue.length === value;
    case 'array_length_>':
      return Array.isArray(docValue) && docValue.length > value;
    case 'array_length_<':
      return Array.isArray(docValue) && docValue.length < value;
    case 'object_has_key':
      return typeof docValue === 'object' && docValue !== null && value in docValue;
    case 'date_=':
      return this.compareDates(docValue, '=', value);
    case 'date_>':
      return this.compareDates(docValue, '>', value);
    case 'date_<':
      return this.compareDates(docValue, '<', value);
    case 'date_between':
      return (
        this.compareDates(docValue, '>=', value[0]) && this.compareDates(docValue, '<=', value[1])
      );
    case 'year':
      return this.getDatePart(docValue, 'year') === value;
    case 'month':
      return this.getDatePart(docValue, 'month') === value;
    case 'day':
      return this.getDatePart(docValue, 'day') === value;
    default:
      console.warn(`Unsupported operator: ${operator}`);
      return true;
  }
}
// Compare two date-like values after normalization; false when either
// side cannot be parsed or the operator is unknown.
private compareDates(docValue: any, operator: string, compareValue: any): boolean {
  const left = this.normalizeDate(docValue);
  const right = this.normalizeDate(compareValue);
  if (!left || !right) return false;
  const a = left.getTime();
  const b = right.getTime();
  switch (operator) {
    case '=':
      return a === b;
    case '>':
      return a > b;
    case '<':
      return a < b;
    case '>=':
      return a >= b;
    case '<=':
      return a <= b;
    default:
      return false;
  }
}
/**
 * Coerce a value into a Date: Date instances pass through, numbers are
 * treated as epoch milliseconds, strings are parsed; anything else is null.
 */
private normalizeDate(value: any): Date | null {
  if (value instanceof Date) return value;
  const kind = typeof value;
  return kind === 'number' || kind === 'string' ? new Date(value) : null;
}
/**
 * Extract one calendar component from a date-like value.
 * Returns null when the value is not a valid date-like input.
 */
private getDatePart(value: any, part: 'year' | 'month' | 'day'): number | null {
  const date = this.normalizeDate(value);
  if (date === null) return null;
  if (part === 'year') return date.getFullYear();
  if (part === 'month') return date.getMonth() + 1; // convert JS 0-based month to 1-based
  if (part === 'day') return date.getDate();
  return null;
}
/**
 * Sort documents by the query's multi-field sort spec; earlier fields take
 * precedence, and 'desc' inverts each field's natural ordering.
 * Sorts in place (Array.prototype.sort) and returns the same array.
 */
private applySorting(documents: any[]): any[] {
  const sorting = this.query.getSorting();
  if (!sorting.length) return documents;
  const compare = (a: any, b: any): number => {
    for (const { field, direction } of sorting) {
      const left = this.getNestedValue(a, field);
      const right = this.getNestedValue(b, field);
      let order = 0;
      if (left < right) order = -1;
      else if (left > right) order = 1;
      if (order !== 0) {
        return direction === 'desc' ? -order : order;
      }
    }
    return 0; // all sort fields tied
  };
  return documents.sort(compare);
}
/**
 * Apply the query's paging window: drop the first `offset` documents, then
 * keep at most `limit`. Non-positive or missing values leave the list as-is.
 */
private applyLimitOffset(documents: any[]): any[] {
  const limit = this.query.getLimit();
  const offset = this.query.getOffset();
  const skipped = offset && offset > 0 ? documents.slice(offset) : documents;
  return limit && limit > 0 ? skipped.slice(0, limit) : skipped;
}
/**
 * Final pass over results merged from all target databases:
 * globally re-sort, then apply the global limit/offset window.
 */
private postProcessResults(results: T[]): T[] {
  const sorted = this.applySorting(results);
  return this.applyLimitOffset(sorted);
}
private getNestedValue(obj: any, path: string): any {
if (!path) return obj;
const keys = path.split('.');
let current = obj;
for (const key of keys) {
if (current === null || current === undefined) {
return undefined;
}
current = current[key];
}
return current;
}
// Public methods for query control
/** Disable the per-query result cache; chainable. */
disableCache(): this {
  this.useCache = false;
  return this;
}
/** Re-enable the per-query result cache; chainable. */
enableCache(): this {
  this.useCache = true;
  return this;
}
/** The execution plan computed for this query, if one exists yet. */
getQueryPlan(): QueryPlan | undefined {
  return this.queryPlan;
}
/**
 * Describe this query without executing it: the builder's own explain
 * output, the (possibly freshly computed) plan, optimizer suggestions, and
 * a heuristic result-size estimate.
 */
explain(): any {
  const plan = this.queryPlan || QueryOptimizer.analyzeQuery(this.query);
  const suggestions = QueryOptimizer.suggestOptimizations(this.query);
  return {
    query: this.query.explain(),
    plan,
    suggestions,
    estimatedResultSize: QueryOptimizer.estimateResultSize(this.query),
  };
}
/**
 * Fetch the globally registered framework instance (set during
 * framework.initialize()). Throws if the framework is not initialized yet.
 */
private getFrameworkInstance(): any {
  const framework = (globalThis as any).__debrosFramework;
  if (!framework) {
    throw new Error('Framework not initialized. Call framework.initialize() first.');
  }
  return framework;
}
}

View File

@ -0,0 +1,254 @@
import { QueryBuilder } from './QueryBuilder';
import { QueryCondition } from '../types/queries';
import { BaseModel } from '../models/BaseModel';
/** Result of QueryOptimizer.analyzeQuery: how a query should be executed. */
export interface QueryPlan {
  /** Database-access strategy derived from model scope/sharding and filters. */
  strategy: 'single_user' | 'multi_user' | 'global_index' | 'all_shards' | 'specific_shards';
  /** Names of the databases the executor should target. */
  targetDatabases: string[];
  /** Heuristic, unit-less relative cost (lower is cheaper). */
  estimatedCost: number;
  /** Indexes that could accelerate this query. */
  indexHints: string[];
  /** Human-readable notes on the optimizations the planner applied. */
  optimizations: string[];
}
/**
 * Heuristic query planner for the framework's QueryBuilder.
 *
 * All methods are static and side-effect free: they inspect a query's model
 * metadata and conditions to pick a database-access strategy, estimate a
 * relative cost (unit-less numbers, only meaningful for comparison), and
 * suggest indexes or query rewrites. Nothing here executes a query.
 */
export class QueryOptimizer {
  /**
   * Choose an access strategy for `query`:
   * - user-scoped models: direct per-user database(s) when a userId filter
   *   exists, otherwise the model's global index;
   * - sharded global models: specific shard(s) when the shard key is
   *   filtered, otherwise a full shard scan;
   * - unsharded global models: the single global database.
   */
  static analyzeQuery<T extends BaseModel>(query: QueryBuilder<T>): QueryPlan {
    const model = query.getModel();
    const conditions = query.getConditions();
    const relations = query.getRelations();
    const limit = query.getLimit();
    let strategy: QueryPlan['strategy'] = 'all_shards';
    let targetDatabases: string[] = [];
    let estimatedCost = 100; // Base cost
    let indexHints: string[] = [];
    let optimizations: string[] = [];
    // Analyze based on model scope
    if (model.scope === 'user') {
      const userConditions = conditions.filter(
        (c) => c.field === 'userId' || c.operator === 'userIn',
      );
      if (userConditions.length > 0) {
        // Only the first user filter drives the plan.
        const userCondition = userConditions[0];
        if (userCondition.operator === 'userIn') {
          strategy = 'multi_user';
          // Per-user databases are named `<userId>-<modelname>`.
          targetDatabases = userCondition.value.map(
            (userId: string) => `${userId}-${model.modelName.toLowerCase()}`,
          );
          estimatedCost = 20 * userCondition.value.length;
          optimizations.push('Direct user database access');
        } else {
          strategy = 'single_user';
          targetDatabases = [`${userCondition.value}-${model.modelName.toLowerCase()}`];
          estimatedCost = 10;
          optimizations.push('Single user database access');
        }
      } else {
        // No user filter: fall back to the model's global index database.
        strategy = 'global_index';
        targetDatabases = [`${model.modelName}GlobalIndex`];
        estimatedCost = 50;
        indexHints.push(`${model.modelName}GlobalIndex`);
        optimizations.push('Global index lookup');
      }
    } else {
      // Global model
      if (model.sharding) {
        const shardKeyCondition = conditions.find((c) => c.field === model.sharding!.key);
        if (shardKeyCondition) {
          if (shardKeyCondition.operator === '=') {
            strategy = 'specific_shards';
            targetDatabases = [`${model.modelName}-shard-specific`];
            estimatedCost = 15;
            optimizations.push('Single shard access');
          } else if (shardKeyCondition.operator === 'in') {
            strategy = 'specific_shards';
            targetDatabases = shardKeyCondition.value.map(
              (_: any, i: number) => `${model.modelName}-shard-${i}`,
            );
            estimatedCost = 15 * shardKeyCondition.value.length;
            optimizations.push('Multiple specific shards');
          }
        } else {
          // Shard key not constrained: every shard must be scanned.
          strategy = 'all_shards';
          estimatedCost = 30 * (model.sharding.count || 4);
          optimizations.push('All shards scan');
        }
      } else {
        strategy = 'single_user'; // Actually single global database
        targetDatabases = [`global-${model.modelName.toLowerCase()}`];
        estimatedCost = 25;
        optimizations.push('Single global database');
      }
    }
    // Adjust cost based on other factors
    if (limit && limit < 100) {
      estimatedCost *= 0.8;
      optimizations.push(`Limit optimization (${limit})`);
    }
    if (relations.length > 0) {
      // Each eager-loaded relation adds ~30% to the cost.
      estimatedCost *= 1 + relations.length * 0.3;
      optimizations.push(`Relationship loading (${relations.length})`);
    }
    // Suggest indexes based on conditions
    const indexedFields = conditions
      .filter((c) => c.field !== 'userId' && c.field !== '__or__' && c.field !== '__raw__')
      .map((c) => c.field);
    if (indexedFields.length > 0) {
      indexHints.push(...indexedFields.map((field) => `${model.modelName}_${field}_idx`));
    }
    return {
      strategy,
      targetDatabases,
      estimatedCost,
      indexHints,
      optimizations,
    };
  }
  /**
   * Deduplicate conditions (same field/operator/value triple) and order the
   * survivors so the most selective conditions are evaluated first.
   */
  static optimizeConditions(conditions: QueryCondition[]): QueryCondition[] {
    const optimized = [...conditions];
    // Remove redundant conditions
    const seen = new Set();
    const filtered = optimized.filter((condition) => {
      const key = `${condition.field}_${condition.operator}_${JSON.stringify(condition.value)}`;
      if (seen.has(key)) {
        return false;
      }
      seen.add(key);
      return true;
    });
    // Sort conditions by selectivity (most selective first)
    return filtered.sort((a, b) => {
      const selectivityA = this.getConditionSelectivity(a);
      const selectivityB = this.getConditionSelectivity(b);
      return selectivityA - selectivityB;
    });
  }
  /** Heuristic selectivity rank for an operator (lower = more selective). */
  private static getConditionSelectivity(condition: QueryCondition): number {
    // Lower numbers = more selective (better to evaluate first)
    switch (condition.operator) {
      case '=':
        return 1;
      case 'in':
        return Array.isArray(condition.value) ? condition.value.length : 10;
      case '>':
      case '<':
      case '>=':
      case '<=':
        return 50;
      case 'like':
      case 'ilike':
        return 75;
      case 'is_not_null':
        return 90;
      default:
        return 100;
    }
  }
  /**
   * Whether a condition on `field` with `operator` can profit from an index:
   * true when the model declares an index on the field, or when the operator
   * is one that index lookups accelerate.
   */
  static shouldUseIndex(field: string, operator: string, model: typeof BaseModel): boolean {
    // Check if field has index configuration
    const fieldConfig = model.fields?.get(field);
    if (fieldConfig?.index) {
      return true;
    }
    // Certain operators benefit from indexes
    const indexBeneficialOps = ['=', 'in', '>', '<', '>=', '<=', 'between'];
    return indexBeneficialOps.includes(operator);
  }
  /**
   * Rough expected row count: the query's limit when present, otherwise a
   * base of 1000 scaled down by per-operator selectivity factors.
   * Always at least 1.
   */
  static estimateResultSize(query: QueryBuilder<any>): number {
    const conditions = query.getConditions();
    const limit = query.getLimit();
    // If there's a limit, that's our upper bound
    if (limit) {
      return limit;
    }
    // Estimate based on conditions
    let estimate = 1000; // Base estimate
    for (const condition of conditions) {
      switch (condition.operator) {
        case '=':
          estimate *= 0.1; // Very selective
          break;
        case 'in':
          estimate *= Array.isArray(condition.value) ? condition.value.length * 0.1 : 0.1;
          break;
        case '>':
        case '<':
        case '>=':
        case '<=':
          estimate *= 0.5; // Moderately selective
          break;
        case 'like':
          estimate *= 0.3; // Somewhat selective
          break;
        default:
          estimate *= 0.8;
      }
    }
    return Math.max(1, Math.round(estimate));
  }
  /**
   * Produce human-readable tuning advice for a query: missing user filters
   * on user-scoped models, missing limits on large result sets, unindexed
   * fields, expensive string/array operators, and OR conditions.
   */
  static suggestOptimizations<T extends BaseModel>(query: QueryBuilder<T>): string[] {
    const suggestions: string[] = [];
    const conditions = query.getConditions();
    const model = query.getModel();
    const limit = query.getLimit();
    // Check for missing userId in user-scoped queries
    if (model.scope === 'user') {
      const hasUserFilter = conditions.some((c) => c.field === 'userId' || c.operator === 'userIn');
      if (!hasUserFilter) {
        suggestions.push('Add userId filter to avoid expensive global index query');
      }
    }
    // Check for missing limit on potentially large result sets
    if (!limit) {
      const estimatedSize = this.estimateResultSize(query);
      if (estimatedSize > 100) {
        suggestions.push('Add limit() to prevent large result sets');
      }
    }
    // Check for unindexed field queries
    for (const condition of conditions) {
      if (!this.shouldUseIndex(condition.field, condition.operator, model)) {
        suggestions.push(`Consider adding index for field: ${condition.field}`);
      }
    }
    // Check for expensive operations
    const expensiveOps = conditions.filter((c) =>
      ['like', 'ilike', 'array_contains'].includes(c.operator),
    );
    if (expensiveOps.length > 0) {
      suggestions.push('Consider using more selective filters before expensive operations');
    }
    // Check for OR conditions
    const orConditions = conditions.filter((c) => c.operator === 'or');
    if (orConditions.length > 0) {
      suggestions.push('OR conditions can be expensive, consider restructuring query');
    }
    return suggestions;
  }
}

View File

@ -0,0 +1,441 @@
import { BaseModel } from '../models/BaseModel';
import { RelationshipConfig } from '../types/models';
import { RelationshipManager, RelationshipLoadOptions } from './RelationshipManager';
/**
 * A Promise for a lazily loaded relationship, extended with synchronous
 * introspection (isLoaded/getLoadedValue) and a cache-busting reload().
 */
export interface LazyLoadPromise<T> extends Promise<T> {
  /** True once the underlying load has resolved successfully. */
  isLoaded(): boolean;
  /** The resolved value, or undefined while not yet loaded. */
  getLoadedValue(): T | undefined;
  /** Invalidate the relationship cache and load again, optionally with merged options. */
  reload(options?: RelationshipLoadOptions): Promise<T>;
}
/**
 * Builds lazy-loading wrappers around model relationships.
 *
 * Two flavors:
 * - createLazyProperty: a Promise augmented with isLoaded/getLoadedValue/
 *   reload so callers can await it or inspect it synchronously;
 * - createLazyPropertyWithProxy: a Proxy that mimics the related object (or
 *   array) and transparently triggers loading on first access.
 */
export class LazyLoader {
  private relationshipManager: RelationshipManager;
  constructor(relationshipManager: RelationshipManager) {
    this.relationshipManager = relationshipManager;
  }
  /**
   * Create an awaitable lazy handle for one relationship of `instance`.
   * The load starts on first await, is shared across awaits, and resets
   * itself on failure so a later await retries.
   */
  createLazyProperty<T>(
    instance: BaseModel,
    relationshipName: string,
    config: RelationshipConfig,
    options: RelationshipLoadOptions = {},
  ): LazyLoadPromise<T> {
    // Closure state shared by the promise and its introspection methods.
    let loadPromise: Promise<T> | null = null;
    let loadedValue: T | undefined = undefined;
    let isLoaded = false;
    const loadRelationship = async (): Promise<T> => {
      if (loadPromise) {
        return loadPromise;
      }
      loadPromise = this.relationshipManager
        .loadRelationship(instance, relationshipName, options)
        .then((result: T) => {
          loadedValue = result;
          isLoaded = true;
          return result;
        })
        .catch((error) => {
          loadPromise = null; // Reset so it can be retried
          throw error;
        });
      return loadPromise;
    };
    const reload = async (newOptions?: RelationshipLoadOptions): Promise<T> => {
      // Clear cache for this relationship
      this.relationshipManager.invalidateRelationshipCache(instance, relationshipName);
      // Reset state
      loadPromise = null;
      loadedValue = undefined;
      isLoaded = false;
      // Load with new options
      const finalOptions = newOptions ? { ...options, ...newOptions } : options;
      return this.relationshipManager.loadRelationship(instance, relationshipName, finalOptions);
    };
    // Create the main promise
    const promise = loadRelationship() as LazyLoadPromise<T>;
    // Add custom methods
    promise.isLoaded = () => isLoaded;
    promise.getLoadedValue = () => loadedValue;
    promise.reload = reload;
    return promise;
  }
  /**
   * Create a Proxy standing in for the related value so existing property
   * access "just works":
   * - belongsTo/hasOne: an object proxy forwarding property reads to the
   *   loaded value (undefined until loaded);
   * - hasMany/manyToMany: an array proxy whose iteration helpers return
   *   async functions that await the load first;
   * - any other type: falls back to the plain LazyLoadPromise.
   */
  createLazyPropertyWithProxy<T>(
    instance: BaseModel,
    relationshipName: string,
    config: RelationshipConfig,
    options: RelationshipLoadOptions = {},
  ): T {
    const lazyPromise = this.createLazyProperty<T>(instance, relationshipName, config, options);
    // For single relationships, return a proxy that loads on property access
    if (config.type === 'belongsTo' || config.type === 'hasOne') {
      return new Proxy({} as any, {
        get(target: any, prop: string | symbol) {
          // Special methods
          if (prop === 'then') {
            return lazyPromise.then.bind(lazyPromise);
          }
          if (prop === 'catch') {
            return lazyPromise.catch.bind(lazyPromise);
          }
          if (prop === 'finally') {
            return lazyPromise.finally.bind(lazyPromise);
          }
          if (prop === 'isLoaded') {
            return lazyPromise.isLoaded;
          }
          if (prop === 'reload') {
            return lazyPromise.reload;
          }
          // If already loaded, return the property from loaded value
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue();
            return loadedValue ? (loadedValue as any)[prop] : undefined;
          }
          // Trigger loading and return undefined for now
          lazyPromise.catch(() => {}); // Prevent unhandled promise rejection
          return undefined;
        },
        has(target: any, prop: string | symbol) {
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue();
            return loadedValue ? prop in (loadedValue as any) : false;
          }
          return false;
        },
        ownKeys(_target: any) {
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue();
            return loadedValue ? Object.keys(loadedValue as any) : [];
          }
          return [];
        },
      });
    }
    // For collection relationships, return a proxy array
    if (config.type === 'hasMany' || config.type === 'manyToMany') {
      return new Proxy([] as any, {
        get(target: any[], prop: string | symbol) {
          // Array methods and properties
          if (prop === 'length') {
            // Length is 0 until the collection has actually loaded.
            if (lazyPromise.isLoaded()) {
              const loadedValue = lazyPromise.getLoadedValue() as any[];
              return loadedValue ? loadedValue.length : 0;
            }
            return 0;
          }
          // Promise methods
          if (prop === 'then') {
            return lazyPromise.then.bind(lazyPromise);
          }
          if (prop === 'catch') {
            return lazyPromise.catch.bind(lazyPromise);
          }
          if (prop === 'finally') {
            return lazyPromise.finally.bind(lazyPromise);
          }
          if (prop === 'isLoaded') {
            return lazyPromise.isLoaded;
          }
          if (prop === 'reload') {
            return lazyPromise.reload;
          }
          // Array methods that should trigger loading
          // NOTE: these become async wrappers — callers must await their result.
          if (
            typeof prop === 'string' &&
            [
              'forEach',
              'map',
              'filter',
              'find',
              'some',
              'every',
              'reduce',
              'slice',
              'indexOf',
              'includes',
            ].includes(prop)
          ) {
            return async (...args: any[]) => {
              const loadedValue = await lazyPromise;
              return (loadedValue as any)[prop](...args);
            };
          }
          // Numeric index access
          if (typeof prop === 'string' && /^\d+$/.test(prop)) {
            if (lazyPromise.isLoaded()) {
              const loadedValue = lazyPromise.getLoadedValue() as any[];
              return loadedValue ? loadedValue[parseInt(prop, 10)] : undefined;
            }
            // Trigger loading
            lazyPromise.catch(() => {});
            return undefined;
          }
          // If already loaded, delegate to the actual array
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue() as any[];
            return loadedValue ? (loadedValue as any)[prop] : undefined;
          }
          return undefined;
        },
        has(target: any[], prop: string | symbol) {
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue() as any[];
            return loadedValue ? prop in loadedValue : false;
          }
          return false;
        },
        ownKeys(_target: any[]) {
          if (lazyPromise.isLoaded()) {
            const loadedValue = lazyPromise.getLoadedValue() as any[];
            return loadedValue ? Object.keys(loadedValue) : [];
          }
          return [];
        },
      }) as T;
    }
    // Fallback to promise for other types
    return lazyPromise as any;
  }
  // Helper method to check if a value is a lazy-loaded relationship
  /** Duck-type check for a LazyLoadPromise produced by this loader. */
  static isLazyLoaded(value: any): value is LazyLoadPromise<any> {
    return (
      value &&
      typeof value === 'object' &&
      typeof value.then === 'function' &&
      typeof value.isLoaded === 'function' &&
      typeof value.reload === 'function'
    );
  }
  // Helper method to await all lazy relationships in an object
  /** Recursively await every lazy relationship found in `obj` (arrays included). */
  static async resolveAllLazy(obj: any): Promise<any> {
    if (!obj || typeof obj !== 'object') {
      return obj;
    }
    if (Array.isArray(obj)) {
      return Promise.all(obj.map((item) => this.resolveAllLazy(item)));
    }
    const resolved: any = {};
    const promises: Array<Promise<void>> = [];
    for (const [key, value] of Object.entries(obj)) {
      if (this.isLazyLoaded(value)) {
        promises.push(
          value.then((resolvedValue) => {
            resolved[key] = resolvedValue;
          }),
        );
      } else {
        resolved[key] = value;
      }
    }
    await Promise.all(promises);
    return resolved;
  }
  // Helper method to get loaded relationships without triggering loading
  /** Snapshot already-resolved relationships on `instance`; never triggers a load. */
  static getLoadedRelationships(instance: BaseModel): Record<string, any> {
    const loaded: Record<string, any> = {};
    const loadedRelations = instance.getLoadedRelations();
    for (const relationName of loadedRelations) {
      const value = instance.getRelation(relationName);
      if (this.isLazyLoaded(value)) {
        if (value.isLoaded()) {
          loaded[relationName] = value.getLoadedValue();
        }
      } else {
        loaded[relationName] = value;
      }
    }
    return loaded;
  }
  // Helper method to preload specific relationships
  /** Eager-load the named relationships for many instances in one batch. */
  static async preloadRelationships(
    instances: BaseModel[],
    relationships: string[],
    relationshipManager: RelationshipManager,
  ): Promise<void> {
    await relationshipManager.eagerLoadRelationships(instances, relationships);
  }
  // Helper method to create lazy collection with advanced features
  /** Build a LazyCollection with pagination/filtering helpers for one relationship. */
  createLazyCollection<T extends BaseModel>(
    instance: BaseModel,
    relationshipName: string,
    config: RelationshipConfig,
    options: RelationshipLoadOptions = {},
  ): LazyCollection<T> {
    return new LazyCollection<T>(
      instance,
      relationshipName,
      config,
      options,
      this.relationshipManager,
    );
  }
}
// Advanced lazy collection with pagination and filtering
/**
 * Incrementally loadable view over a collection relationship.
 *
 * Items are fetched in batches via offset/limit constraints pushed into the
 * relationship query; `loadAll` fetches everything at once. Fixes over the
 * previous version:
 * - loadMore() now derives its offset from the number of items already
 *   loaded, so mixed batch sizes no longer re-fetch overlapping windows
 *   (previously offset was computed as (page - 1) * count);
 * - isLoaded() now reports true for a fully loaded but empty relation.
 */
export class LazyCollection<T extends BaseModel> {
  private instance: BaseModel;
  private relationshipName: string;
  private config: RelationshipConfig;
  private options: RelationshipLoadOptions;
  private relationshipManager: RelationshipManager;
  private loadedItems: T[] = []; // items fetched so far, in load order
  private isFullyLoaded = false; // true once the whole relation is in memory
  private currentPage = 1; // next page index for sequential loadPage() use
  private pageSize = 20; // default batch size
  constructor(
    instance: BaseModel,
    relationshipName: string,
    config: RelationshipConfig,
    options: RelationshipLoadOptions,
    relationshipManager: RelationshipManager,
  ) {
    this.instance = instance;
    this.relationshipName = relationshipName;
    this.config = config;
    this.options = options;
    this.relationshipManager = relationshipManager;
  }
  /**
   * Load one page (1-based) of `pageSize` items by wrapping the caller's
   * constraints with an offset/limit window. Sequential calls at the current
   * page also append to the in-memory item list.
   */
  async loadPage(page: number = 1, pageSize: number = this.pageSize): Promise<T[]> {
    const offset = (page - 1) * pageSize;
    const pageOptions: RelationshipLoadOptions = {
      ...this.options,
      constraints: (query) => {
        let q = query.offset(offset).limit(pageSize);
        if (this.options.constraints) {
          q = this.options.constraints(q);
        }
        return q;
      },
    };
    const pageItems = (await this.relationshipManager.loadRelationship(
      this.instance,
      this.relationshipName,
      pageOptions,
    )) as T[];
    // Update loaded items if this is sequential loading
    if (page === this.currentPage) {
      this.loadedItems.push(...pageItems);
      this.currentPage++;
      // A short page means the source is exhausted.
      if (pageItems.length < pageSize) {
        this.isFullyLoaded = true;
      }
    }
    return pageItems;
  }
  /**
   * Load the next batch of up to `count` items, continuing from the number
   * of items already loaded. Returns an empty array once exhausted.
   */
  async loadMore(count: number = this.pageSize): Promise<T[]> {
    if (this.isFullyLoaded) {
      return [];
    }
    // Offset from what we already hold, so varying batch sizes never overlap.
    const offset = this.loadedItems.length;
    const pageOptions: RelationshipLoadOptions = {
      ...this.options,
      constraints: (query) => {
        let q = query.offset(offset).limit(count);
        if (this.options.constraints) {
          q = this.options.constraints(q);
        }
        return q;
      },
    };
    const items = (await this.relationshipManager.loadRelationship(
      this.instance,
      this.relationshipName,
      pageOptions,
    )) as T[];
    this.loadedItems.push(...items);
    if (items.length < count) {
      this.isFullyLoaded = true;
    }
    // Keep the sequential page cursor roughly in sync for loadPage() callers.
    this.currentPage = Math.floor(this.loadedItems.length / this.pageSize) + 1;
    return items;
  }
  /** Load (and cache) the entire relationship in one query. */
  async loadAll(): Promise<T[]> {
    if (this.isFullyLoaded) {
      return this.loadedItems;
    }
    const allItems = (await this.relationshipManager.loadRelationship(
      this.instance,
      this.relationshipName,
      this.options,
    )) as T[];
    this.loadedItems = allItems;
    this.isFullyLoaded = true;
    return allItems;
  }
  /** A defensive copy of the items loaded so far. */
  getLoadedItems(): T[] {
    return [...this.loadedItems];
  }
  /** True once anything has been loaded — including a complete, empty result. */
  isLoaded(): boolean {
    return this.isFullyLoaded || this.loadedItems.length > 0;
  }
  /** True once the entire relationship is in memory. */
  isCompletelyLoaded(): boolean {
    return this.isFullyLoaded;
  }
  /** Filter over the complete collection (loads everything first). */
  async filter(predicate: (item: T) => boolean): Promise<T[]> {
    if (!this.isFullyLoaded) {
      await this.loadAll();
    }
    return this.loadedItems.filter(predicate);
  }
  /**
   * Find the first matching item, checking already-loaded items before
   * falling back to a full load.
   */
  async find(predicate: (item: T) => boolean): Promise<T | undefined> {
    // Try loaded items first
    const found = this.loadedItems.find(predicate);
    if (found) {
      return found;
    }
    // If not fully loaded, load all and search
    if (!this.isFullyLoaded) {
      await this.loadAll();
      return this.loadedItems.find(predicate);
    }
    return undefined;
  }
  /** Total item count; requires loading the full relation if not done yet. */
  async count(): Promise<number> {
    if (this.isFullyLoaded) {
      return this.loadedItems.length;
    }
    // For a complete count, we need to load all items
    // In a more sophisticated implementation, we might have a separate count query
    await this.loadAll();
    return this.loadedItems.length;
  }
  /** Drop all loaded state so the collection can be re-fetched from scratch. */
  clear(): void {
    this.loadedItems = [];
    this.isFullyLoaded = false;
    this.currentPage = 1;
  }
}

View File

@ -0,0 +1,347 @@
import { BaseModel } from '../models/BaseModel';
export interface RelationshipCacheEntry {
key: string;
data: any;
timestamp: number;
ttl: number;
modelType: string;
relationshipType: string;
}
export interface RelationshipCacheStats {
totalEntries: number;
hitCount: number;
missCount: number;
hitRate: number;
memoryUsage: number;
}
/**
 * TTL cache for loaded model relationships with oldest-entry eviction.
 *
 * Keys are `<Model>:<instanceId>:<relationshipName>[:<hash>]`. Values are
 * serialized snapshots (via toJSON when available); deserialization currently
 * returns the raw data rather than reconstructed model instances.
 *
 * Fixes over the previous version:
 * - generateKey no longer crashes when `extraData` is a function
 *   (JSON.stringify returns undefined for functions, which then blew up in
 *   hashString) — query-constraint callbacks are exactly such values;
 * - get() keeps totalEntries/memoryUsage stats in sync when it evicts an
 *   expired entry.
 */
export class RelationshipCache {
  private cache: Map<string, RelationshipCacheEntry> = new Map();
  private maxSize: number; // max entries before oldest-entry eviction
  private defaultTTL: number; // ms an entry stays valid unless overridden
  private stats: RelationshipCacheStats;
  constructor(maxSize: number = 1000, defaultTTL: number = 600000) {
    // 10 minutes default
    this.maxSize = maxSize;
    this.defaultTTL = defaultTTL;
    this.stats = {
      totalEntries: 0,
      hitCount: 0,
      missCount: 0,
      hitRate: 0,
      memoryUsage: 0,
    };
  }
  /**
   * Build the cache key for one relationship of one instance. `extraData`
   * (e.g. a constraints callback or options object) is folded into the key
   * via a stable hash so differently-constrained loads don't collide.
   */
  generateKey(instance: BaseModel, relationshipName: string, extraData?: any): string {
    const baseKey = `${instance.constructor.name}:${instance.id}:${relationshipName}`;
    if (extraData) {
      // JSON.stringify returns undefined for functions; use the function
      // source instead so hashString always receives a string.
      const extraStr =
        typeof extraData === 'function'
          ? extraData.toString()
          : (JSON.stringify(extraData) ?? String(extraData));
      return `${baseKey}:${this.hashString(extraStr)}`;
    }
    return baseKey;
  }
  /** Look up a key; expired entries are evicted and count as misses. */
  get(key: string): any | null {
    const entry = this.cache.get(key);
    if (!entry) {
      this.stats.missCount++;
      this.updateHitRate();
      return null;
    }
    // Check if entry has expired
    if (Date.now() - entry.timestamp > entry.ttl) {
      this.cache.delete(key);
      // Keep size stats consistent with the eviction we just performed.
      this.stats.totalEntries = this.cache.size;
      this.updateMemoryUsage();
      this.stats.missCount++;
      this.updateHitRate();
      return null;
    }
    this.stats.hitCount++;
    this.updateHitRate();
    return this.deserializeData(entry.data, entry.modelType);
  }
  /** Store a relationship payload, evicting the oldest entry when full. */
  set(
    key: string,
    data: any,
    modelType: string,
    relationshipType: string,
    customTTL?: number,
  ): void {
    const ttl = customTTL || this.defaultTTL;
    // Check if we need to evict entries
    if (this.cache.size >= this.maxSize) {
      this.evictOldest();
    }
    const entry: RelationshipCacheEntry = {
      key,
      data: this.serializeData(data),
      timestamp: Date.now(),
      ttl,
      modelType,
      relationshipType,
    };
    this.cache.set(key, entry);
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
  }
  /** Remove one key; returns whether it existed. */
  invalidate(key: string): boolean {
    const deleted = this.cache.delete(key);
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
    return deleted;
  }
  /** Remove every cached relationship of one model instance. */
  invalidateByInstance(instance: BaseModel): number {
    const prefix = `${instance.constructor.name}:${instance.id}:`;
    let deletedCount = 0;
    for (const [key] of this.cache.entries()) {
      if (key.startsWith(prefix)) {
        this.cache.delete(key);
        deletedCount++;
      }
    }
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
    return deletedCount;
  }
  /** Remove every entry keyed by or typed as the given model name. */
  invalidateByModel(modelName: string): number {
    let deletedCount = 0;
    for (const [key, entry] of this.cache.entries()) {
      if (key.startsWith(`${modelName}:`) || entry.modelType === modelName) {
        this.cache.delete(key);
        deletedCount++;
      }
    }
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
    return deletedCount;
  }
  /** Remove every entry of one relationship kind (e.g. 'hasMany'). */
  invalidateByRelationship(relationshipType: string): number {
    let deletedCount = 0;
    for (const [key, entry] of this.cache.entries()) {
      if (entry.relationshipType === relationshipType) {
        this.cache.delete(key);
        deletedCount++;
      }
    }
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
    return deletedCount;
  }
  /** Drop all entries and reset statistics. */
  clear(): void {
    this.cache.clear();
    this.stats = {
      totalEntries: 0,
      hitCount: 0,
      missCount: 0,
      hitRate: 0,
      memoryUsage: 0,
    };
  }
  /** A snapshot copy of the current statistics. */
  getStats(): RelationshipCacheStats {
    return { ...this.stats };
  }
  // Preload relationships for multiple instances
  /**
   * Populate the cache for `relationships` across `instances` using the
   * supplied loader. Individual failures are logged and skipped.
   */
  async warmup(
    instances: BaseModel[],
    relationships: string[],
    loadFunction: (instance: BaseModel, relationshipName: string) => Promise<any>,
  ): Promise<void> {
    console.log(`🔥 Warming relationship cache for ${instances.length} instances...`);
    const promises: Promise<void>[] = [];
    for (const instance of instances) {
      for (const relationshipName of relationships) {
        promises.push(
          loadFunction(instance, relationshipName)
            .then((data) => {
              const key = this.generateKey(instance, relationshipName);
              const modelType = data?.constructor?.name || 'unknown';
              this.set(key, data, modelType, relationshipName);
            })
            .catch((error) => {
              console.warn(
                `Failed to warm cache for ${instance.constructor.name}:${instance.id}:${relationshipName}:`,
                error,
              );
            }),
        );
      }
    }
    await Promise.allSettled(promises);
    console.log(`✅ Relationship cache warmed with ${promises.length} entries`);
  }
  // Get cache entries by relationship type
  getEntriesByRelationship(relationshipType: string): RelationshipCacheEntry[] {
    return Array.from(this.cache.values()).filter(
      (entry) => entry.relationshipType === relationshipType,
    );
  }
  // Get expired entries
  /** Keys of entries whose TTL has elapsed (not yet removed). */
  getExpiredEntries(): string[] {
    const now = Date.now();
    const expired: string[] = [];
    for (const [key, entry] of this.cache.entries()) {
      if (now - entry.timestamp > entry.ttl) {
        expired.push(key);
      }
    }
    return expired;
  }
  // Cleanup expired entries
  /** Remove all expired entries; returns how many were removed. */
  cleanup(): number {
    const expired = this.getExpiredEntries();
    for (const key of expired) {
      this.cache.delete(key);
    }
    this.stats.totalEntries = this.cache.size;
    this.updateMemoryUsage();
    return expired.length;
  }
  // Performance analysis
  /** Age distribution and per-relationship-type entry counts. */
  analyzePerformance(): {
    averageAge: number;
    oldestEntry: number;
    newestEntry: number;
    relationshipTypes: Map<string, number>;
  } {
    const now = Date.now();
    let totalAge = 0;
    let oldestAge = 0;
    let newestAge = Infinity;
    const relationshipTypes = new Map<string, number>();
    for (const entry of this.cache.values()) {
      const age = now - entry.timestamp;
      totalAge += age;
      if (age > oldestAge) oldestAge = age;
      if (age < newestAge) newestAge = age;
      const count = relationshipTypes.get(entry.relationshipType) || 0;
      relationshipTypes.set(entry.relationshipType, count + 1);
    }
    return {
      averageAge: this.cache.size > 0 ? totalAge / this.cache.size : 0,
      oldestEntry: oldestAge,
      newestEntry: newestAge === Infinity ? 0 : newestAge,
      relationshipTypes,
    };
  }
  /** Serialize a payload (single item or array) for storage. */
  private serializeData(data: any): any {
    if (Array.isArray(data)) {
      return data.map((item) => this.serializeItem(item));
    } else {
      return this.serializeItem(data);
    }
  }
  /** Snapshot model-like items via toJSON, tagged with their type name. */
  private serializeItem(item: any): any {
    if (item && typeof item.toJSON === 'function') {
      return {
        __type: item.constructor.name,
        __data: item.toJSON(),
      };
    }
    return item;
  }
  private deserializeData(data: any, expectedType: string): any {
    if (Array.isArray(data)) {
      return data.map((item) => this.deserializeItem(item, expectedType));
    } else {
      return this.deserializeItem(data, expectedType);
    }
  }
  private deserializeItem(item: any, _expectedType: string): any {
    if (item && item.__type && item.__data) {
      // For now, return the raw data
      // In a full implementation, we would reconstruct the model instance
      return item.__data;
    }
    return item;
  }
  /** Evict the single entry with the oldest insertion timestamp. */
  private evictOldest(): void {
    if (this.cache.size === 0) return;
    let oldestKey: string | null = null;
    let oldestTime = Infinity;
    for (const [key, entry] of this.cache.entries()) {
      if (entry.timestamp < oldestTime) {
        oldestTime = entry.timestamp;
        oldestKey = key;
      }
    }
    if (oldestKey) {
      this.cache.delete(oldestKey);
    }
  }
  private updateHitRate(): void {
    const total = this.stats.hitCount + this.stats.missCount;
    this.stats.hitRate = total > 0 ? this.stats.hitCount / total : 0;
  }
  private updateMemoryUsage(): void {
    // Rough estimation of memory usage
    let size = 0;
    for (const entry of this.cache.values()) {
      size += JSON.stringify(entry.data).length;
    }
    this.stats.memoryUsage = size;
  }
  /** Fast non-cryptographic string hash (base-36, always non-negative). */
  private hashString(str: string): string {
    let hash = 0;
    if (str.length === 0) return hash.toString();
    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = (hash << 5) - hash + char;
      hash = hash & hash; // force 32-bit integer
    }
    return Math.abs(hash).toString(36);
  }
}

View File

@ -0,0 +1,569 @@
import { BaseModel } from '../models/BaseModel';
import { RelationshipConfig } from '../types/models';
import { RelationshipCache } from './RelationshipCache';
import { QueryBuilder } from '../query/QueryBuilder';
/** Per-call tuning for relationship loading. */
export interface RelationshipLoadOptions {
  /** Set to false to bypass the relationship cache (default behavior is to use it). */
  useCache?: boolean;
  /** Hook to refine the underlying query (extra filters, ordering, paging). */
  constraints?: (query: QueryBuilder<any>) => QueryBuilder<any>;
  /** Maximum number of related records to fetch. */
  limit?: number;
  /** Ordering applied to collection relationships. */
  orderBy?: { field: string; direction: 'asc' | 'desc' };
}
/** A batched eager-load task: one relationship across many instances. */
export interface EagerLoadPlan {
  relationshipName: string;
  config: RelationshipConfig;
  instances: BaseModel[];
  options?: RelationshipLoadOptions;
}
export class RelationshipManager {
// Owning framework instance (service locator for models/databases).
private framework: any;
// TTL cache of previously loaded relationship results.
private cache: RelationshipCache;
/**
 * @param framework - the initialized framework this manager resolves
 *   models and databases through.
 */
constructor(framework: any) {
  this.framework = framework;
  this.cache = new RelationshipCache();
}
/**
 * Load one named relationship for `instance`: consult the cache first
 * (unless options.useCache === false), otherwise dispatch to the
 * type-specific loader, then cache the result and attach it to the
 * instance before returning it.
 *
 * @throws Error when the model declares no relationship with that name,
 *   or when the declared relationship type is unsupported.
 */
async loadRelationship(
  instance: BaseModel,
  relationshipName: string,
  options: RelationshipLoadOptions = {},
): Promise<any> {
  const modelClass = instance.constructor as typeof BaseModel;
  const relationConfig = modelClass.relationships?.get(relationshipName);
  if (!relationConfig) {
    throw new Error(`Relationship '${relationshipName}' not found on ${modelClass.name}`);
  }
  console.log(
    `🔗 Loading ${relationConfig.type} relationship: ${modelClass.name}.${relationshipName}`,
  );
  // Check cache first if enabled
  if (options.useCache !== false) {
    // NOTE(review): options.constraints is a function passed as the cache
    // key's extra data — verify generateKey handles non-JSON-serializable
    // values.
    const cacheKey = this.cache.generateKey(instance, relationshipName, options.constraints);
    const cached = this.cache.get(cacheKey);
    if (cached) {
      console.log(`⚡ Cache hit for relationship ${relationshipName}`);
      // NOTE(review): the cache-hit path writes to _loadedRelations directly
      // while the fresh-load path below uses setRelation() — confirm the two
      // are equivalent.
      instance._loadedRelations.set(relationshipName, cached);
      return cached;
    }
  }
  // Load relationship based on type
  let result: any;
  switch (relationConfig.type) {
    case 'belongsTo':
      result = await this.loadBelongsTo(instance, relationConfig, options);
      break;
    case 'hasMany':
      result = await this.loadHasMany(instance, relationConfig, options);
      break;
    case 'hasOne':
      result = await this.loadHasOne(instance, relationConfig, options);
      break;
    case 'manyToMany':
      result = await this.loadManyToMany(instance, relationConfig, options);
      break;
    default:
      throw new Error(`Unsupported relationship type: ${relationConfig.type}`);
  }
  // Cache the result if enabled (null/empty results are not cached)
  if (options.useCache !== false && result) {
    const cacheKey = this.cache.generateKey(instance, relationshipName, options.constraints);
    const modelType = Array.isArray(result)
      ? result[0]?.constructor?.name || 'unknown'
      : result.constructor?.name || 'unknown';
    this.cache.set(cacheKey, result, modelType, relationConfig.type);
  }
  // Store in instance
  instance.setRelation(relationshipName, result);
  console.log(
    `✅ Loaded ${relationConfig.type} relationship: ${Array.isArray(result) ? result.length : 1} item(s)`,
  );
  return result;
}
private async loadBelongsTo(
instance: BaseModel,
config: RelationshipConfig,
options: RelationshipLoadOptions,
): Promise<BaseModel | null> {
const foreignKeyValue = (instance as any)[config.foreignKey];
if (!foreignKeyValue) {
return null;
}
// Build query for the related model
let query = (config.model as any).where('id', '=', foreignKeyValue);
// Apply constraints if provided
if (options.constraints) {
query = options.constraints(query);
}
const result = await query.first();
return result;
}
/**
 * Load the children of a hasMany relationship: all related records whose
 * foreign key equals this instance's local key. Relationships declared with
 * a `through` model are delegated to the many-to-many loader. Returns an
 * empty array when the local key is unset.
 */
private async loadHasMany(
  instance: BaseModel,
  config: RelationshipConfig,
  options: RelationshipLoadOptions,
): Promise<BaseModel[]> {
  if (config.through) {
    return await this.loadManyToMany(instance, config, options);
  }
  const localValue = (instance as any)[config.localKey || 'id'];
  if (!localValue) return [];
  let query = (config.model as any).where(config.foreignKey, '=', localValue);
  if (options.constraints) {
    query = options.constraints(query);
  }
  if (options.orderBy) {
    query = query.orderBy(options.orderBy.field, options.orderBy.direction);
  }
  if (options.limit) {
    query = query.limit(options.limit);
  }
  return await query.exec();
}
/**
 * Load a hasOne relationship as a hasMany capped at one record; returns the
 * single match or null.
 */
private async loadHasOne(
  instance: BaseModel,
  config: RelationshipConfig,
  options: RelationshipLoadOptions,
): Promise<BaseModel | null> {
  const matches = await this.loadHasMany(
    instance,
    { ...config, type: 'hasMany' },
    { ...options, limit: 1 },
  );
  return matches[0] || null;
}
/**
 * Load a many-to-many relationship in three steps: fetch junction records
 * for this instance, collect the foreign keys they reference, then fetch
 * the related models by id. Returns an empty array when the local key is
 * unset or no junction rows exist.
 *
 * @throws Error when the relationship has no `through` model configured.
 */
private async loadManyToMany(
  instance: BaseModel,
  config: RelationshipConfig,
  options: RelationshipLoadOptions,
): Promise<BaseModel[]> {
  if (!config.through) {
    throw new Error('Many-to-many relationships require a through model');
  }
  const localKeyValue = (instance as any)[config.localKey || 'id'];
  if (!localKeyValue) {
    return [];
  }
  // Step 1: Get junction table records
  // NOTE(review): the junction is filtered on config.localKey (default 'id')
  // — confirm the through-model column really shares the parent's local key
  // name; a dedicated junction foreign-key field seems more typical.
  let junctionQuery = (config.through as any).where(config.localKey || 'id', '=', localKeyValue);
  // Apply constraints to junction if needed
  if (options.constraints) {
    // Note: This is simplified - in a full implementation we'd need to handle
    // constraints that apply to the final model vs the junction model
  }
  const junctionRecords = await junctionQuery.exec();
  if (junctionRecords.length === 0) {
    return [];
  }
  // Step 2: Extract foreign keys
  const foreignKeys = junctionRecords.map((record: any) => record[config.foreignKey]);
  // Step 3: Get related models
  let relatedQuery = (config.model as any).whereIn('id', foreignKeys);
  // Apply constraints if provided
  if (options.constraints) {
    relatedQuery = options.constraints(relatedQuery);
  }
  // Apply ordering and limiting
  if (options.orderBy) {
    relatedQuery = relatedQuery.orderBy(options.orderBy.field, options.orderBy.direction);
  }
  if (options.limit) {
    relatedQuery = relatedQuery.limit(options.limit);
  }
  return await relatedQuery.exec();
}
// Eager loading for multiple instances
/**
 * Batch-load the named relationships onto every given instance.
 *
 * Instances are grouped by model type so each (model, relationship) pair is
 * resolved with a minimal number of queries rather than per-instance.
 *
 * @param instances     Models to hydrate; no-op when empty.
 * @param relationships Relationship names declared on the models.
 * @param options       Per-relationship load options, keyed by name.
 */
async eagerLoadRelationships(
  instances: BaseModel[],
  relationships: string[],
  options: Record<string, RelationshipLoadOptions> = {},
): Promise<void> {
  if (instances.length === 0) return;
  console.log(
    `🚀 Eager loading ${relationships.length} relationships for ${instances.length} instances`,
  );
  const byModel = this.groupInstancesByModel(instances);
  for (const relationName of relationships) {
    const relationOptions = options[relationName] || {};
    await this.eagerLoadSingleRelationship(byModel, relationName, relationOptions);
  }
  console.log(`✅ Eager loading completed for ${relationships.length} relationships`);
}
/**
 * Resolve one named relationship for every group of same-model instances,
 * dispatching on the relationship type declared on the model class.
 * Model groups that do not declare the relationship are skipped with a warning.
 */
private async eagerLoadSingleRelationship(
instanceGroups: Map<string, BaseModel[]>,
relationshipName: string,
options: RelationshipLoadOptions,
): Promise<void> {
for (const [modelName, instances] of instanceGroups) {
if (instances.length === 0) continue;
// All instances in a group share a class; read the config off the first one.
const firstInstance = instances[0];
const modelClass = firstInstance.constructor as typeof BaseModel;
const relationConfig = modelClass.relationships?.get(relationshipName);
if (!relationConfig) {
console.warn(`Relationship '${relationshipName}' not found on ${modelName}`);
continue;
}
console.log(
`🔗 Eager loading ${relationConfig.type} for ${instances.length} ${modelName} instances`,
);
// Dispatch to the type-specific batch loader.
switch (relationConfig.type) {
case 'belongsTo':
await this.eagerLoadBelongsTo(instances, relationshipName, relationConfig, options);
break;
case 'hasMany':
await this.eagerLoadHasMany(instances, relationshipName, relationConfig, options);
break;
case 'hasOne':
await this.eagerLoadHasOne(instances, relationshipName, relationConfig, options);
break;
case 'manyToMany':
await this.eagerLoadManyToMany(instances, relationshipName, relationConfig, options);
break;
}
}
}
/**
 * Batch-resolve a belongsTo relation for many instances with a single
 * whereIn query, then fan the results back out by foreign key.
 *
 * Also primes the per-instance relationship cache unless options.useCache
 * is explicitly false.
 */
private async eagerLoadBelongsTo(
  instances: BaseModel[],
  relationshipName: string,
  config: RelationshipConfig,
  options: RelationshipLoadOptions,
): Promise<void> {
  // Collect the foreign keys actually present on the instances.
  const foreignKeys = instances
    .map((instance) => (instance as any)[config.foreignKey])
    .filter((key) => key != null);
  if (foreignKeys.length === 0) {
    // Nothing to load: record an explicit null relation on every instance.
    // FIX: uses setRelation (consistent with the other eager loaders)
    // instead of writing to _loadedRelations directly.
    instances.forEach((instance) => {
      instance.setRelation(relationshipName, null);
    });
    return;
  }
  // De-duplicate so the whereIn query stays minimal.
  const uniqueForeignKeys = [...new Set(foreignKeys)];
  let query = (config.model as any).whereIn('id', uniqueForeignKeys);
  if (options.constraints) {
    query = options.constraints(query);
  }
  const relatedModels = await query.exec();
  // Index related models by id for O(1) fan-out.
  const relatedMap = new Map();
  relatedModels.forEach((model: any) => relatedMap.set(model.id, model));
  // Assign each instance its parent (or null) and prime the cache.
  instances.forEach((instance) => {
    const foreignKeyValue = (instance as any)[config.foreignKey];
    const related = relatedMap.get(foreignKeyValue) || null;
    instance.setRelation(relationshipName, related);
    if (options.useCache !== false) {
      const cacheKey = this.cache.generateKey(instance, relationshipName, options.constraints);
      const modelType = related?.constructor?.name || 'null';
      this.cache.set(cacheKey, related, modelType, config.type);
    }
  });
}
/**
 * Batch-resolve a hasMany relation: one whereIn query over all local keys,
 * then group results by foreign key and assign each instance its slice.
 * Delegates to eagerLoadManyToMany when a `through` model is configured.
 * Primes the per-instance relationship cache unless options.useCache is false.
 */
private async eagerLoadHasMany(
instances: BaseModel[],
relationshipName: string,
config: RelationshipConfig,
options: RelationshipLoadOptions,
): Promise<void> {
if (config.through) {
return await this.eagerLoadManyToMany(instances, relationshipName, config, options);
}
// Get all local key values
const localKeys = instances
.map((instance) => (instance as any)[config.localKey || 'id'])
.filter((key) => key != null);
if (localKeys.length === 0) {
instances.forEach((instance) => {
instance.setRelation(relationshipName, []);
});
return;
}
// Load all related models
let query = (config.model as any).whereIn(config.foreignKey, localKeys);
if (options.constraints) {
query = options.constraints(query);
}
if (options.orderBy) {
query = query.orderBy(options.orderBy.field, options.orderBy.direction);
}
const relatedModels = await query.exec();
// Group by foreign key
const relatedGroups = new Map<string, BaseModel[]>();
relatedModels.forEach((model: any) => {
const foreignKeyValue = model[config.foreignKey];
if (!relatedGroups.has(foreignKeyValue)) {
relatedGroups.set(foreignKeyValue, []);
}
relatedGroups.get(foreignKeyValue)!.push(model);
});
// Apply limit per instance if specified
// (the query-level limit would cap the whole batch, not each instance,
// so the limit is enforced here after grouping)
if (options.limit) {
relatedGroups.forEach((group) => {
if (group.length > options.limit!) {
group.splice(options.limit!);
}
});
}
// Assign to instances and cache
instances.forEach((instance) => {
const localKeyValue = (instance as any)[config.localKey || 'id'];
const related = relatedGroups.get(localKeyValue) || [];
instance.setRelation(relationshipName, related);
// Cache individual relationship
if (options.useCache !== false) {
const cacheKey = this.cache.generateKey(instance, relationshipName, options.constraints);
const modelType = related[0]?.constructor?.name || 'array';
this.cache.set(cacheKey, related, modelType, config.type);
}
});
}
/**
 * Batch-resolve a hasOne relation: load it as a limit-1 hasMany, then
 * unwrap each instance's single-element array to one model or null.
 */
private async eagerLoadHasOne(
  instances: BaseModel[],
  relationshipName: string,
  config: RelationshipConfig,
  options: RelationshipLoadOptions,
): Promise<void> {
  await this.eagerLoadHasMany(instances, relationshipName, config, {
    ...options,
    limit: 1,
  });
  // Unwrap the arrays written by eagerLoadHasMany.
  // FIX: the write now goes through setRelation (consistent with the other
  // eager loaders) instead of mutating _loadedRelations directly.
  instances.forEach((instance) => {
    const relatedArray = instance._loadedRelations.get(relationshipName) || [];
    instance.setRelation(relationshipName, relatedArray[0] || null);
  });
}
/**
 * Batch-resolve a many-to-many relation through a junction model.
 *
 * Steps: (1) fetch all junction rows for the batch in one whereIn query,
 * (2) group them by the owning instance's local key, (3) load every distinct
 * related model in a second whereIn query, (4) fan results back out per
 * instance via the junction grouping. Primes the per-instance cache unless
 * options.useCache is false.
 *
 * @throws Error when config.through is not set.
 */
private async eagerLoadManyToMany(
instances: BaseModel[],
relationshipName: string,
config: RelationshipConfig,
options: RelationshipLoadOptions,
): Promise<void> {
if (!config.through) {
throw new Error('Many-to-many relationships require a through model');
}
// Get all local key values
const localKeys = instances
.map((instance) => (instance as any)[config.localKey || 'id'])
.filter((key) => key != null);
if (localKeys.length === 0) {
instances.forEach((instance) => {
instance.setRelation(relationshipName, []);
});
return;
}
// Step 1: Get all junction records
const junctionRecords = await (config.through as any)
.whereIn(config.localKey || 'id', localKeys)
.exec();
if (junctionRecords.length === 0) {
instances.forEach((instance) => {
instance.setRelation(relationshipName, []);
});
return;
}
// Step 2: Group junction records by local key
const junctionGroups = new Map<string, any[]>();
junctionRecords.forEach((record: any) => {
const localKeyValue = (record as any)[config.localKey || 'id'];
if (!junctionGroups.has(localKeyValue)) {
junctionGroups.set(localKeyValue, []);
}
junctionGroups.get(localKeyValue)!.push(record);
});
// Step 3: Get all foreign keys
const allForeignKeys = junctionRecords.map((record: any) => (record as any)[config.foreignKey]);
const uniqueForeignKeys = [...new Set(allForeignKeys)];
// Step 4: Load all related models
let relatedQuery = (config.model as any).whereIn('id', uniqueForeignKeys);
if (options.constraints) {
relatedQuery = options.constraints(relatedQuery);
}
if (options.orderBy) {
relatedQuery = relatedQuery.orderBy(options.orderBy.field, options.orderBy.direction);
}
const relatedModels = await relatedQuery.exec();
// Create lookup map for related models
const relatedMap = new Map();
relatedModels.forEach((model: any) => relatedMap.set(model.id, model));
// Step 5: Assign to instances
instances.forEach((instance) => {
const localKeyValue = (instance as any)[config.localKey || 'id'];
const junctionRecordsForInstance = junctionGroups.get(localKeyValue) || [];
// Map each junction row to its related model; drop dangling references.
const relatedForInstance = junctionRecordsForInstance
.map((junction) => {
const foreignKeyValue = (junction as any)[config.foreignKey];
return relatedMap.get(foreignKeyValue);
})
.filter((related) => related != null);
// Apply limit if specified
const finalRelated = options.limit
? relatedForInstance.slice(0, options.limit)
: relatedForInstance;
instance.setRelation(relationshipName, finalRelated);
// Cache individual relationship
if (options.useCache !== false) {
const cacheKey = this.cache.generateKey(instance, relationshipName, options.constraints);
const modelType = finalRelated[0]?.constructor?.name || 'array';
this.cache.set(cacheKey, finalRelated, modelType, config.type);
}
});
}
private groupInstancesByModel(instances: BaseModel[]): Map<string, BaseModel[]> {
const groups = new Map<string, BaseModel[]>();
instances.forEach((instance) => {
const modelName = instance.constructor.name;
if (!groups.has(modelName)) {
groups.set(modelName, []);
}
groups.get(modelName)!.push(instance);
});
return groups;
}
// Cache management methods
/**
 * Drop cached data for one instance: a single relationship when named,
 * otherwise every cached relationship of the instance.
 * @returns number of cache entries removed.
 */
invalidateRelationshipCache(instance: BaseModel, relationshipName?: string): number {
if (relationshipName) {
const key = this.cache.generateKey(instance, relationshipName);
return this.cache.invalidate(key) ? 1 : 0;
} else {
return this.cache.invalidateByInstance(instance);
}
}
/** Drop every cached relationship involving the named model type. */
invalidateModelCache(modelName: string): number {
return this.cache.invalidateByModel(modelName);
}
/** Snapshot of cache hit/size stats plus the cache's own performance analysis. */
getRelationshipCacheStats(): any {
return {
cache: this.cache.getStats(),
performance: this.cache.analyzePerformance(),
};
}
// Preload relationships for better performance
// (loads with useCache:false so the warmup writes fresh entries)
async warmupRelationshipCache(instances: BaseModel[], relationships: string[]): Promise<void> {
await this.cache.warmup(instances, relationships, (instance, relationshipName) =>
this.loadRelationship(instance, relationshipName, { useCache: false }),
);
}
// Cleanup and maintenance
/** Evict expired cache entries; returns the number removed. */
cleanupExpiredCache(): number {
return this.cache.cleanup();
}
/** Drop the entire relationship cache. */
clearRelationshipCache(): void {
this.cache.clear();
}
}

View File

@ -0,0 +1,98 @@
import { StoreType } from '../types/framework';
/** Minimal contract the framework requires from a host OrbitDB service. */
export interface OrbitDBInstance {
openDB(name: string, type: string): Promise<any>;
getOrbitDB(): any;
init(): Promise<any>;
// Optional: hosts without shutdown support may omit stop().
stop?(): Promise<void>;
}
/** Minimal contract the framework requires from a host IPFS/Helia service. */
export interface IPFSInstance {
init(): Promise<any>;
getHelia(): any;
getLibp2pInstance(): any;
// Optional: hosts without shutdown support may omit stop().
stop?(): Promise<void>;
// Optional pubsub facade — presumably backed by libp2p pubsub; TODO confirm.
pubsub?: {
publish(topic: string, data: string): Promise<void>;
subscribe(topic: string, handler: (message: any) => void): Promise<void>;
unsubscribe(topic: string): Promise<void>;
};
}
/**
 * Thin adapter exposing a host OrbitDB service to the framework with a
 * stable, minimal surface.
 */
export class FrameworkOrbitDBService {
  /** Host service that owns the actual OrbitDB node. */
  private readonly orbitDBService: OrbitDBInstance;

  constructor(orbitDBService: OrbitDBInstance) {
    this.orbitDBService = orbitDBService;
  }

  /** Open (or create) a named database of the given store type. */
  async openDatabase(name: string, type: StoreType): Promise<any> {
    return this.orbitDBService.openDB(name, type);
  }

  /** Initialize the underlying host service. */
  async init(): Promise<void> {
    await this.orbitDBService.init();
  }

  /** Stop the underlying host service when it supports shutdown. */
  async stop(): Promise<void> {
    await this.orbitDBService.stop?.();
  }

  /** Expose the raw OrbitDB handle. */
  getOrbitDB(): any {
    return this.orbitDBService.getOrbitDB();
  }
}
/**
 * Thin adapter exposing a host IPFS/Helia service to the framework.
 */
export class FrameworkIPFSService {
  /** Host service that owns the actual Helia/libp2p node. */
  private readonly ipfsService: IPFSInstance;

  constructor(ipfsService: IPFSInstance) {
    this.ipfsService = ipfsService;
  }

  /** Initialize the underlying IPFS service. */
  async init(): Promise<void> {
    await this.ipfsService.init();
  }

  /** Stop the underlying IPFS service when it supports shutdown. */
  async stop(): Promise<void> {
    await this.ipfsService.stop?.();
  }

  /** Expose the raw Helia instance. */
  getHelia(): any {
    return this.ipfsService.getHelia();
  }

  /** Expose the underlying libp2p node. */
  getLibp2p(): any {
    return this.ipfsService.getLibp2pInstance();
  }

  /**
   * Snapshot of currently connected peers, keyed by the peer id's string
   * form. Returns an empty map when no libp2p node is available.
   */
  async getConnectedPeers(): Promise<Map<string, any>> {
    const node = this.getLibp2p();
    if (!node) {
      return new Map();
    }
    const connected = new Map<string, any>();
    for (const peerId of node.getPeers()) {
      connected.set(peerId.toString(), peerId);
    }
    return connected;
  }

  async pinOnNode(nodeId: string, cid: string): Promise<void> {
    // Implementation depends on your specific pinning setup
    // This is a placeholder for the pinning functionality
    console.log(`Pinning ${cid} on node ${nodeId}`);
  }

  /** Raw pubsub facade from the host service, when the host provides one. */
  get pubsub() {
    return this.ipfsService.pubsub;
  }
}

View File

@ -0,0 +1,299 @@
import { ShardingConfig, StoreType } from '../types/framework';
import { FrameworkOrbitDBService } from '../services/OrbitDBService';
/** Handle for a single opened shard database. */
export interface ShardInfo {
name: string;
index: number;
// Opened OrbitDB database instance backing this shard.
database: any;
address: string;
}
/**
 * Manages sharded OrbitDB databases per model, plus sharded global indexes.
 *
 * Shard placement supports three strategies: 'hash' (stable string hash),
 * 'range' (alphabetical by first character) and 'user' (hash, tuned naming
 * for user ids). Global indexes are keyvalue stores sharded by hash.
 */
export class ShardManager {
  private orbitDBService?: FrameworkOrbitDBService;
  // modelName (or indexName) -> its opened shards, in index order.
  private shards: Map<string, ShardInfo[]> = new Map();
  // modelName -> the sharding configuration it was created with.
  private shardConfigs: Map<string, ShardingConfig> = new Map();

  setOrbitDBService(service: FrameworkOrbitDBService): void {
    this.orbitDBService = service;
  }

  /**
   * Open `config.count` shard databases for a model and remember its config.
   * @throws when the OrbitDB service is not set or any shard fails to open.
   */
  async createShards(
    modelName: string,
    config: ShardingConfig,
    dbType: StoreType = 'docstore',
  ): Promise<void> {
    if (!this.orbitDBService) {
      throw new Error('OrbitDB service not initialized');
    }
    console.log(`🔀 Creating ${config.count} shards for model: ${modelName}`);
    const shards: ShardInfo[] = [];
    this.shardConfigs.set(modelName, config);
    for (let i = 0; i < config.count; i++) {
      const shardName = `${modelName.toLowerCase()}-shard-${i}`;
      try {
        const shard = await this.createShard(shardName, i, dbType);
        shards.push(shard);
        console.log(`✓ Created shard: ${shardName} (${shard.address})`);
      } catch (error) {
        console.error(`❌ Failed to create shard ${shardName}:`, error);
        throw error;
      }
    }
    this.shards.set(modelName, shards);
    console.log(`✅ Created ${shards.length} shards for ${modelName}`);
  }

  /**
   * Resolve the shard that owns `key` for a model, using the strategy the
   * model was configured with.
   * @throws when the model has no shards or no configuration.
   */
  getShardForKey(modelName: string, key: string): ShardInfo {
    const shards = this.shards.get(modelName);
    if (!shards || shards.length === 0) {
      throw new Error(`No shards found for model ${modelName}`);
    }
    const config = this.shardConfigs.get(modelName);
    if (!config) {
      throw new Error(`No shard configuration found for model ${modelName}`);
    }
    const shardIndex = this.calculateShardIndex(key, shards.length, config.strategy);
    return shards[shardIndex];
  }

  /** All shards for a model (empty array when none). */
  getAllShards(modelName: string): ShardInfo[] {
    return this.shards.get(modelName) || [];
  }

  /** Shard at a given index, or undefined when out of range. */
  getShardByIndex(modelName: string, index: number): ShardInfo | undefined {
    const shards = this.shards.get(modelName);
    if (!shards || index < 0 || index >= shards.length) {
      return undefined;
    }
    return shards[index];
  }

  /** Number of shards created for a model (0 when none). */
  getShardCount(modelName: string): number {
    const shards = this.shards.get(modelName);
    return shards ? shards.length : 0;
  }

  /** Dispatch key -> shard index on the configured strategy. */
  private calculateShardIndex(
    key: string,
    shardCount: number,
    strategy: ShardingConfig['strategy'],
  ): number {
    switch (strategy) {
      case 'hash':
        return this.hashSharding(key, shardCount);
      case 'range':
        return this.rangeSharding(key, shardCount);
      case 'user':
        return this.userSharding(key, shardCount);
      default:
        throw new Error(`Unsupported sharding strategy: ${strategy}`);
    }
  }

  private hashSharding(key: string, shardCount: number): number {
    // Consistent hash-based sharding (djb2-style rolling hash, masked to 32 bits).
    let hash = 0;
    for (let i = 0; i < key.length; i++) {
      hash = ((hash << 5) - hash + key.charCodeAt(i)) & 0xffffffff;
    }
    return Math.abs(hash) % shardCount;
  }

  private rangeSharding(key: string, shardCount: number): number {
    // Range-based sharding (alphabetical). Leading characters outside a-z
    // are clamped into the range; empty keys fall through to shard 0.
    if (!key) {
      return 0;
    }
    const firstChar = key.charAt(0).toLowerCase();
    const charCode = firstChar.charCodeAt(0);
    // Map a-z (97-122) to shard indices
    const normalizedCode = Math.max(97, Math.min(122, charCode));
    const range = (normalizedCode - 97) / 25; // 0-1 range, inclusive at both ends
    // BUG FIX: keys starting with 'z' give range === 1, which previously
    // produced index === shardCount (one past the last shard). Clamp it.
    return Math.min(shardCount - 1, Math.floor(range * shardCount));
  }

  private userSharding(key: string, shardCount: number): number {
    // User-based sharding - similar to hash but optimized for user IDs
    return this.hashSharding(key, shardCount);
  }

  /** Open one shard database and capture its address. */
  private async createShard(
    shardName: string,
    index: number,
    dbType: StoreType,
  ): Promise<ShardInfo> {
    if (!this.orbitDBService) {
      throw new Error('OrbitDB service not initialized');
    }
    const database = await this.orbitDBService.openDatabase(shardName, dbType);
    return {
      name: shardName,
      index,
      database,
      address: database.address.toString(),
    };
  }

  // Global indexing support

  /**
   * Create a hash-sharded keyvalue global index (fixed shard count of 4).
   * @throws when the OrbitDB service is not set or a shard fails to open.
   */
  async createGlobalIndex(modelName: string, indexName: string): Promise<void> {
    if (!this.orbitDBService) {
      throw new Error('OrbitDB service not initialized');
    }
    console.log(`📇 Creating global index: ${indexName} for model: ${modelName}`);
    // Create sharded global index
    const INDEX_SHARD_COUNT = 4; // Configurable
    const indexShards: ShardInfo[] = [];
    for (let i = 0; i < INDEX_SHARD_COUNT; i++) {
      const indexShardName = `${indexName}-shard-${i}`;
      try {
        const shard = await this.createShard(indexShardName, i, 'keyvalue');
        indexShards.push(shard);
        console.log(`✓ Created index shard: ${indexShardName}`);
      } catch (error) {
        console.error(`❌ Failed to create index shard ${indexShardName}:`, error);
        throw error;
      }
    }
    // Index shards live in the same map, keyed by index name.
    this.shards.set(indexName, indexShards);
    console.log(`✅ Created global index ${indexName} with ${indexShards.length} shards`);
  }

  /** Write key/value into the index shard selected by hashing the key. */
  async addToGlobalIndex(indexName: string, key: string, value: any): Promise<void> {
    const indexShards = this.shards.get(indexName);
    if (!indexShards) {
      throw new Error(`Global index ${indexName} not found`);
    }
    const shardIndex = this.hashSharding(key, indexShards.length);
    const shard = indexShards[shardIndex];
    try {
      // For keyvalue stores, we use set
      await shard.database.set(key, value);
    } catch (error) {
      console.error(`Failed to add to global index ${indexName}:`, error);
      throw error;
    }
  }

  /** Read a key from its index shard; returns null on read failure. */
  async getFromGlobalIndex(indexName: string, key: string): Promise<any> {
    const indexShards = this.shards.get(indexName);
    if (!indexShards) {
      throw new Error(`Global index ${indexName} not found`);
    }
    const shardIndex = this.hashSharding(key, indexShards.length);
    const shard = indexShards[shardIndex];
    try {
      return await shard.database.get(key);
    } catch (error) {
      console.error(`Failed to get from global index ${indexName}:`, error);
      return null;
    }
  }

  /** Delete a key from its index shard. */
  async removeFromGlobalIndex(indexName: string, key: string): Promise<void> {
    const indexShards = this.shards.get(indexName);
    if (!indexShards) {
      throw new Error(`Global index ${indexName} not found`);
    }
    const shardIndex = this.hashSharding(key, indexShards.length);
    const shard = indexShards[shardIndex];
    try {
      await shard.database.del(key);
    } catch (error) {
      console.error(`Failed to remove from global index ${indexName}:`, error);
      throw error;
    }
  }

  /**
   * Run a query against every shard of a model in parallel and concatenate
   * the results. Individual shard failures are logged and skipped.
   * @throws when the model has no shards at all.
   */
  async queryAllShards(
    modelName: string,
    queryFn: (database: any) => Promise<any[]>,
  ): Promise<any[]> {
    const shards = this.shards.get(modelName);
    if (!shards) {
      throw new Error(`No shards found for model ${modelName}`);
    }
    const results: any[] = [];
    // Query all shards in parallel
    const promises = shards.map(async (shard) => {
      try {
        return await queryFn(shard.database);
      } catch (error) {
        console.warn(`Query failed on shard ${shard.name}:`, error);
        return [];
      }
    });
    const shardResults = await Promise.all(promises);
    // Flatten results
    for (const shardResult of shardResults) {
      results.push(...shardResult);
    }
    return results;
  }

  // Statistics and monitoring

  /** Summary of a model's shards (null when the model has none). */
  getShardStatistics(modelName: string): any {
    const shards = this.shards.get(modelName);
    if (!shards) {
      return null;
    }
    return {
      modelName,
      shardCount: shards.length,
      shards: shards.map((shard) => ({
        name: shard.name,
        index: shard.index,
        address: shard.address,
      })),
    };
  }

  /** Names of every model (and global index) with shards registered. */
  getAllModelsWithShards(): string[] {
    return Array.from(this.shards.keys());
  }

  // Cleanup

  /** Drop all shard bookkeeping (does not close the databases themselves). */
  async stop(): Promise<void> {
    console.log('🛑 Stopping ShardManager...');
    this.shards.clear();
    this.shardConfigs.clear();
    console.log('✅ ShardManager stopped');
  }
}

View File

@ -0,0 +1,54 @@
/** OrbitDB store flavors supported by the framework. */
export type StoreType = 'eventlog' | 'keyvalue' | 'docstore' | 'counter' | 'feed';
/** Top-level framework options. */
export interface FrameworkConfig {
cache?: CacheConfig;
defaultPinning?: PinningConfig;
autoMigration?: boolean;
}
/** Relationship/query cache tuning. */
export interface CacheConfig {
enabled?: boolean;
maxSize?: number;
// Entry time-to-live; unit not specified here — presumably milliseconds, TODO confirm.
ttl?: number;
}
/** How content is selected for pinning. */
export type PinningStrategy = 'fixed' | 'popularity' | 'size' | 'age' | 'custom';
export interface PinningConfig {
strategy?: PinningStrategy;
factor?: number;
maxPins?: number;
minAccessCount?: number;
maxAge?: number;
}
/** Aggregated pinning metrics. */
export interface PinningStats {
totalPinned: number;
totalSize: number;
averageSize: number;
// Count of pins per strategy name.
strategies: Record<string, number>;
oldestPin: number;
recentActivity: Array<{ action: string; hash: string; timestamp: number }>;
}
/** Automatic event publishing options for a model. */
export interface PubSubConfig {
enabled?: boolean;
events?: string[];
channels?: string[];
}
/** Sharding layout for a model's databases. */
export interface ShardingConfig {
strategy: 'hash' | 'range' | 'user';
count: number;
// Field name whose value selects the shard.
key: string;
}
/** Outcome of a validation pass: valid with no errors, or invalid with messages. */
export interface ValidationResult {
valid: boolean;
errors: string[];
}
/** A single field-level validation failure. */
export interface ValidationError {
field: string;
message: string;
}

View File

@ -0,0 +1,45 @@
import { BaseModel } from '../models/BaseModel';
import { StoreType, ShardingConfig, PinningConfig, PubSubConfig } from './framework';
/** Per-model storage/behavior configuration. */
export interface ModelConfig {
type?: StoreType;
// 'user' scopes the database per user; 'global' shares one database.
scope?: 'user' | 'global';
sharding?: ShardingConfig;
pinning?: PinningConfig;
pubsub?: PubSubConfig;
tableName?: string;
}
/** Declarative schema for a single model field. */
export interface FieldConfig {
type: 'string' | 'number' | 'boolean' | 'array' | 'object' | 'date';
required?: boolean;
unique?: boolean;
// true = local index; 'global' = entry in the sharded global index.
index?: boolean | 'global';
default?: any;
// Return true when valid, or a string error message when invalid.
validate?: (value: any) => boolean | string;
// Applied to the value before storage.
transform?: (value: any) => any;
}
/** Declarative relationship between two models. */
export interface RelationshipConfig {
type: 'belongsTo' | 'hasMany' | 'hasOne' | 'manyToMany';
model: typeof BaseModel;
foreignKey: string;
localKey?: string;
// Junction model for manyToMany relations.
through?: typeof BaseModel;
lazy?: boolean;
}
/** Maps a user to the addresses of their per-user databases. */
export interface UserMappings {
userId: string;
databases: Record<string, string>;
}
/**
 * Error raised when model validation fails; carries the individual field
 * error messages alongside a combined human-readable message.
 */
export class ValidationError extends Error {
  /** Individual validation failure messages, in the order they were found. */
  public errors: string[];

  constructor(errors: string[]) {
    const combined = errors.join(', ');
    super(`Validation failed: ${combined}`);
    this.name = 'ValidationError';
    this.errors = errors;
  }
}

View File

@ -0,0 +1,16 @@
/** One predicate in a query: field <operator> value. */
export interface QueryCondition {
field: string;
operator: string;
value: any;
}
/** Ordering directive for query results. */
export interface SortConfig {
field: string;
direction: 'asc' | 'desc';
}
/** Pagination and relation-loading options for a query. */
export interface QueryOptions {
limit?: number;
offset?: number;
// Relationship names to eager-load alongside the results.
relations?: string[];
}

View File

@ -1,145 +0,0 @@
// Config exports
import { config, defaultConfig, type DebrosConfig } from './config';
import { validateConfig, type ValidationResult } from './ipfs/config/configValidator';
// Database service exports (new abstracted layer)
import {
init as initDB,
create,
get,
update,
remove,
list,
query,
createIndex,
createTransaction,
commitTransaction,
subscribe,
uploadFile,
getFile,
deleteFile,
defineSchema,
closeConnection,
stop as stopDB,
} from './db/dbService';
import { ErrorCode, StoreType } from './db/types';
// Import types
import type {
Transaction,
CreateResult,
UpdateResult,
PaginatedResult,
ListOptions,
QueryOptions,
FileUploadResult,
FileResult,
CollectionSchema,
SchemaDefinition,
Metrics,
} from './db/types';
import { DBError } from './db/core/error';
// Legacy exports (internal use only, not exposed in default export)
import { getConnectedPeers, logPeersStatus } from './ipfs/ipfsService';
// Load balancer exports
import loadBalancerController from './ipfs/loadBalancerController';
// Logger exports
import logger, {
createServiceLogger,
createDebrosLogger,
type LoggerOptions,
} from './utils/logger';
// Export public API
export {
// Configuration
config,
defaultConfig,
validateConfig,
type DebrosConfig,
type ValidationResult,
// Database Service (Main public API)
initDB,
create,
get,
update,
remove,
list,
query,
createIndex,
createTransaction,
commitTransaction,
subscribe,
uploadFile,
getFile,
deleteFile,
defineSchema,
closeConnection,
stopDB,
ErrorCode,
StoreType,
// Load Balancer
loadBalancerController,
getConnectedPeers,
logPeersStatus,
// Types
type Transaction,
type DBError,
type CollectionSchema,
type SchemaDefinition,
type CreateResult,
type UpdateResult,
type PaginatedResult,
type ListOptions,
type QueryOptions,
type FileUploadResult,
type FileResult,
type Metrics,
// Logger
logger,
createServiceLogger,
createDebrosLogger,
type LoggerOptions,
};
// Default export for convenience
export default {
config,
validateConfig,
// Database Service as main interface
db: {
init: initDB,
create,
get,
update,
remove,
list,
query,
createIndex,
createTransaction,
commitTransaction,
subscribe,
uploadFile,
getFile,
deleteFile,
defineSchema,
closeConnection,
stop: stopDB,
ErrorCode,
StoreType,
},
loadBalancerController,
logPeersStatus,
getConnectedPeers,
logger,
createServiceLogger,
};

View File

@ -1,44 +0,0 @@
import { config } from '../../config';
export interface ValidationResult {
valid: boolean;
errors: string[];
}
/**
 * Validates the IPFS configuration, collecting every problem found rather
 * than stopping at the first.
 *
 * @returns ValidationResult — valid=true with no errors, or valid=false with
 * one message per failed check (in a fixed, documented order).
 */
export const validateConfig = (): ValidationResult => {
  const port = Number(config.env.port);

  // Each entry: [failed, message]. Order matches the historical report order.
  const checks: Array<[boolean, string]> = [
    [
      !config.env.fingerprint || config.env.fingerprint === 'default-fingerprint',
      'Fingerprint not set or using default value. Please set a unique fingerprint.',
    ],
    [
      Number.isNaN(port) || port < 1 || port > 65535,
      'Invalid port configuration. Port must be a number between 1 and 65535.',
    ],
    [!config.ipfs.serviceDiscovery.topic, 'Service discovery topic not configured.'],
    [!config.ipfs.blockstorePath, 'Blockstore path not configured.'],
    [!config.orbitdb.directory, 'OrbitDB directory not configured.'],
  ];

  const errors: string[] = [];
  for (const [failed, message] of checks) {
    if (failed) errors.push(message);
  }

  return {
    valid: errors.length === 0,
    errors,
  };
};

View File

@ -1,34 +0,0 @@
import { config } from '../../config';
// Determine the IPFS port to use
/**
 * Resolve the IPFS listen port: an explicit IPFS_PORT env var wins;
 * otherwise derive HTTP port + 1 plus a random 0-9 offset to dodge port
 * conflicts during restarts/retries.
 */
export const getIpfsPort = (): number => {
  const override = process.env.IPFS_PORT;
  if (override) {
    return parseInt(override);
  }
  const httpPort = parseInt(process.env.PORT || '7777');
  // Add some randomness to avoid port conflicts during retries
  const randomOffset = Math.floor(Math.random() * 10);
  return httpPort + 1 + randomOffset;
};
// Get a node-specific blockstore path
// Appends the node fingerprint so multiple nodes on one host never share a blockstore.
export const getBlockstorePath = (): string => {
const basePath = config.ipfs.blockstorePath;
const fingerprint = config.env.fingerprint;
return `${basePath}-${fingerprint}`;
};
// IPFS configuration
// Assembled once at module load from env vars and the shared config object.
export const ipfsConfig = {
blockstorePath: getBlockstorePath(),
port: getIpfsPort(),
serviceDiscovery: {
topic: config.ipfs.serviceDiscovery.topic,
// Fallback intervals/timeouts (ms) when the shared config leaves them unset.
heartbeatInterval: config.ipfs.serviceDiscovery.heartbeatInterval || 2000,
staleTimeout: config.ipfs.serviceDiscovery.staleTimeout || 30000,
logInterval: config.ipfs.serviceDiscovery.logInterval || 60000,
publicAddress: config.ipfs.serviceDiscovery.publicAddress,
},
// Comma-separated multiaddrs, presumably — TODO confirm expected format.
bootstrapNodes: process.env.BOOTSTRAP_NODES,
};

View File

@ -1,104 +0,0 @@
import type { Libp2p } from 'libp2p';
import {
initIpfsNode,
stopIpfsNode,
getHeliaInstance,
getLibp2pInstance,
getProxyAgentInstance,
} from './services/ipfsCoreService';
import {
getConnectedPeers,
getOptimalPeer,
updateNodeLoad,
logPeersStatus,
} from './services/discoveryService';
import { createServiceLogger } from '../utils/logger';
// Create logger for IPFS service
const logger = createServiceLogger('IPFS');
// Interface definition for the IPFS module
export interface IPFSModule {
init: (externalProxyAgent?: any) => Promise<void>;
stop: () => Promise<void>;
getHelia: () => any;
getProxyAgent: () => any;
// Lazily initializes the node on first use, then returns accessors.
getInstance: (externalProxyAgent?: any) => Promise<{
getHelia: () => any;
getProxyAgent: () => any;
}>;
getLibp2p: () => Libp2p;
getConnectedPeers: () => Map<string, { lastSeen: number; load: number; publicAddress: string }>;
getOptimalPeer: () => string | null;
updateNodeLoad: (load: number) => void;
logPeersStatus: () => void;
}
// Initialize the IPFS node; logs and rethrows on failure so callers can abort startup.
const init = async (externalProxyAgent: any = null) => {
try {
await initIpfsNode(externalProxyAgent);
logger.info('IPFS service initialized successfully');
return getHeliaInstance();
} catch (error) {
logger.error('Failed to initialize IPFS service:', error);
throw error;
}
};
// Shut the node down.
const stop = async () => {
await stopIpfsNode();
logger.info('IPFS service stopped');
};
// Accessor: current Helia instance (undefined/null until init).
const getHelia = () => {
return getHeliaInstance();
};
// Accessor: proxy agent configured for the node, if any.
const getProxyAgent = () => {
return getProxyAgentInstance();
};
// Accessor: underlying libp2p node.
const getLibp2p = () => {
return getLibp2pInstance();
};
// Lazy-init entry point: boots the node on first call, then hands back accessors.
const getInstance = async (externalProxyAgent: any = null) => {
if (!getHeliaInstance()) {
await init(externalProxyAgent);
}
return {
getHelia,
getProxyAgent,
};
};
// Export individual functions
export {
init,
stop,
getHelia,
getProxyAgent,
getInstance,
getLibp2p,
getConnectedPeers,
getOptimalPeer,
updateNodeLoad,
logPeersStatus,
};
// Export as default module
export default {
init,
stop,
getHelia,
getProxyAgent,
getInstance,
getLibp2p,
getConnectedPeers,
getOptimalPeer,
updateNodeLoad,
logPeersStatus,
} as IPFSModule;

View File

@ -1,107 +0,0 @@
// Load balancer controller - Handles API routes for service discovery and load balancing
import { Request, Response, NextFunction } from 'express';
import loadBalancerService from './loadBalancerService';
import { config } from '../config';
/** Shape of the controller module exported below (Express handler trio). */
export interface LoadBalancerControllerModule {
getNodeInfo: (_req: Request, _res: Response, _next: NextFunction) => void;
getOptimalPeer: (_req: Request, _res: Response, _next: NextFunction) => void;
getAllPeers: (_req: Request, _res: Response, _next: NextFunction) => void;
}
/**
 * Get information about the node and its load
 */
const getNodeInfo = (req: Request, res: Response, next: NextFunction) => {
try {
const status = loadBalancerService.getNodeStatus();
res.json({
fingerprint: config.env.fingerprint,
peerCount: status.peerCount,
isLoadBalancer: config.features.enableLoadBalancing,
loadBalancerStrategy: config.loadBalancer.strategy,
maxConnections: config.loadBalancer.maxConnections,
});
} catch (error) {
// Delegate to the Express error-handling middleware.
next(error);
}
};
/**
 * Get the optimal peer for client connection
 *
 * Responds with useThisNode=true in three cases: load balancing disabled,
 * no other peers known, or this node is itself the optimal peer. Otherwise
 * returns the selected peer's id, load and public address.
 */
const getOptimalPeer = (req: Request, res: Response, next: NextFunction) => {
try {
// Check if load balancing is enabled
if (!config.features.enableLoadBalancing) {
res.status(200).json({
useThisNode: true,
message: 'Load balancing is disabled, use this node',
fingerprint: config.env.fingerprint,
publicAddress: config.ipfs.serviceDiscovery.publicAddress,
});
return;
}
// Get the optimal peer
const optimalPeer = loadBalancerService.getOptimalPeer();
// If there are no peer nodes, use this node
if (!optimalPeer) {
res.status(200).json({
useThisNode: true,
message: 'No other peers available, use this node',
fingerprint: config.env.fingerprint,
publicAddress: config.ipfs.serviceDiscovery.publicAddress,
});
return;
}
// Check if this node is the optimal peer
const isThisNodeOptimal = optimalPeer.peerId === config.env.fingerprint;
if (isThisNodeOptimal) {
res.status(200).json({
useThisNode: true,
message: 'This node is optimal',
fingerprint: config.env.fingerprint,
publicAddress: config.ipfs.serviceDiscovery.publicAddress,
});
return;
}
// Return the optimal peer information
res.status(200).json({
useThisNode: false,
optimalPeer: {
peerId: optimalPeer.peerId,
load: optimalPeer.load,
publicAddress: optimalPeer.publicAddress,
},
message: 'Found optimal peer',
});
} catch (error) {
next(error);
}
};
/**
 * Get all available peers
 */
const getAllPeers = (req: Request, res: Response, next: NextFunction) => {
try {
const peers = loadBalancerService.getAllPeers();
res.status(200).json({
peerCount: peers.length,
peers,
});
} catch (error) {
next(error);
}
};
export default {
getNodeInfo,
getOptimalPeer,
getAllPeers,
} as LoadBalancerControllerModule;

View File

@ -1,112 +0,0 @@
import * as ipfsService from './ipfsService';
import { config } from '../config';
import { createServiceLogger } from '../utils/logger';
const logger = createServiceLogger('LOAD_BALANCER');
// Index of the peer last handed out by the round-robin strategy (-1 = none yet).
let lastPeerIndex = -1;

// Type definitions
export interface PeerInfo {
  peerId: string;
  load: number;
  publicAddress: string;
}

export interface PeerStatus extends PeerInfo {
  lastSeen: number;
}

export interface NodeStatus {
  fingerprint: string;
  peerCount: number;
  isHealthy: boolean;
}

type LoadBalancerStrategy = 'leastLoaded' | 'roundRobin' | 'random';

/**
 * Peer-selection strategies. Each receives a non-empty peer list and returns
 * the peer the load balancer should route the next client to.
 */
const strategies = {
  // Pick the first peer reporting the smallest load value.
  leastLoaded: (peers: PeerStatus[]): PeerStatus => {
    let best = peers[0];
    for (const candidate of peers) {
      if (candidate.load < best.load) {
        best = candidate;
      }
    }
    return best;
  },
  // Cycle through peers in order, remembering the position across calls.
  roundRobin: (peers: PeerStatus[]): PeerStatus => {
    lastPeerIndex = (lastPeerIndex + 1) % peers.length;
    return peers[lastPeerIndex];
  },
  // Pick a peer uniformly at random.
  random: (peers: PeerStatus[]): PeerStatus => {
    return peers[Math.floor(Math.random() * peers.length)];
  },
};
/**
 * Get the optimal peer based on the configured load balancing strategy.
 *
 * Returns null when no peers are known; otherwise applies the strategy named
 * in config, falling back to least-loaded for unrecognized strategy names.
 */
export const getOptimalPeer = (): PeerInfo | null => {
  const connectedPeers = ipfsService.getConnectedPeers();
  if (connectedPeers.size === 0) {
    logger.info('No peers available for load balancing');
    return null;
  }
  // Convert Map to Array for easier manipulation
  const peersArray = Array.from(connectedPeers.entries()).map(([peerId, data]) => ({
    peerId,
    load: data.load,
    lastSeen: data.lastSeen,
    publicAddress: data.publicAddress,
  }));
  // Apply the selected load balancing strategy; default to least loaded when
  // the configured name does not match a known strategy at runtime.
  const strategy = config.loadBalancer.strategy as LoadBalancerStrategy;
  const strategyFn = strategies[strategy] ?? strategies.leastLoaded;
  const selectedPeer = strategyFn(peersArray);
  logger.info(
    `Selected peer (${strategy}): ${selectedPeer.peerId.substring(0, 15)}... with load ${selectedPeer.load}%`,
  );
  return {
    peerId: selectedPeer.peerId,
    load: selectedPeer.load,
    publicAddress: selectedPeer.publicAddress,
  };
};
/**
 * Get all available peers with their load information.
 */
export const getAllPeers = (): PeerStatus[] => {
  const result: PeerStatus[] = [];
  for (const [peerId, data] of ipfsService.getConnectedPeers().entries()) {
    result.push({
      peerId,
      load: data.load,
      lastSeen: data.lastSeen,
      publicAddress: data.publicAddress,
    });
  }
  return result;
};
/**
 * Get information about the current node's load.
 * isHealthy is currently hard-coded true — no health probe is performed here.
 */
export const getNodeStatus = (): NodeStatus => {
  const peerCount = ipfsService.getConnectedPeers().size;
  return {
    fingerprint: config.env.fingerprint,
    peerCount,
    isHealthy: true,
  };
};
export default { getOptimalPeer, getAllPeers, getNodeStatus };

View File

@ -1,162 +0,0 @@
import type { PubSub } from '@libp2p/interface';
import { config } from '../../config';
import { ipfsConfig } from '../config/ipfsConfig';
import { createServiceLogger } from '../../utils/logger';
// Create loggers for service discovery and heartbeat
const discoveryLogger = createServiceLogger('SERVICE-DISCOVERY');
const heartbeatLogger = createServiceLogger('HEARTBEAT');
// Node metadata
const fingerprint = config.env.fingerprint;
// Peers seen via heartbeat messages, keyed by libp2p peer ID string.
const connectedPeers: Map<
  string,
  { lastSeen: number; load: number; publicAddress: string; fingerprint: string }
> = new Map();
const SERVICE_DISCOVERY_TOPIC = ipfsConfig.serviceDiscovery.topic;
const HEARTBEAT_INTERVAL = ipfsConfig.serviceDiscovery.heartbeatInterval;
// Handle for the periodic heartbeat timer; cleared by stopDiscoveryService.
let heartbeatInterval: NodeJS.Timeout;
// Last load value calculated/advertised for this node.
let nodeLoad = 0;
/**
 * Subscribes to the service-discovery pubsub topic, records heartbeats from
 * other peers into `connectedPeers`, and starts a periodic heartbeat that
 * broadcasts this node's load and prunes peers whose heartbeats went stale.
 */
export const setupServiceDiscovery = async (pubsub: PubSub) => {
  await pubsub.subscribe(SERVICE_DISCOVERY_TOPIC);
  discoveryLogger.info(`Subscribed to topic: ${SERVICE_DISCOVERY_TOPIC}`);
  // Listen for other peers heartbeats
  pubsub.addEventListener('message', (event: any) => {
    try {
      const message = JSON.parse(event.detail.data.toString());
      // Ignore our own heartbeats (matched by fingerprint, not peer ID).
      if (message.type === 'heartbeat' && message.fingerprint !== fingerprint) {
        const peerId = event.detail.from.toString();
        const existingPeer = connectedPeers.has(peerId);
        connectedPeers.set(peerId, {
          lastSeen: Date.now(),
          load: message.load,
          publicAddress: message.publicAddress,
          fingerprint: message.fingerprint,
        });
        if (!existingPeer) {
          discoveryLogger.info(
            `New peer discovered: ${peerId} (fingerprint=${message.fingerprint})`,
          );
        }
        heartbeatLogger.info(
          `Received from ${peerId}: load=${message.load}, addr=${message.publicAddress}`,
        );
      }
    } catch (err) {
      discoveryLogger.error(`Error processing message:`, err);
    }
  });
  // Send periodic heartbeats with our load information
  heartbeatInterval = setInterval(async () => {
    try {
      nodeLoad = calculateNodeLoad();
      const heartbeatMsg = {
        type: 'heartbeat',
        fingerprint,
        load: nodeLoad,
        timestamp: Date.now(),
        publicAddress: ipfsConfig.serviceDiscovery.publicAddress,
      };
      await pubsub.publish(
        SERVICE_DISCOVERY_TOPIC,
        new TextEncoder().encode(JSON.stringify(heartbeatMsg)),
      );
      heartbeatLogger.info(
        `Sent: fingerprint=${fingerprint}, load=${nodeLoad}, addr=${heartbeatMsg.publicAddress}`,
      );
      // Evict peers whose last heartbeat is older than the stale timeout.
      const now = Date.now();
      const staleTime = ipfsConfig.serviceDiscovery.staleTimeout;
      for (const [peerId, peerData] of connectedPeers.entries()) {
        if (now - peerData.lastSeen > staleTime) {
          discoveryLogger.info(
            `Peer ${peerId.substring(0, 15)}... is stale, removing from load balancer`,
          );
          connectedPeers.delete(peerId);
        }
      }
      // Dump full peer status roughly once per minute: only the first
      // heartbeat tick falling inside each 60s window satisfies this check.
      if (Date.now() % 60000 < HEARTBEAT_INTERVAL) {
        logPeersStatus();
      }
    } catch (err) {
      discoveryLogger.error(`Error sending heartbeat:`, err);
    }
  }, HEARTBEAT_INTERVAL);
  discoveryLogger.info(`Service initialized with fingerprint: ${fingerprint}`);
};
/**
 * Calculates the current node load as an integer in [0, 99].
 *
 * Placeholder implementation returning a random value; a real version would
 * sample metrics such as CPU usage, memory, and active connections.
 */
export const calculateNodeLoad = (): number => Math.floor(Math.random() * 100);
/**
 * Logs the peer count, this node's load, and (at debug level) each known
 * peer's load and last-seen timestamp.
 */
export const logPeersStatus = () => {
  discoveryLogger.info(`Connected peers: ${connectedPeers.size}`);
  discoveryLogger.info(`Current node load: ${nodeLoad}`);
  if (connectedPeers.size === 0) {
    return;
  }
  discoveryLogger.info('Peer status:');
  for (const [peerId, data] of connectedPeers.entries()) {
    discoveryLogger.debug(
      `  - ${peerId} Load: ${data.load}% Last seen: ${new Date(data.lastSeen).toISOString()}`,
    );
  }
};
/**
 * Returns the peer ID with the lowest advertised load, or null when no peers
 * are known.
 */
export const getOptimalPeer = (): string | null => {
  if (connectedPeers.size === 0) return null;
  let bestPeer: string | null = null;
  let bestLoad = Number.MAX_SAFE_INTEGER;
  for (const [peerId, data] of connectedPeers.entries()) {
    if (data.load < bestLoad) {
      bestLoad = data.load;
      bestPeer = peerId;
    }
  }
  return bestPeer;
};
// Manually override the advertised node load (normally refreshed each heartbeat).
export const updateNodeLoad = (load: number) => {
  nodeLoad = load;
};
// Returns the live peer map (not a copy) — callers share mutable state.
export const getConnectedPeers = () => {
  return connectedPeers;
};
/**
 * Stops the discovery service: cancels the heartbeat timer and, when a pubsub
 * instance is supplied, unsubscribes from the discovery topic (errors are
 * logged, not rethrown).
 */
export const stopDiscoveryService = async (pubsub: PubSub | null) => {
  if (heartbeatInterval) {
    clearInterval(heartbeatInterval);
  }
  if (!pubsub) {
    return;
  }
  try {
    await pubsub.unsubscribe(SERVICE_DISCOVERY_TOPIC);
    discoveryLogger.info(`Unsubscribed from topic: ${SERVICE_DISCOVERY_TOPIC}`);
  } catch (err) {
    discoveryLogger.error(`Error unsubscribing from topic:`, err);
  }
};

View File

@ -1,259 +0,0 @@
import fs from 'fs';
import { createHelia } from 'helia';
import { FsBlockstore } from 'blockstore-fs';
import { createLibp2p } from 'libp2p';
import { gossipsub } from '@chainsafe/libp2p-gossipsub';
import { tcp } from '@libp2p/tcp';
import { noise } from '@chainsafe/libp2p-noise';
import { yamux } from '@chainsafe/libp2p-yamux';
import { identify } from '@libp2p/identify';
import { bootstrap } from '@libp2p/bootstrap';
import type { Libp2p } from 'libp2p';
import { FaultTolerance, PubSub } from '@libp2p/interface';
import { ipfsConfig } from '../config/ipfsConfig';
import { getPrivateKey } from '../utils/crypto';
import { setupServiceDiscovery, stopDiscoveryService } from './discoveryService';
import { createServiceLogger } from '../../utils/logger';
const logger = createServiceLogger('IPFS');
const p2pLogger = createServiceLogger('P2P');
let helia: any;
let proxyAgent: any;
let libp2pNode: Libp2p;
let reconnectInterval: NodeJS.Timeout;
/**
 * Initializes the Helia/libp2p IPFS node (idempotent).
 *
 * Reuses the existing instance when already initialized; otherwise tears down
 * any partial state, verifies the blockstore directory is writable, builds a
 * libp2p node (TCP + noise + yamux + gossipsub + bootstrap discovery), wraps
 * it in Helia, and starts service discovery plus peer-reconnection loops.
 *
 * @param externalProxyAgent optional proxy agent stored for later retrieval
 * @returns the Helia instance
 */
export const initIpfsNode = async (externalProxyAgent: any = null) => {
  try {
    // If already initialized, return existing instance
    if (helia && libp2pNode) {
      logger.info('IPFS node already initialized, returning existing instance');
      return helia;
    }
    // Clean up any existing instances first
    if (helia || libp2pNode) {
      logger.info('Cleaning up existing IPFS instances before reinitializing');
      await stopIpfsNode();
    }
    proxyAgent = externalProxyAgent;
    const blockstorePath = ipfsConfig.blockstorePath;
    try {
      if (!fs.existsSync(blockstorePath)) {
        fs.mkdirSync(blockstorePath, { recursive: true, mode: 0o755 });
        logger.info(`Created blockstore directory: ${blockstorePath}`);
      }
      // Check write permissions
      fs.accessSync(blockstorePath, fs.constants.W_OK);
      logger.info(`Verified write permissions for blockstore directory: ${blockstorePath}`);
    } catch (permError: any) {
      logger.error(`Permission error with blockstore directory: ${blockstorePath}`, permError);
      throw new Error(`Cannot access or write to blockstore directory: ${permError.message}`);
    }
    const blockstore = new FsBlockstore(blockstorePath);
    const currentNodeIp = process.env.HOSTNAME || '';
    logger.info(`Current node public IP: ${currentNodeIp}`);
    const bootstrapList = getBootstrapList();
    logger.info(`Bootstrap peers: ${JSON.stringify(bootstrapList)}`);
    const bootStrap = bootstrap({
      list: bootstrapList,
    }) as unknown as any;
    logger.info(`Configuring bootstrap with peers: ${JSON.stringify(bootstrapList)}`);
    const ipfsPort = ipfsConfig.port;
    logger.info(`Using port ${ipfsPort} for IPFS/libp2p`);
    libp2pNode = await createLibp2p({
      transports: [tcp()],
      streamMuxers: [yamux()],
      connectionEncrypters: [noise()],
      services: {
        identify: identify(),
        pubsub: gossipsub({
          allowPublishToZeroTopicPeers: true,
          emitSelf: false,
        }),
      },
      peerDiscovery: [bootStrap],
      addresses: {
        listen: [`/ip4/0.0.0.0/tcp/${ipfsPort}`],
      },
      transportManager: {
        // Keep the node running even if some transports fail to start.
        faultTolerance: FaultTolerance.NO_FATAL,
      },
      // Deterministic identity derived from the node fingerprint.
      privateKey: await getPrivateKey(),
    });
    p2pLogger.info(`PEER ID: ${libp2pNode.peerId.toString()}`);
    logger.info(
      `Listening on: ${libp2pNode
        .getMultiaddrs()
        .map((addr: any) => addr.toString())
        .join(', ')}`,
    );
    helia = await createHelia({
      blockstore,
      libp2p: libp2pNode,
    });
    const pubsub = libp2pNode.services.pubsub as PubSub;
    await setupServiceDiscovery(pubsub);
    setupPeerEventListeners(libp2pNode);
    // Kick off delayed + periodic reconnection attempts (fire-and-forget).
    connectToSpecificPeers(libp2pNode);
    return helia;
  } catch (error) {
    logger.error('Failed to initialize node:', error);
    throw error;
  }
};
/**
 * Parses the comma-separated BOOTSTRAP_NODES env var into a peer multiaddr
 * list. Empty entries are dropped, so an unset or empty variable yields []
 * (the previous implementation produced [''] for BOOTSTRAP_NODES='').
 */
function getBootstrapList(): string[] {
  return (process.env.BOOTSTRAP_NODES ?? '')
    .split(',')
    .map((node) => node.trim())
    .filter((node) => node.length > 0);
}
/**
 * Wires logging handlers for libp2p peer lifecycle events (discovery,
 * connect, disconnect, reconnect failure, connection close).
 */
function setupPeerEventListeners(node: Libp2p) {
  // Fetches a peer's known multiaddrs from the peer store and emits them via
  // `log`; peer-store lookup failures are logged at error level. Shared by
  // the connect and reconnect-failure handlers (previously duplicated).
  const logPeerMultiaddrs = (detail: any, peerId: string, log: (msg: string) => void) => {
    node.peerStore
      .get(detail)
      .then((peerInfo) => {
        const multiaddrs = peerInfo?.addresses.map((addr) => addr.multiaddr.toString()) || [
          'unknown',
        ];
        log(`Peer multiaddrs: ${multiaddrs.join(', ')}`);
      })
      .catch((error) => {
        logger.error(`Error fetching peer info for ${peerId}: ${error.message}`);
      });
  };

  node.addEventListener('peer:discovery', (event) => {
    const peerId = event.detail.id.toString();
    logger.info(`Discovered peer: ${peerId}`);
  });

  node.addEventListener('peer:connect', (event) => {
    const peerId = event.detail.toString();
    logger.info(`Peer connection succeeded: ${peerId}`);
    logPeerMultiaddrs(event.detail, peerId, (msg) => logger.info(msg));
  });

  node.addEventListener('peer:disconnect', (event) => {
    const peerId = event.detail.toString();
    logger.info(`Disconnected from peer: ${peerId}`);
  });

  node.addEventListener('peer:reconnect-failure', (event) => {
    const peerId = event.detail.toString();
    logger.error(`Peer reconnection failed: ${peerId}`);
    logPeerMultiaddrs(event.detail, peerId, (msg) => logger.error(msg));
  });

  node.addEventListener('connection:close', (event) => {
    const connection = event.detail;
    const peerId = connection.remotePeer.toString();
    const remoteAddr = connection.remoteAddr.toString();
    logger.info(`Connection closed for peer: ${peerId}`);
    logger.info(`Remote address: ${remoteAddr}`);
  });
}
/**
 * Stops the IPFS stack: cancels the reconnection timer, shuts down service
 * discovery and libp2p, then stops Helia. Safe to call when partially or
 * never initialized; teardown errors are logged rather than rethrown.
 */
export const stopIpfsNode = async () => {
  logger.info('Stopping IPFS node...');
  if (reconnectInterval) {
    clearInterval(reconnectInterval);
    reconnectInterval = undefined as any;
  }
  if (libp2pNode) {
    try {
      const pubsub = libp2pNode.services.pubsub as PubSub;
      await stopDiscoveryService(pubsub);
      // Stop libp2p
      await libp2pNode.stop();
    } catch (error) {
      logger.error('Error stopping libp2p node:', error);
    }
    libp2pNode = undefined as any;
  } else {
    // No libp2p node: still clear the heartbeat timer via discovery teardown.
    await stopDiscoveryService(null);
  }
  if (helia) {
    try {
      await helia.stop();
    } catch (error) {
      logger.error('Error stopping Helia:', error);
    }
    helia = null;
  }
  logger.info('IPFS node stopped successfully');
};
// Accessors for the module-level singletons created by initIpfsNode.
export const getHeliaInstance = () => helia;
export const getLibp2pInstance = () => libp2pNode;
export const getProxyAgentInstance = () => proxyAgent;
/**
 * Schedules peer-connection logging: a first pass 5s after startup, then a
 * repeat every 2 minutes. The interval handle is kept so stopIpfsNode can
 * cancel it.
 */
function connectToSpecificPeers(node: Libp2p) {
  const INITIAL_DELAY_MS = 5000;
  const RETRY_PERIOD_MS = 120000;
  setTimeout(async () => {
    await attemptPeerConnections(node);
    reconnectInterval = setInterval(async () => {
      await attemptPeerConnections(node);
    }, RETRY_PERIOD_MS);
  }, INITIAL_DELAY_MS);
}
/**
 * Logs the current set of connected peers, including each peer's known
 * multiaddrs when the peer store can provide them.
 */
async function attemptPeerConnections(node: Libp2p) {
  logger.info('Current peer connections:');
  const peers = node.getPeers();
  if (peers.length === 0) {
    logger.info(' - No connected peers');
    return;
  }
  for (const peerId of peers) {
    try {
      // Get peer info including addresses
      const peerInfo = await node.peerStore.get(peerId);
      const addresses =
        peerInfo?.addresses.map((addr) => addr.multiaddr.toString()).join(', ') || 'unknown';
      logger.info(` - Connected to peer: ${peerId.toString()}`);
      logger.info(`    Addresses: ${addresses}`);
    } catch (_error) {
      // Fallback to just showing the peer ID if we can't get address info
      logger.info(` - Connected to peer: ${peerId.toString()}`);
    }
  }
}

View File

@ -1,30 +0,0 @@
import { generateKeyPairFromSeed } from '@libp2p/crypto/keys';
import forge from 'node-forge';
import { config } from '../../config';
import { createServiceLogger } from '../../utils/logger';
const logger = createServiceLogger('CRYPTO');
/**
 * Generates a deterministic private key based on the node's fingerprint
 *
 * The fingerprint is hashed with SHA-256 to produce a 32-byte seed, from
 * which a libp2p-compatible Ed25519 key pair is derived — so the same
 * fingerprint always yields the same peer identity across restarts.
 */
export const getPrivateKey = async () => {
  try {
    const userInput = config.env.fingerprint;
    // Use SHA-256 to create a deterministic seed
    const md = forge.md.sha256.create();
    md.update(userInput);
    const seedString = md.digest().getBytes(); // Get raw bytes as a string
    // Convert the seed string to Uint8Array
    const seed = Uint8Array.from(forge.util.binary.raw.decode(seedString));
    // Generate an Ed25519 private key (libp2p-compatible)
    const privateKey = await generateKeyPairFromSeed('Ed25519', seed);
    return privateKey;
  } catch (error) {
    logger.error('Error generating private key:', error);
    throw error;
  }
};

View File

@ -1,159 +0,0 @@
import fs from 'fs';
import path from 'path';
import { createOrbitDB, IPFSAccessController } from '@orbitdb/core';
import { registerFeed } from '@orbitdb/feed-db';
import { config } from '../config';
import { createServiceLogger } from '../utils/logger';
import { getHelia } from '../ipfs/ipfsService';
const logger = createServiceLogger('ORBITDB');
let orbitdb: any;
// Create a node-specific directory based on fingerprint to avoid lock conflicts
export const getOrbitDBDir = (): string => {
  // path.join keeps the result cross-platform.
  return path.join(config.orbitdb.directory, `debros-${config.env.fingerprint}`);
};
const ORBITDB_DIR = getOrbitDBDir();
const ADDRESS_DIR = path.join(ORBITDB_DIR, 'addresses');

/**
 * Reads the persisted OrbitDB address for a named database, if one was
 * previously saved. Returns null when no address file exists or it cannot
 * be read (read errors are logged).
 */
export const getDBAddress = (name: string): string | null => {
  const addressFile = path.join(ADDRESS_DIR, `${name}.address`);
  try {
    if (fs.existsSync(addressFile)) {
      return fs.readFileSync(addressFile, 'utf-8').trim();
    }
  } catch (error) {
    logger.error(`Error reading DB address for ${name}:`, error);
  }
  return null;
};
/**
 * Persists the OrbitDB address for a named database so later runs (and other
 * code paths) can reopen the same database. Returns false on any failure.
 */
export const saveDBAddress = (name: string, address: string): boolean => {
  const addressFile = path.join(ADDRESS_DIR, `${name}.address`);
  try {
    // Ensure the address directory exists
    if (!fs.existsSync(ADDRESS_DIR)) {
      fs.mkdirSync(ADDRESS_DIR, { recursive: true, mode: 0o755 });
    }
    fs.writeFileSync(addressFile, address, { mode: 0o644 });
    logger.info(`Saved DB address for ${name} at ${addressFile}`);
    return true;
  } catch (error) {
    logger.error(`Failed to save DB address for ${name}:`, error);
    return false;
  }
};
/**
 * Initializes OrbitDB on top of the running Helia instance.
 *
 * Ensures the node-specific OrbitDB directory (and its address-cache subdir)
 * exists and is writable, registers the feed store type, then creates the
 * OrbitDB instance. Throws when the directory is unusable or IPFS is not up.
 */
export const init = async () => {
  try {
    // Create directory with proper permissions if it doesn't exist
    try {
      if (!fs.existsSync(ORBITDB_DIR)) {
        fs.mkdirSync(ORBITDB_DIR, { recursive: true, mode: 0o755 });
        logger.info(`Created OrbitDB directory: ${ORBITDB_DIR}`);
      }
      // Check write permissions
      fs.accessSync(ORBITDB_DIR, fs.constants.W_OK);
    } catch (permError: any) {
      logger.error(`Permission error with OrbitDB directory: ${ORBITDB_DIR}`, permError);
      throw new Error(`Cannot access or write to OrbitDB directory: ${permError.message}`);
    }
    // Create the addresses directory
    try {
      if (!fs.existsSync(ADDRESS_DIR)) {
        fs.mkdirSync(ADDRESS_DIR, { recursive: true, mode: 0o755 });
        logger.info(`Created OrbitDB addresses directory: ${ADDRESS_DIR}`);
      }
    } catch (dirError) {
      logger.error(`Error creating addresses directory: ${ADDRESS_DIR}`, dirError);
      // Continue anyway, we'll handle failures when saving addresses
    }
    // Make the 'feed' store type available to OrbitDB before opening DBs.
    registerFeed();
    const ipfs = getHelia();
    if (!ipfs) {
      throw new Error('IPFS instance is not initialized.');
    }
    logger.info(`Initializing OrbitDB with directory: ${ORBITDB_DIR}`);
    orbitdb = await createOrbitDB({
      ipfs,
      directory: ORBITDB_DIR,
    });
    logger.info('OrbitDB initialized successfully.');
    return orbitdb;
  } catch (e: any) {
    logger.error('Failed to initialize OrbitDB:', e);
    throw new Error(`OrbitDB initialization failed: ${e.message}`);
  }
};
/**
 * Opens (or creates) a named OrbitDB database of the given store type.
 *
 * Reuses a previously saved address when one exists so nodes converge on the
 * same database; otherwise creates the database and persists its address.
 * Write access is granted to everyone ('*') via the IPFS access controller.
 */
export const openDB = async (name: string, type: string) => {
  if (!orbitdb) {
    throw new Error('OrbitDB not initialized. Call init() first.');
  }
  const existingAddress = getDBAddress(name);
  let db;
  try {
    const dbOptions = {
      type,
      overwrite: false,
      AccessController: IPFSAccessController({
        write: ['*'],
      }),
    };
    if (existingAddress) {
      logger.info(`Loading existing database with address: ${existingAddress}`);
      db = await orbitdb.open(existingAddress, dbOptions);
    } else {
      logger.info(`Creating new database: ${name}`);
      db = await orbitdb.open(name, dbOptions);
      saveDBAddress(name, db.address.toString());
    }
    // Log the access controller type to verify
    logger.info('Access Controller Type:', db.access.type);
    return db;
  } catch (error) {
    logger.error(`Error opening database '${name}':`, error);
    throw error;
  }
};
// Returns the raw OrbitDB instance (undefined before init()).
export const getOrbitDB = () => orbitdb;

/**
 * Convenience wrapper around openDB: any failure — including OrbitDB not
 * being initialized — is logged and rethrown as a generic "Database error".
 */
export const db = async (dbName: string, type: string) => {
  try {
    if (!orbitdb) {
      throw new Error('OrbitDB not initialized. Call init() first.');
    }
    return await openDB(dbName, type);
  } catch (error: any) {
    logger.error(`Error accessing database '${dbName}':`, error);
    throw new Error(`Database error: ${error.message}`);
  }
};

export default {
  init,
  openDB,
  getOrbitDB,
  db,
};

View File

@ -1,162 +0,0 @@
import { createLogger, format, transports } from 'winston';
import fs from 'fs';
import path from 'path';
// Define logger options interface
export interface LoggerOptions {
  logsDir?: string; // directory for log files (default: <cwd>/logs)
  level?: string; // minimum level (default: LOG_LEVEL env or 'info')
  disableConsole?: boolean; // suppress the console transport
  disableFile?: boolean; // suppress file transports and dir creation
}
// Define colors for different service types (ANSI escape sequences)
const colors: Record<string, string> = {
  error: '\x1b[31m', // red
  warn: '\x1b[33m', // yellow
  info: '\x1b[32m', // green
  debug: '\x1b[36m', // cyan
  reset: '\x1b[0m', // reset
  // Service specific colors
  IPFS: '\x1b[36m', // cyan
  HEARTBEAT: '\x1b[33m', // yellow
  SOCKET: '\x1b[34m', // blue
  'LOAD-BALANCER': '\x1b[35m', // magenta
  DEFAULT: '\x1b[37m', // white
};
// Create a customizable logger factory
/**
 * Builds a winston logger with colored console output and optional app/error
 * log files, plus a `createServiceLogger` helper that tags entries with a
 * service name (used for per-service coloring in the console format).
 */
export function createDebrosLogger(options: LoggerOptions = {}) {
  // Set default options
  const logsDir = options.logsDir || path.join(process.cwd(), 'logs');
  const logLevel = options.level || process.env.LOG_LEVEL || 'info';
  // Create logs directory if it doesn't exist
  if (!fs.existsSync(logsDir) && !options.disableFile) {
    fs.mkdirSync(logsDir, { recursive: true });
  }
  // Custom format for console output with colors
  const customConsoleFormat = format.printf(({ level, message, timestamp, service }: any) => {
    // Truncate error messages
    if (level === 'error' && typeof message === 'string' && message.length > 300) {
      message = message.substring(0, 300) + '... [truncated]';
    }
    // Handle objects and errors
    if (typeof message === 'object' && message !== null) {
      if (message instanceof Error) {
        message = message.message;
        // Truncate error messages
        if (message.length > 300) {
          message = message.substring(0, 300) + '... [truncated]';
        }
      } else {
        try {
          message = JSON.stringify(message, null, 2);
        } catch (e: any) {
          console.error(e);
          message = '[Object]';
        }
      }
    }
    const serviceColor = service && colors[service] ? colors[service] : colors.DEFAULT;
    const levelColor = colors[level] || colors.DEFAULT;
    const serviceTag = service ? `[${service}]` : '';
    return `${timestamp} ${levelColor}${level}${colors.reset}: ${serviceColor}${serviceTag}${colors.reset} ${message}`;
  });
  // Custom format for file output (no colors)
  const customFileFormat = format.printf(({ level, message, timestamp, service }) => {
    // Handle objects and errors
    if (typeof message === 'object' && message !== null) {
      if (message instanceof Error) {
        message = message.message;
      } else {
        try {
          message = JSON.stringify(message);
        } catch (e: any) {
          console.error(e);
          message = '[Object]';
        }
      }
    }
    const serviceTag = service ? `[${service}]` : '';
    return `${timestamp} ${level}: ${serviceTag} ${message}`;
  });
  // Configure transports
  const loggerTransports = [];
  // Add console transport if not disabled
  if (!options.disableConsole) {
    loggerTransports.push(
      new transports.Console({
        format: format.combine(
          format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
          customConsoleFormat,
        ),
      }),
    );
  }
  // Add file transports if not disabled
  if (!options.disableFile) {
    loggerTransports.push(
      // Combined log file
      new transports.File({
        filename: path.join(logsDir, 'app.log'),
        format: format.combine(
          format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
          customFileFormat,
        ),
      }),
      // Error log file
      new transports.File({
        filename: path.join(logsDir, 'error.log'),
        level: 'error',
        format: format.combine(
          format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
          customFileFormat,
        ),
      }),
    );
  }
  // Create the logger
  const logger = createLogger({
    level: logLevel,
    format: format.combine(format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.splat()),
    defaultMeta: { service: 'DEFAULT' },
    transports: loggerTransports,
  });
  // Helper function to create a logger for a specific service
  // NOTE(review): spreading the rest-array `meta` into the metadata object
  // produces numeric keys ({0: ..., 1: ...}); confirm whether callers rely
  // on this before changing it.
  const createServiceLogger = (serviceName: string) => {
    return {
      error: (message: any, ...meta: any[]) =>
        logger.error(message, { service: serviceName, ...meta }),
      warn: (message: any, ...meta: any[]) =>
        logger.warn(message, { service: serviceName, ...meta }),
      info: (message: any, ...meta: any[]) =>
        logger.info(message, { service: serviceName, ...meta }),
      debug: (message: any, ...meta: any[]) =>
        logger.debug(message, { service: serviceName, ...meta }),
    };
  };
  return {
    logger,
    createServiceLogger,
  };
}
// Create a default logger instance
const { logger, createServiceLogger } = createDebrosLogger();
// Named exports expose the shared default logger and the per-service factory;
// the default export is the logger itself.
export { logger, createServiceLogger };
export default logger;

347
types.d.ts vendored
View File

@ -3,352 +3,5 @@
// Definitions by: Debros Team
declare module '@debros/network' {
import { Request, Response, NextFunction } from 'express';
// Config types
export interface DebrosConfig {
env: {
fingerprint: string;
port: number;
};
ipfs: {
swarm: {
port: number;
announceAddresses: string[];
listenAddresses: string[];
connectAddresses: string[];
};
blockstorePath: string;
bootstrap: string[];
privateKey?: string;
serviceDiscovery?: {
topic: string;
heartbeatInterval: number;
};
};
orbitdb: {
directory: string;
};
logger: {
level: string;
file?: string;
};
}
export interface ValidationResult {
valid: boolean;
errors?: string[];
}
// Core configuration
export const config: DebrosConfig;
export const defaultConfig: DebrosConfig;
export function validateConfig(config: Partial<DebrosConfig>): ValidationResult;
// Store types
export enum StoreType {
KEYVALUE = 'keyvalue',
DOCSTORE = 'docstore',
FEED = 'feed',
EVENTLOG = 'eventlog',
COUNTER = 'counter',
}
// Error handling
export enum ErrorCode {
NOT_INITIALIZED = 'ERR_NOT_INITIALIZED',
INITIALIZATION_FAILED = 'ERR_INIT_FAILED',
DOCUMENT_NOT_FOUND = 'ERR_DOC_NOT_FOUND',
INVALID_SCHEMA = 'ERR_INVALID_SCHEMA',
OPERATION_FAILED = 'ERR_OPERATION_FAILED',
TRANSACTION_FAILED = 'ERR_TRANSACTION_FAILED',
FILE_NOT_FOUND = 'ERR_FILE_NOT_FOUND',
INVALID_PARAMETERS = 'ERR_INVALID_PARAMS',
CONNECTION_ERROR = 'ERR_CONNECTION',
STORE_TYPE_ERROR = 'ERR_STORE_TYPE',
}
export class DBError extends Error {
code: ErrorCode;
details?: any;
constructor(code: ErrorCode, message: string, details?: any);
}
// Schema validation
export interface SchemaDefinition {
type: string;
required?: boolean;
pattern?: string;
min?: number;
max?: number;
enum?: any[];
items?: SchemaDefinition; // For arrays
properties?: Record<string, SchemaDefinition>; // For objects
}
export interface CollectionSchema {
properties: Record<string, SchemaDefinition>;
required?: string[];
}
// Database types
export interface DocumentMetadata {
createdAt: number;
updatedAt: number;
}
export interface Document extends DocumentMetadata {
[key: string]: any;
}
export interface CreateResult {
id: string;
hash: string;
}
export interface UpdateResult {
id: string;
hash: string;
}
export interface FileUploadResult {
cid: string;
}
export interface FileMetadata {
filename?: string;
size: number;
uploadedAt: number;
[key: string]: any;
}
export interface FileResult {
data: Buffer;
metadata: FileMetadata | null;
}
export interface ListOptions {
limit?: number;
offset?: number;
sort?: { field: string; order: 'asc' | 'desc' };
connectionId?: string;
storeType?: StoreType;
}
export interface QueryOptions extends ListOptions {
indexBy?: string;
}
export interface PaginatedResult<T> {
documents: T[];
total: number;
hasMore: boolean;
}
// Transaction API
export class Transaction {
create<T>(collection: string, id: string, data: T): Transaction;
update<T>(collection: string, id: string, data: Partial<T>): Transaction;
delete(collection: string, id: string): Transaction;
commit(): Promise<{ success: boolean; results: any[] }>;
}
// Metrics tracking
export interface Metrics {
operations: {
creates: number;
reads: number;
updates: number;
deletes: number;
queries: number;
fileUploads: number;
fileDownloads: number;
};
performance: {
totalOperationTime: number;
operationCount: number;
averageOperationTime: number;
};
errors: {
count: number;
byCode: Record<string, number>;
};
cacheStats: {
hits: number;
misses: number;
};
startTime: number;
}
// Database Operations
export function initDB(connectionId?: string): Promise<string>;
export function create<T extends Record<string, any>>(
collection: string,
id: string,
data: Omit<T, 'createdAt' | 'updatedAt'>,
options?: { connectionId?: string; storeType?: StoreType },
): Promise<CreateResult>;
export function get<T extends Record<string, any>>(
collection: string,
id: string,
options?: { connectionId?: string; skipCache?: boolean; storeType?: StoreType },
): Promise<T | null>;
export function update<T extends Record<string, any>>(
collection: string,
id: string,
data: Partial<Omit<T, 'createdAt' | 'updatedAt'>>,
options?: { connectionId?: string; upsert?: boolean; storeType?: StoreType },
): Promise<UpdateResult>;
export function remove(
collection: string,
id: string,
options?: { connectionId?: string; storeType?: StoreType },
): Promise<boolean>;
export function list<T extends Record<string, any>>(
collection: string,
options?: ListOptions,
): Promise<PaginatedResult<T>>;
export function query<T extends Record<string, any>>(
collection: string,
filter: (doc: T) => boolean,
options?: QueryOptions,
): Promise<PaginatedResult<T>>;
// Schema operations
export function defineSchema(collection: string, schema: CollectionSchema): void;
// Transaction operations
export function createTransaction(connectionId?: string): Transaction;
export function commitTransaction(
transaction: Transaction,
): Promise<{ success: boolean; results: any[] }>;
// Index operations
export function createIndex(
collection: string,
field: string,
options?: { connectionId?: string; storeType?: StoreType },
): Promise<boolean>;
// Event Subscription API
export interface DocumentCreatedEvent {
collection: string;
id: string;
document: any;
}
export interface DocumentUpdatedEvent {
collection: string;
id: string;
document: any;
previous: any;
}
export interface DocumentDeletedEvent {
collection: string;
id: string;
document: any;
}
export type DBEventType = 'document:created' | 'document:updated' | 'document:deleted';
export function subscribe(
event: 'document:created',
callback: (data: DocumentCreatedEvent) => void,
): () => void;
export function subscribe(
event: 'document:updated',
callback: (data: DocumentUpdatedEvent) => void,
): () => void;
export function subscribe(
event: 'document:deleted',
callback: (data: DocumentDeletedEvent) => void,
): () => void;
export function subscribe(event: DBEventType, callback: (data: any) => void): () => void;
// File operations
export function uploadFile(
fileData: Buffer,
options?: { filename?: string; connectionId?: string; metadata?: Record<string, any> },
): Promise<FileUploadResult>;
export function getFile(cid: string, options?: { connectionId?: string }): Promise<FileResult>;
export function deleteFile(cid: string, options?: { connectionId?: string }): Promise<boolean>;
// Connection management
export function closeConnection(connectionId: string): Promise<boolean>;
// Metrics
// Stop
export function stopDB(): Promise<void>;
// Logger
export interface LoggerOptions {
level?: string;
file?: string;
service?: string;
}
export const logger: any;
export function createServiceLogger(name: string, options?: LoggerOptions): any;
export function createDebrosLogger(options?: LoggerOptions): any;
// Load Balancer
export interface LoadBalancerControllerModule {
getNodeInfo: (req: Request, res: Response, next: NextFunction) => void;
getOptimalPeer: (req: Request, res: Response, next: NextFunction) => void;
getAllPeers: (req: Request, res: Response, next: NextFunction) => void;
}
export const loadBalancerController: LoadBalancerControllerModule;
export const getConnectedPeers: () => Map<
string,
{
lastSeen: number;
load: number;
publicAddress: string;
fingerprint: string;
}
>;
export const logPeersStatus: () => void;
// Default export
const defaultExport: {
config: DebrosConfig;
validateConfig: typeof validateConfig;
db: {
init: typeof initDB;
create: typeof create;
get: typeof get;
update: typeof update;
remove: typeof remove;
list: typeof list;
query: typeof query;
createIndex: typeof createIndex;
createTransaction: typeof createTransaction;
commitTransaction: typeof commitTransaction;
subscribe: typeof subscribe;
uploadFile: typeof uploadFile;
getFile: typeof getFile;
deleteFile: typeof deleteFile;
defineSchema: typeof defineSchema;
closeConnection: typeof closeConnection;
stop: typeof stopDB;
ErrorCode: typeof ErrorCode;
StoreType: typeof StoreType;
};
loadBalancerController: LoadBalancerControllerModule;
getConnectedPeers: () => Map<
string,
{
lastSeen: number;
load: number;
publicAddress: string;
fingerprint: string;
}
>;
logPeersStatus: () => void;
logger: any;
createServiceLogger: typeof createServiceLogger;
};
export default defaultExport;
}