From 9fc62f79dff6766cfb8eefd03cdea76bd501f706 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 09:30:44 -0500 Subject: [PATCH 001/327] Add REDACTED and update social-sync integration Added a new file REDACTED containing detailed user biographies and metadata. Updated the social-sync integration in protocol/src/lib/integrations/social-sync.ts to support or utilize the new data structure. --- protocol/src/lib/integrations/social-sync.ts | 104 ------------------- 1 file changed, 104 deletions(-) diff --git a/protocol/src/lib/integrations/social-sync.ts b/protocol/src/lib/integrations/social-sync.ts index 5cb4f1116..c5fb004d9 100644 --- a/protocol/src/lib/integrations/social-sync.ts +++ b/protocol/src/lib/integrations/social-sync.ts @@ -199,108 +199,6 @@ export async function syncAllSocialMedia(): Promise { }; } -/** - * Generate intents from biography when socials are updated - * This runs asynchronously and doesn't block the API response - */ -async function generateIntentsFromBiography(userId: string): Promise { - try { - // Get user from database - const userRecords = await db.select() - .from(users) - .where(and(eq(users.id, userId), isNull(users.deletedAt))) - .limit(1); - - if (userRecords.length === 0) { - log.warn('User not found for biography intent generation', { userId }); - return; - } - - const user = userRecords[0]; - const socials = user.socials || {}; - - // Prepare input for Parallels task - const input: GenerateIntroInput = {}; - - if (user.name?.trim()) { - input.name = user.name.trim(); - } - - if (user.email?.trim()) { - input.email = user.email.trim(); - } - - // Convert LinkedIn username to URL if needed - if (socials.linkedin) { - const linkedinValue = String(socials.linkedin).trim(); - if (linkedinValue) { - input.linkedin = linkedinValue.startsWith('http') - ? 
linkedinValue - : `https://www.linkedin.com/in/${linkedinValue}`; - } - } - - // Convert Twitter username to URL if needed - if (socials.x) { - const twitterValue = String(socials.x).trim(); - if (twitterValue) { - if (twitterValue.startsWith('http')) { - input.twitter = twitterValue; - } else { - const username = twitterValue.replace(/^@/, ''); - input.twitter = `https://x.com/${username}`; - } - } - } - - // Ensure at least one field is provided - if (!input.name && !input.email && !input.linkedin && !input.twitter) { - log.warn('No valid input data for biography intent generation', { userId }); - return; - } - - log.info('Generating biography for intent generation', { userId }); - - // Generate biography using Parallels - const introResult = await generateIntro(input); - if (!introResult || !introResult.biography) { - log.warn('Failed to generate biography for intent generation', { userId }); - return; - } - - const biography = introResult.biography; - - // Generate intents from biography asynchronously - const existingIntents = await IntentService.getUserIntents(userId); - const result = await analyzeContent( - biography, - 1, // itemCount - 'Generate intents from user biography, skip intents too old or if they are not relevant to the user anymore.', - Array.from(existingIntents), - undefined, - 60000 - ); - - if (result?.success && result.intents) { - for (const intentData of result.intents) { - if (!existingIntents.has(intentData.payload)) { - await IntentService.createIntent({ - payload: intentData.payload, - userId, - sourceId: userId, // Use userId as sourceId for social-generated intents - sourceType: 'integration', - confidence: intentData.confidence, - inferenceType: intentData.type, - }); - existingIntents.add(intentData.payload); - } - } - log.info('Generated intents from biography', { userId, intentsGenerated: result.intents.length }); - } - } catch (error) { - log.error('Biography intent generation error', { userId, error: (error as Error).message }); - } -} /** * Trigger social media sync when user updates their socials field @@ -313,8 +211,6 @@ export async function triggerSocialSync(userId: string, socialType: 'twitter' | if (socialType === 'twitter') { log.info('Triggering Twitter sync', { userId }); await syncTwitterUser(userId); - // Also generate intents from biography - await generateIntentsFromBiography(userId); } else if (socialType === 'linkedin') { log.info('Triggering LinkedIn sync', { userId }); await enrichUserProfile(userId); // Includes intro generation and intent generation from biography From fd80ab68e9b0bd9fa033cf05f13a21e3377568c0 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 17:12:11 -0500 Subject: [PATCH 002/327] Refactor social sync for bulk Twitter/enrichment ops Optimizes Twitter sync to use bulk profile and tweet fetching, adds batch processing for all-user sync, and introduces enrichment triggers based on name/email changes. Updates CLI options, environment variables, and onboarding schema to support new enrichment logic and hash tracking. Removes LinkedIn-specific logic and replaces it with a generalized enrichment flow. 
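
The name+email hash tracking works roughly as sketched below (a minimal illustration distilled from the generateEnrichmentHash helper and the onboarding.enrichmentHash field added in social-sync.ts in this commit; not the full enrichment flow):

    import crypto from 'crypto';

    // Hash the name+email pair so enrichment runs at most once per combination.
    function generateEnrichmentHash(name: string, email: string): string {
      return crypto.createHash('sha256')
        .update(`${name}:${email}`)
        .digest('hex');
    }

    // The resulting hash is persisted on the user's onboarding state
    // (onboarding.enrichmentHash) and compared against the current
    // name+email hash before enrichment is triggered again.
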
--- protocol/env.example | 4 +- protocol/src/cli/social-worker.ts | 101 ++++++- .../src/lib/integrations/providers/twitter.ts | 212 +++++++++++++- protocol/src/lib/integrations/social-sync.ts | 264 +++++++++++------- protocol/src/lib/schema.ts | 1 + protocol/src/lib/snowflake.ts | 221 ++++++++++++++- protocol/src/routes/auth.ts | 7 +- protocol/src/routes/users.ts | 7 +- protocol/src/types/users.ts | 1 + 9 files changed, 695 insertions(+), 123 deletions(-) diff --git a/protocol/env.example b/protocol/env.example index 3750e1954..fd2e4bdf4 100644 --- a/protocol/env.example +++ b/protocol/env.example @@ -66,8 +66,8 @@ SNOWFLAKE_DATABASE=DATA_COLLECTOR_ICEBERG SNOWFLAKE_SCHEMA=PUBLIC # Social media sync intervals (in milliseconds) -# TWITTER_SYNC_DELAY_MS=3600000 # default: 1 hour -# LINKEDIN_SYNC_DELAY_MS=3600000 # default: 1 hour +# TWITTER_SYNC_DELAY_MS=14400000 # default: 4 hours (checks last 4 hours of tweets) +# ENRICHMENT_SYNC_DELAY_MS=3600000 # default: 1 hour (not used - enrichment is on-demand) # Proxy configuration for web crawling # PROXY_SERVER=http://proxy.example.com:8080 diff --git a/protocol/src/cli/social-worker.ts b/protocol/src/cli/social-worker.ts index bcf23cf5c..ed5d45909 100644 --- a/protocol/src/cli/social-worker.ts +++ b/protocol/src/cli/social-worker.ts @@ -19,28 +19,30 @@ type Opts = { type?: string; silent?: boolean; userId?: string; + runAll?: boolean; }; let isShuttingDown = false; -const TWITTER_SYNC_DELAY_MS = parseInt(process.env.TWITTER_SYNC_DELAY_MS || '3600000'); // 1 hour default -const ENRICHMENT_SYNC_DELAY_MS = parseInt(process.env.LINKEDIN_SYNC_DELAY_MS || '3600000'); // 1 hour default +const TWITTER_SYNC_DELAY_MS = parseInt(process.env.TWITTER_SYNC_DELAY_MS || '14400000'); // 4 hours default +const ENRICHMENT_SYNC_DELAY_MS = parseInt(process.env.ENRICHMENT_SYNC_DELAY_MS || '3600000'); // 1 hour default async function main(): Promise { const program = new Command(); program .name('social-worker') - .description('Run a continuous social media sync worker for Twitter and profile enrichment') - .option('--type ', 'Sync type: twitter, linkedin (enrichment), or all (default: all)') + .description('Run social media sync worker for Twitter and profile enrichment') + .option('--type ', 'Sync type: twitter, enrichment, or all (default: all)') .option('--userId ', 'Sync specific user ID (if provided, runs once and exits)') + .option('--run-all', 'Run sync for all users once and exit (instead of continuous worker)') .option('--silent', 'Suppress non-error output') .action(async (opts: Opts) => { const syncType = opts.type || 'all'; if (opts.silent) setLevel('error'); - log.info('Starting social worker', { syncType, userId: opts.userId }); + log.info('Starting social worker', { syncType, userId: opts.userId, runAll: opts.runAll }); // Handle graceful shutdown const shutdown = () => { @@ -56,7 +58,7 @@ async function main(): Promise { if (opts.userId) { if (syncType === 'twitter') { await syncSingleTwitterUser(opts.userId); - } else if (syncType === 'linkedin') { + } else if (syncType === 'enrichment') { await syncSingleEnrichmentUser(opts.userId); } else { await syncSingleUserAll(opts.userId); @@ -66,10 +68,24 @@ async function main(): Promise { return; } + // If --run-all is provided, run once for all users and exit + if (opts.runAll) { + if (syncType === 'twitter') { + await syncAllTwitterUsersOnce(); + } else if (syncType === 'enrichment') { + await enrichAllUsersOnce(); + } else { + await syncAllUsersOnce(); + } + log.info('All users sync complete'); + 
process.exit(0); + return; + } + // Otherwise, run continuous workers if (syncType === 'twitter') { await runTwitterWorker(); - } else if (syncType === 'linkedin') { + } else if (syncType === 'enrichment') { await runEnrichmentWorker(); } else { await runAllSocialWorkers(); @@ -81,7 +97,18 @@ async function main(): Promise { program.addHelpText( 'after', - '\nExamples:\n yarn social-worker --type twitter\n yarn social-worker --type linkedin\n yarn social-worker --type all --silent\n yarn social-worker --type twitter --userId abc123\n' + '\nExamples:\n' + + ' # Continuous workers (every 4 hours for Twitter):\n' + + ' yarn social-worker --type twitter\n' + + ' yarn social-worker --type enrichment\n' + + ' yarn social-worker --type all --silent\n' + + '\n' + + ' # Run once for all users:\n' + + ' yarn social-worker --type twitter --run-all\n' + + ' yarn social-worker --type enrichment --run-all\n' + + '\n' + + ' # Sync single user:\n' + + ' yarn social-worker --type twitter --userId abc123\n' ); try { @@ -96,7 +123,8 @@ async function main(): Promise { async function syncSingleTwitterUser(userId: string): Promise { try { log.info('Syncing single Twitter user', { userId }); - const result = await syncTwitterUser(userId); + const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); + const result = await syncTwitterUser(userId, fourHoursAgo); if (result.success) { log.info('Twitter sync successful', { userId, intentsGenerated: result.intentsGenerated, locationUpdated: result.locationUpdated }); } else { @@ -128,8 +156,9 @@ async function syncSingleEnrichmentUser(userId: string): Promise { async function syncSingleUserAll(userId: string): Promise { try { log.info('Syncing all social media for single user', { userId }); + const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); const [twitterResult, enrichmentResult] = await Promise.all([ - syncTwitterUser(userId).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), + syncTwitterUser(userId, fourHoursAgo).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), enrichUserProfile(userId).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), ]); @@ -182,6 +211,58 @@ async function runEnrichmentWorker(): Promise { } } +async function syncAllTwitterUsersOnce(): Promise { + try { + log.info('Running Twitter sync for all users (once)'); + const result = await syncAllTwitterUsers(); + log.info('Twitter sync for all users complete', { + usersProcessed: result.usersProcessed, + intentsGenerated: result.intentsGenerated, + locationUpdated: result.locationUpdated, + errors: result.errors + }); + } catch (error) { + log.error('Twitter sync for all users error', { + error: error instanceof Error ? error.message : String(error) + }); + process.exit(1); + } +} + +async function enrichAllUsersOnce(): Promise { + try { + log.info('Running enrichment for all users (once)'); + const result = await enrichAllUsers(); + log.info('Enrichment for all users complete', { + usersProcessed: result.usersProcessed, + intentsGenerated: result.intentsGenerated, + locationUpdated: result.locationUpdated, + errors: result.errors + }); + } catch (error) { + log.error('Enrichment for all users error', { + error: error instanceof Error ? 
error.message : String(error) + }); + process.exit(1); + } +} + +async function syncAllUsersOnce(): Promise { + try { + log.info('Running full social sync for all users (once)'); + const result = await syncAllSocialMedia(); + log.info('Full social sync for all users complete', { + twitter: result.twitter, + enrichment: result.enrichment + }); + } catch (error) { + log.error('Full social sync for all users error', { + error: error instanceof Error ? error.message : String(error) + }); + process.exit(1); + } +} + async function runAllSocialWorkers(): Promise { while (!isShuttingDown) { try { diff --git a/protocol/src/lib/integrations/providers/twitter.ts b/protocol/src/lib/integrations/providers/twitter.ts index dfc43c8c9..2b416a99d 100644 --- a/protocol/src/lib/integrations/providers/twitter.ts +++ b/protocol/src/lib/integrations/providers/twitter.ts @@ -1,9 +1,9 @@ import { log } from '../../log'; -import { fetchTwitterProfile, fetchTwitterTweets, extractTwitterUsername } from '../../snowflake'; +import { fetchTwitterProfile, fetchTwitterTweets, fetchTwitterProfilesBulk, fetchTwitterTweetsBulk, extractTwitterUsername } from '../../snowflake'; import { addGenerateIntentsJob } from '../../queue/llm-queue'; import db from '../../db'; import { users } from '../../schema'; -import { eq, isNull, and } from 'drizzle-orm'; +import { eq, isNull, and, inArray } from 'drizzle-orm'; export interface TwitterSyncResult { intentsGenerated: number; @@ -12,7 +12,7 @@ export interface TwitterSyncResult { error?: string; } -export async function syncTwitterUser(userId: string): Promise { +export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | null): Promise { try { // Get user from database const userRecords = await db.select() @@ -37,7 +37,28 @@ export async function syncTwitterUser(userId: string): Promise ({ text: tweet.TEXT, @@ -105,3 +129,177 @@ export async function syncTwitterUser(userId: string): Promise, + sinceTimestamp?: Date +): Promise<{ usersProcessed: number; intentsGenerated: number; locationUpdated: number; errors: number }> { + const stats = { + usersProcessed: 0, + intentsGenerated: 0, + locationUpdated: 0, + errors: 0, + }; + + try { + // Extract usernames and create mapping + const usernameToUser = new Map(); + const usernames: string[] = []; + + for (const user of userBatch) { + const twitterUrl = user.socials?.x; + if (!twitterUrl) continue; + + const username = extractTwitterUsername(twitterUrl); + if (!username) continue; + + usernameToUser.set(username, user); + usernames.push(username); + } + + if (usernames.length === 0) { + return stats; + } + + log.info('Syncing Twitter users bulk', { + userCount: usernames.length, + syncSince: sinceTimestamp?.toISOString() + }); + + // Fetch all profiles in one query + const profilesMap = await fetchTwitterProfilesBulk(usernames); + + // Determine sync timestamp (4 hours ago or sinceTimestamp) + const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); + const syncSince = sinceTimestamp || fourHoursAgo; + + // Fetch all tweets in one query + // Use TWITTER_TWEETS_3_DAY for worker sync + const tweetsMap = await fetchTwitterTweetsBulk(usernames, syncSince, 100, true); + + // Process each user + const userIdsToUpdate: string[] = []; + const locationUpdates: Array<{ userId: string; location: string }> = []; + const intentJobs: Array<{ userId: string; tweetObjects: any[] }> = []; + + for (const username of usernames) { + const user = usernameToUser.get(username); + if (!user) continue; + + try { + const profile = 
profilesMap.get(username); + if (!profile) { + stats.errors++; + log.warn('Twitter profile not found', { userId: user.id, username }); + continue; + } + + log.info('Processing Twitter user', { + userId: user.id, + username, + profileId: profile.ID, + followers: profile.FOLLOWERS_COUNT, + tweetsCount: profile.TWEETS_COUNT + }); + + // Update location if needed + if (profile.LOCATION && !user.location) { + locationUpdates.push({ userId: user.id, location: profile.LOCATION }); + stats.locationUpdated++; + log.info('Queued location update', { userId: user.id, username, location: profile.LOCATION }); + } + + // Get tweets for this user + const tweets = tweetsMap.get(username) || []; + + if (tweets.length === 0) { + // Update last sync time even if no new tweets + userIdsToUpdate.push(user.id); + stats.usersProcessed++; + log.info('No new tweets found', { userId: user.id, username }); + continue; + } + + log.info('Found new tweets', { + userId: user.id, + username, + tweetCount: tweets.length + }); + + // Prepare tweet objects for intent generation + const tweetObjects = tweets.map(tweet => ({ + text: tweet.TEXT, + timestamp: tweet.TIMESTAMP, + likes: tweet.LIKES, + reposts: tweet.REPOSTS, + views: tweet.VIEWS, + })); + + intentJobs.push({ userId: user.id, tweetObjects }); + userIdsToUpdate.push(user.id); + stats.usersProcessed++; + log.info('Queued intent generation', { userId: user.id, username, tweetCount: tweets.length }); + } catch (error) { + stats.errors++; + log.error('Error processing user in bulk sync', { + userId: user.id, + username, + error: (error as Error).message + }); + } + } + + // Batch update locations + if (locationUpdates.length > 0) { + for (const { userId, location } of locationUpdates) { + try { + await db.update(users) + .set({ location, updatedAt: new Date() }) + .where(eq(users.id, userId)); + log.info('Updated user location', { userId, location }); + } catch (error) { + log.error('Failed to update location', { userId, error: (error as Error).message }); + } + } + } + + // Queue intent generation jobs + for (const { userId, tweetObjects } of intentJobs) { + try { + await addGenerateIntentsJob({ + userId, + sourceId: userId, + sourceType: 'integration', + objects: tweetObjects, + instruction: 'Generate intents from Twitter tweets', + }, 6); + stats.intentsGenerated++; + log.info('Queued intent generation job', { userId, tweetCount: tweetObjects.length }); + } catch (error) { + log.error('Failed to queue intent generation', { + userId, + error: (error as Error).message + }); + } + } + + + log.info('Twitter bulk sync complete', { + usersProcessed: stats.usersProcessed, + intentsGenerated: stats.intentsGenerated, + locationUpdated: stats.locationUpdated, + errors: stats.errors + }); + + return stats; + } catch (error) { + log.error('Twitter bulk sync error', { error: (error as Error).message }); + return stats; + } +} + diff --git a/protocol/src/lib/integrations/social-sync.ts b/protocol/src/lib/integrations/social-sync.ts index c5fb004d9..a2aacafba 100644 --- a/protocol/src/lib/integrations/social-sync.ts +++ b/protocol/src/lib/integrations/social-sync.ts @@ -1,13 +1,10 @@ import { log } from '../log'; -import { syncTwitterUser } from './providers/twitter'; +import { syncTwitterUser, syncTwitterUsersBulk } from './providers/twitter'; import { enrichUserProfile } from './providers/profile-enrich'; -import { generateUserIntro } from './intro-generator'; -import { generateIntro, GenerateIntroInput } from '../parallels'; -import { analyzeContent } from 
'../../agents/core/intent_inferrer'; -import { IntentService } from '../intent-service'; import db from '../db'; import { users } from '../schema'; import { isNotNull, isNull, and, eq } from 'drizzle-orm'; +import crypto from 'crypto'; export interface SocialSyncResult { twitter: { @@ -16,18 +13,12 @@ export interface SocialSyncResult { locationUpdated: number; errors: number; }; - linkedin: { + enrichment: { usersProcessed: number; intentsGenerated: number; locationUpdated: number; errors: number; }; - introGeneration: { - usersProcessed: number; - introUpdated: number; - locationUpdated: number; - errors: number; - }; } export async function syncAllTwitterUsers(): Promise { @@ -38,6 +29,9 @@ export async function syncAllTwitterUsers(): Promise user.socials?.x ); - log.info('Starting Twitter sync', { userCount: twitterUsers.length }); + log.info('Starting Twitter sync', { userCount: twitterUsers.length, batchSize: BATCH_SIZE }); - for (const user of twitterUsers) { + // Process users in batches + for (let i = 0; i < twitterUsers.length; i += BATCH_SIZE) { + const batch = twitterUsers.slice(i, i + BATCH_SIZE); + log.info(`Processing Twitter sync batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(twitterUsers.length / BATCH_SIZE)}`, { + batchStart: i + 1, + batchEnd: Math.min(i + BATCH_SIZE, twitterUsers.length), + totalUsers: twitterUsers.length, + }); + + // Process batch using bulk operations try { - const result = await syncTwitterUser(user.id); - stats.usersProcessed++; - - if (result.success) { - if (result.intentsGenerated > 0) stats.intentsGenerated++; - if (result.locationUpdated) stats.locationUpdated++; - } else { - stats.errors++; - log.warn('Twitter sync failed for user', { userId: user.id, error: result.error }); - } + const batchResult = await syncTwitterUsersBulk(batch, FOUR_HOURS_AGO); + stats.usersProcessed += batchResult.usersProcessed; + stats.intentsGenerated += batchResult.intentsGenerated; + stats.locationUpdated += batchResult.locationUpdated; + stats.errors += batchResult.errors; } catch (error) { - stats.errors++; - log.error('Twitter sync error for user', { userId: user.id, error: (error as Error).message }); + stats.errors += batch.length; + log.error('Twitter sync batch error', { + batchStart: i + 1, + batchSize: batch.length, + error: (error as Error).message + }); } } @@ -81,7 +83,7 @@ export async function syncAllTwitterUsers(): Promise { +export async function enrichAllUsers(): Promise { const stats = { usersProcessed: 0, intentsGenerated: 0, @@ -132,70 +134,17 @@ export async function enrichAllUsers(): Promise { } } -export async function generateIntrosForEligibleUsers(): Promise { - const stats = { - usersProcessed: 0, - introUpdated: 0, - locationUpdated: 0, - errors: 0, - }; - - try { - // Get all users with LinkedIn or Twitter but no intro - const eligibleUsers = await db.select() - .from(users) - .where( - and( - isNull(users.deletedAt), - isNotNull(users.socials) - ) - ); - - const usersNeedingIntro = eligibleUsers.filter( - user => !user.intro && (user.socials?.linkedin || user.socials?.x) - ); - - log.info('Starting intro generation', { userCount: usersNeedingIntro.length }); - - for (const user of usersNeedingIntro) { - try { - const result = await generateUserIntro(user.id); - stats.usersProcessed++; - - if (result.success) { - if (result.introUpdated) stats.introUpdated++; - if (result.locationUpdated) stats.locationUpdated++; - } else { - stats.errors++; - log.warn('Intro generation failed for user', { userId: user.id, error: result.error }); - 
} - } catch (error) { - stats.errors++; - log.error('Intro generation error for user', { userId: user.id, error: (error as Error).message }); - } - } - - log.info('Intro generation complete', stats); - return stats; - } catch (error) { - log.error('Intro generation batch error', { error: (error as Error).message }); - return stats; - } -} - export async function syncAllSocialMedia(): Promise { log.info('Starting full social media sync'); - const [twitter, linkedin, introGeneration] = await Promise.all([ + const [twitter, enrichment] = await Promise.all([ syncAllTwitterUsers(), enrichAllUsers(), - generateIntrosForEligibleUsers(), ]); return { twitter, - linkedin, - introGeneration, + enrichment, }; } @@ -204,15 +153,16 @@ export async function syncAllSocialMedia(): Promise { * Trigger social media sync when user updates their socials field * This runs asynchronously and doesn't block the API response */ -export async function triggerSocialSync(userId: string, socialType: 'twitter' | 'linkedin'): Promise { +export async function triggerSocialSync(userId: string, socialType: 'twitter' | 'enrichment'): Promise { // Run syncs asynchronously without blocking setImmediate(async () => { try { if (socialType === 'twitter') { - log.info('Triggering Twitter sync', { userId }); - await syncTwitterUser(userId); - } else if (socialType === 'linkedin') { - log.info('Triggering LinkedIn sync', { userId }); + log.info('Triggering Twitter sync (profile update)', { userId }); + // For profile update trigger, fetch all tweets (no timestamp filter) + await syncTwitterUser(userId, null); + } else if (socialType === 'enrichment') { + log.info('Triggering enrichment sync', { userId }); await enrichUserProfile(userId); // Includes intro generation and intent generation from biography } } catch (error) { @@ -221,9 +171,136 @@ export async function triggerSocialSync(userId: string, socialType: 'twitter' | }); } +/** + * Generate hash for name+email combination to track enrichment per parameter set + */ +function generateEnrichmentHash(name: string, email: string): string { + return crypto.createHash('sha256') + .update(`${name}:${email}`) + .digest('hex'); +} + +/** + * Check if user meets enrichment criteria and trigger enrichment + * - Don't enrich if user has customized their intro + * - Only enrich once per name+email combination + * This runs asynchronously and doesn't block the API response + */ +export async function checkAndTriggerEnrichment(userId: string): Promise { + // Run check asynchronously without blocking + setImmediate(async () => { + try { + // Fetch current user state from database + const userRecords = await db.select({ + name: users.name, + email: users.email, + intro: users.intro, + onboarding: users.onboarding, + }) + .from(users) + .where(and(eq(users.id, userId), isNull(users.deletedAt))) + .limit(1); + + if (userRecords.length === 0) { + log.warn('User not found for enrichment check', { userId }); + return; + } + + const user = userRecords[0]; + + // Don't enrich if user has customized their intro + if (user.intro) { + log.info('User has customized intro, skipping enrichment', { userId }); + return; + } + + // Check if enrichment condition is met: name exists AND email exists + if (!user.name || !user.email) { + log.info('Enrichment condition not met: missing name or email', { userId }); + return; + } + + // Generate hash for current name+email combination + const currentHash = generateEnrichmentHash(user.name, user.email); + + // Get existing enrichment hash from onboarding + const 
onboarding = (user.onboarding || {}) as any; + const existingHash = onboarding.enrichmentHash; + + // Only enrich if we haven't enriched for this name+email combination before + if (existingHash === currentHash) { + log.info('Enrichment already done for this name+email combination', { userId, hash: currentHash }); + return; + } + + // Update enrichment hash atomically + // Note: Multiple processes might update simultaneously, so we'll verify after update + await db.update(users) + .set({ + onboarding: { + ...onboarding, + enrichmentHash: currentHash, + }, + }) + .where(and(eq(users.id, userId), isNull(users.deletedAt))); + + // Re-fetch immediately to check if hash was successfully set + // This helps detect race conditions where another process might have set it first + const verifyRecords = await db.select({ + name: users.name, + email: users.email, + intro: users.intro, + onboarding: users.onboarding, + }) + .from(users) + .where(and(eq(users.id, userId), isNull(users.deletedAt))) + .limit(1); + + if (verifyRecords.length === 0) { + log.warn('User not found after hash update', { userId }); + return; + } + + const verifyUser = verifyRecords[0]; + const verifyOnboarding = (verifyUser.onboarding || {}) as any; + const verifyHash = verifyOnboarding.enrichmentHash; + + // Final checks before triggering enrichment: + // 1. Hash must match what we tried to set + // 2. Hash must have changed from what we saw initially (proves we were the one who set it) + // 3. User still doesn't have intro (hasn't customized) + // 4. Name and email still exist + // This prevents duplicate enrichment if another process already enriched + const hashWasUpdated = verifyHash === currentHash && existingHash !== currentHash; + + if (hashWasUpdated && !verifyUser.intro && verifyUser.name && verifyUser.email) { + log.info('Enrichment condition met, triggering enrichment', { userId, hash: currentHash }); + await triggerSocialSync(userId, 'enrichment'); + } else { + if (verifyHash !== currentHash) { + log.info('Enrichment hash was updated by another process, skipping enrichment', { + userId, + expectedHash: currentHash, + actualHash: verifyHash + }); + } else if (!hashWasUpdated && existingHash === currentHash) { + log.info('Enrichment hash was already set before update, skipping enrichment', { userId, hash: currentHash }); + } else if (verifyUser.intro) { + log.info('User customized intro during enrichment check, skipping enrichment', { userId }); + } else { + log.info('Enrichment condition no longer met, skipping enrichment', { userId }); + } + } + } catch (error) { + log.error('Enrichment check error', { userId, error: (error as Error).message }); + } + }); +} + /** * Check if socials field changed and trigger appropriate syncs - * Also generates intents from biography when socials are updated + * Twitter sync is triggered when Twitter changes + * Enrichment is checked when socials are updated (but triggered based on name/email/intro condition) */ export function checkAndTriggerSocialSync( userId: string, @@ -234,17 +311,14 @@ export function checkAndTriggerSocialSync( const oldTwitter = oldSocials?.x; const newTwitter = newSocials?.x; - const oldLinkedIn = oldSocials?.linkedin; - const newLinkedIn = newSocials?.linkedin; - // Check if Twitter changed + // Check if Twitter changed - only trigger Twitter sync on Twitter changes if (newTwitter && newTwitter !== oldTwitter) { triggerSocialSync(userId, 'twitter'); } - // Check if LinkedIn changed - if (newLinkedIn && newLinkedIn !== oldLinkedIn) { - 
triggerSocialSync(userId, 'linkedin'); - } + // Check enrichment eligibility when socials are updated + // (enrichment triggers based on name/email/intro condition, not social changes) + checkAndTriggerEnrichment(userId); } diff --git a/protocol/src/lib/schema.ts b/protocol/src/lib/schema.ts index d5ee90e8b..ba3c55b7a 100644 --- a/protocol/src/lib/schema.ts +++ b/protocol/src/lib/schema.ts @@ -16,6 +16,7 @@ export interface OnboardingState { currentStep?: 'profile' | 'connections' | 'create_index' | 'invite_members' | 'join_indexes'; indexId?: string; // Persisted index ID for flow 2 invitationCode?: string; // Store which invitation was used (reference only) + enrichmentHash?: string; // Hash of name+email combination to track enrichment per parameter set } // Social links type diff --git a/protocol/src/lib/snowflake.ts b/protocol/src/lib/snowflake.ts index c421437c0..2903e62ff 100644 --- a/protocol/src/lib/snowflake.ts +++ b/protocol/src/lib/snowflake.ts @@ -1,6 +1,43 @@ import snowflake from 'snowflake-sdk'; import { log } from './log'; +snowflake.configure({ logLevel: 'ERROR' }); + +// Suppress Snowflake SDK info logs by overriding console methods temporarily +const originalConsoleInfo = console.info; +const originalConsoleLog = console.log; +let snowflakeLoggingSuppressed = false; + +function suppressSnowflakeLogs() { + if (snowflakeLoggingSuppressed) return; + snowflakeLoggingSuppressed = true; + + console.info = (...args: any[]) => { + const message = args[0]?.toString() || ''; + // Only suppress Snowflake SDK connection logs + if (message.includes('Creating new connection object') || + message.includes('Creating Connection') || + message.includes('Connection[') || + message.includes('Trying to initialize Easy Logging') || + message.includes('No client config detected') || + message.includes('Easy Logging')) { + return; + } + originalConsoleInfo.apply(console, args); + }; + + console.log = (...args: any[]) => { + const message = args[0]?.toString() || ''; + if (message.includes('[level:"INFO"') && message.includes('snowflake')) { + return; + } + originalConsoleLog.apply(console, args); + }; +} + +// Suppress logs immediately +suppressSnowflakeLogs(); + const SNOWFLAKE_ACCOUNT = process.env.SNOWFLAKE_ACCOUNT || ''; const SNOWFLAKE_USERNAME = process.env.SNOWFLAKE_USERNAME || ''; const SNOWFLAKE_PASSWORD = process.env.SNOWFLAKE_PASSWORD || ''; @@ -145,10 +182,66 @@ export async function fetchTwitterProfile(username: string): Promise TwitterProfile + */ +export async function fetchTwitterProfilesBulk(usernames: string[]): Promise> { + const profileMap = new Map(); + + if (!SNOWFLAKE_ACCOUNT || !SNOWFLAKE_USERNAME || !SNOWFLAKE_PASSWORD || usernames.length === 0) { + return profileMap; + } + + let connection: SnowflakeConnection | null = null; + + try { + connection = await createConnection(); + + // Build IN clause with placeholders + const placeholders = usernames.map(() => '?').join(','); + const sqlText = ` + SELECT ID, NAME, DISPLAY_NAME, BIO, LOCATION, + FOLLOWING_COUNT, FOLLOWERS_COUNT, TWEETS_COUNT + FROM twitter_profiles + WHERE name IN (${placeholders}) + `; + + const rows = await executeQuery(connection, sqlText, usernames); + + // Map results by username (NAME field) + for (const profile of rows) { + profileMap.set(profile.NAME, profile); + } + + log.info('Fetched Twitter profiles bulk', { requested: usernames.length, found: rows.length }); + return profileMap; + } catch (error) { + log.error('Failed to fetch Twitter profiles bulk', { usernameCount: usernames.length, error: 
(error as Error).message }); + return profileMap; + } finally { + if (connection) { + connection.destroy((err: any) => { + if (err) log.error('Error destroying Snowflake connection', { error: err.message }); + }); + } + } +} + /** * Fetch recent tweets from Snowflake by user ID + * @param posterId Twitter user ID + * @param limit Maximum number of tweets to fetch + * @param sinceTimestamp Optional timestamp to filter tweets (only fetch tweets after this time) + * @param useWorkerTable If true, uses TWITTER_TWEETS_3_DAY table (for worker sync), otherwise uses TWITTER_TWEETS (for initial sync) */ -export async function fetchTwitterTweets(posterId: string, limit: number = 50): Promise { +export async function fetchTwitterTweets( + posterId: string, + limit: number = 50, + sinceTimestamp?: Date, + useWorkerTable: boolean = false +): Promise { if (!SNOWFLAKE_ACCOUNT || !SNOWFLAKE_USERNAME || !SNOWFLAKE_PASSWORD) { return []; } @@ -158,16 +251,26 @@ export async function fetchTwitterTweets(posterId: string, limit: number = 50): try { connection = await createConnection(); - // Query using actual schema columns - const sqlText = ` + // Choose table based on sync type + const tableName = useWorkerTable ? 'TWITTER_TWEETS_3_DAY' : 'TWITTER_TWEETS'; + + // Build query with optional timestamp filter + let sqlText = ` SELECT ID, POSTER_ID, TEXT, TIMESTAMP, LIKES, REPOSTS, VIEWS - FROM TWITTER_TWEETS + FROM ${tableName} WHERE POSTER_ID = ? - ORDER BY TIMESTAMP DESC - LIMIT ? `; + const params: any[] = [posterId]; + + if (sinceTimestamp) { + const unixTimestamp = Math.floor(sinceTimestamp.getTime() / 1000); + sqlText += ` AND TIMESTAMP >= ${unixTimestamp}`; + } - const rows = await executeQuery(connection, sqlText, [posterId, limit]); + sqlText += ` ORDER BY TIMESTAMP DESC LIMIT ?`; + params.push(limit); + + const rows = await executeQuery(connection, sqlText, params); return rows; } catch (error) { @@ -181,3 +284,107 @@ export async function fetchTwitterTweets(posterId: string, limit: number = 50): } } } + +/** + * Fetch recent tweets from Snowflake for multiple users (bulk) + * Uses a subquery with twitter_profiles to match by username + * @param usernames Array of Twitter usernames + * @param sinceTimestamp Optional timestamp to filter tweets (only fetch tweets after this time) + * @param limitPerUser Maximum number of tweets per user + * @param useWorkerTable If true, uses TWITTER_TWEETS_3_DAY table (for worker sync), otherwise uses TWITTER_TWEETS (for initial sync) + * @returns Map of username -> TwitterTweet[] + */ +export async function fetchTwitterTweetsBulk( + usernames: string[], + sinceTimestamp?: Date, + limitPerUser: number = 100, + useWorkerTable: boolean = false +): Promise> { + const tweetsMap = new Map(); + + if (!SNOWFLAKE_ACCOUNT || !SNOWFLAKE_USERNAME || !SNOWFLAKE_PASSWORD || usernames.length === 0) { + return tweetsMap; + } + + let connection: SnowflakeConnection | null = null; + + try { + connection = await createConnection(); + + // Choose table based on sync type + const tableName = useWorkerTable ? 
'TWITTER_TWEETS_3_DAY' : 'TWITTER_TWEETS'; + + // Build query using subquery to match usernames to poster IDs + // Use ROW_NUMBER() to limit tweets per user + const placeholders = usernames.map(() => '?').join(','); + let sqlText = ` + WITH ranked_tweets AS ( + SELECT + t.ID, + t.POSTER_ID, + t.TEXT, + t.TIMESTAMP, + t.LIKES, + t.REPOSTS, + t.VIEWS, + p.NAME as USERNAME, + ROW_NUMBER() OVER (PARTITION BY t.POSTER_ID ORDER BY t.TIMESTAMP DESC) as rn + FROM ${tableName} t + INNER JOIN twitter_profiles p ON t.POSTER_ID = p.ID + WHERE p.NAME IN (${placeholders}) + `; + const params: any[] = [...usernames]; + + if (sinceTimestamp) { + const unixTimestamp = Math.floor(sinceTimestamp.getTime() / 1000); + sqlText += ` AND t.TIMESTAMP >= ${unixTimestamp}`; + } + + sqlText += ` + ) + SELECT ID, POSTER_ID, TEXT, TIMESTAMP, LIKES, REPOSTS, VIEWS, USERNAME + FROM ranked_tweets + WHERE rn <= ? + ORDER BY USERNAME, TIMESTAMP DESC + `; + params.push(limitPerUser); + + const rows = await executeQuery(connection, sqlText, params); + + // Group tweets by username + for (const row of rows) { + const username = row.USERNAME; + if (!tweetsMap.has(username)) { + tweetsMap.set(username, []); + } + tweetsMap.get(username)!.push({ + ID: row.ID, + POSTER_ID: row.POSTER_ID, + TEXT: row.TEXT, + TIMESTAMP: row.TIMESTAMP, + LIKES: row.LIKES, + REPOSTS: row.REPOSTS, + VIEWS: row.VIEWS, + }); + } + + log.info('Fetched Twitter tweets bulk', { + requestedUsers: usernames.length, + usersWithTweets: tweetsMap.size, + totalTweets: rows.length + }); + return tweetsMap; + } catch (error) { + log.error('Failed to fetch Twitter tweets bulk', { + usernameCount: usernames.length, + error: (error as Error).message + }); + return tweetsMap; + } finally { + if (connection) { + connection.destroy((err: any) => { + if (err) log.error('Error destroying Snowflake connection', { error: err.message }); + }); + } + } +} diff --git a/protocol/src/routes/auth.ts b/protocol/src/routes/auth.ts index ecf36ff05..c92a19e00 100644 --- a/protocol/src/routes/auth.ts +++ b/protocol/src/routes/auth.ts @@ -5,7 +5,7 @@ import db from '../lib/db'; import { users } from '../lib/schema'; import { eq, isNull } from 'drizzle-orm'; import { User, UpdateProfileRequest, OnboardingState } from '../types'; -import { checkAndTriggerSocialSync } from '../lib/integrations/social-sync'; +import { checkAndTriggerSocialSync, checkAndTriggerEnrichment } from '../lib/integrations/social-sync'; const router = Router(); @@ -82,6 +82,11 @@ router.patch('/profile', authenticatePrivy, async (req: AuthRequest, res: Respon checkAndTriggerSocialSync(req.user!.id, oldSocials, socials); } + // Check enrichment eligibility if name or intro fields were updated + if (name !== undefined || intro !== undefined) { + checkAndTriggerEnrichment(req.user!.id); + } + return res.json({ user: updatedUser[0] }); } catch (error) { console.error('Update profile error:', error); diff --git a/protocol/src/routes/users.ts b/protocol/src/routes/users.ts index 0835be791..01f6c0198 100644 --- a/protocol/src/routes/users.ts +++ b/protocol/src/routes/users.ts @@ -5,7 +5,7 @@ import { users } from '../lib/schema'; import { authenticatePrivy, AuthRequest } from '../middleware/auth'; import { eq, isNull, ilike, or, and, count, desc } from 'drizzle-orm'; import { User, UpdateProfileRequest } from '../types'; -import { checkAndTriggerSocialSync } from '../lib/integrations/social-sync'; +import { checkAndTriggerSocialSync, checkAndTriggerEnrichment } from '../lib/integrations/social-sync'; const router = 
Router(); @@ -108,6 +108,11 @@ router.put('/:id', checkAndTriggerSocialSync(id, oldSocials, socials); } + // Check enrichment eligibility if name or intro fields were updated + if (name !== undefined || intro !== undefined) { + checkAndTriggerEnrichment(id); + } + return res.json({ message: 'User updated successfully', user: updatedUser[0] diff --git a/protocol/src/types/users.ts b/protocol/src/types/users.ts index c92e6f48a..7560cb90e 100644 --- a/protocol/src/types/users.ts +++ b/protocol/src/types/users.ts @@ -13,6 +13,7 @@ export interface OnboardingState { currentStep?: 'profile' | 'connections' | 'create_index' | 'invite_members' | 'join_indexes'; indexId?: UUID | null; invitationCode?: string; + enrichmentHash?: string; // Hash of name+email combination to track enrichment per parameter set } export interface User { From d803517df875a2d63634ce8212cae337d5e0521e Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 17:35:01 -0500 Subject: [PATCH 003/327] Refactor Twitter sync to use integration records Twitter sync logic now uses userIntegrations records for incremental syncing, lastSyncAt tracking, and username extraction. Added migration and integration management utilities to support transition from socials.x to integration records. Updated worker and bulk sync logic to use integration's lastSyncAt for incremental syncs, and refactored schema to include TwitterConfig. --- protocol/env.example | 2 +- protocol/src/cli/social-worker.ts | 15 +- .../src/lib/integrations/providers/twitter.ts | 232 ++++++++++++++---- protocol/src/lib/integrations/social-sync.ts | 209 ++++++++++++++-- protocol/src/lib/schema.ts | 6 + 5 files changed, 389 insertions(+), 75 deletions(-) diff --git a/protocol/env.example b/protocol/env.example index fd2e4bdf4..639ead215 100644 --- a/protocol/env.example +++ b/protocol/env.example @@ -66,7 +66,7 @@ SNOWFLAKE_DATABASE=DATA_COLLECTOR_ICEBERG SNOWFLAKE_SCHEMA=PUBLIC # Social media sync intervals (in milliseconds) -# TWITTER_SYNC_DELAY_MS=14400000 # default: 4 hours (checks last 4 hours of tweets) +# TWITTER_SYNC_DELAY_MS=3600000 # default: 1 hour (uses lastSyncAt for incremental syncs) # ENRICHMENT_SYNC_DELAY_MS=3600000 # default: 1 hour (not used - enrichment is on-demand) # Proxy configuration for web crawling diff --git a/protocol/src/cli/social-worker.ts b/protocol/src/cli/social-worker.ts index ed5d45909..f54a3dbd6 100644 --- a/protocol/src/cli/social-worker.ts +++ b/protocol/src/cli/social-worker.ts @@ -24,7 +24,7 @@ type Opts = { let isShuttingDown = false; -const TWITTER_SYNC_DELAY_MS = parseInt(process.env.TWITTER_SYNC_DELAY_MS || '14400000'); // 4 hours default +const TWITTER_SYNC_DELAY_MS = parseInt(process.env.TWITTER_SYNC_DELAY_MS || '3600000'); // 1 hour default const ENRICHMENT_SYNC_DELAY_MS = parseInt(process.env.ENRICHMENT_SYNC_DELAY_MS || '3600000'); // 1 hour default async function main(): Promise { @@ -98,7 +98,7 @@ async function main(): Promise { program.addHelpText( 'after', '\nExamples:\n' + - ' # Continuous workers (every 4 hours for Twitter):\n' + + ' # Continuous workers (every 1 hour for Twitter):\n' + ' yarn social-worker --type twitter\n' + ' yarn social-worker --type enrichment\n' + ' yarn social-worker --type all --silent\n' + @@ -123,8 +123,8 @@ async function main(): Promise { async function syncSingleTwitterUser(userId: string): Promise { try { log.info('Syncing single Twitter user', { userId }); - const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); - const result 
= await syncTwitterUser(userId, fourHoursAgo); + // Pass undefined to use integration's lastSyncAt (worker mode behavior) + const result = await syncTwitterUser(userId, undefined); if (result.success) { log.info('Twitter sync successful', { userId, intentsGenerated: result.intentsGenerated, locationUpdated: result.locationUpdated }); } else { @@ -156,9 +156,9 @@ async function syncSingleEnrichmentUser(userId: string): Promise { async function syncSingleUserAll(userId: string): Promise { try { log.info('Syncing all social media for single user', { userId }); - const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); + // Pass undefined to use integration's lastSyncAt (worker mode behavior) const [twitterResult, enrichmentResult] = await Promise.all([ - syncTwitterUser(userId, fourHoursAgo).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), + syncTwitterUser(userId, undefined).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), enrichUserProfile(userId).catch(err => ({ success: false, error: err instanceof Error ? err.message : String(err) })), ]); @@ -180,7 +180,8 @@ async function runTwitterWorker(): Promise { await syncAllTwitterUsers(); if (!isShuttingDown) { - log.info(`Twitter cycle complete, next sync in ${TWITTER_SYNC_DELAY_MS / 1000 / 60} minutes`); + const minutes = Math.floor(TWITTER_SYNC_DELAY_MS / 1000 / 60); + log.info(`Twitter cycle complete, next sync in ${minutes} minute${minutes !== 1 ? 's' : ''}`); await sleep(TWITTER_SYNC_DELAY_MS); } } catch (error) { diff --git a/protocol/src/lib/integrations/providers/twitter.ts b/protocol/src/lib/integrations/providers/twitter.ts index 2b416a99d..1cf41ebd2 100644 --- a/protocol/src/lib/integrations/providers/twitter.ts +++ b/protocol/src/lib/integrations/providers/twitter.ts @@ -2,7 +2,7 @@ import { log } from '../../log'; import { fetchTwitterProfile, fetchTwitterTweets, fetchTwitterProfilesBulk, fetchTwitterTweetsBulk, extractTwitterUsername } from '../../snowflake'; import { addGenerateIntentsJob } from '../../queue/llm-queue'; import db from '../../db'; -import { users } from '../../schema'; +import { users, userIntegrations } from '../../schema'; import { eq, isNull, and, inArray } from 'drizzle-orm'; export interface TwitterSyncResult { @@ -12,8 +12,73 @@ export interface TwitterSyncResult { error?: string; } -export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | null): Promise { +export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | null, integrationId?: string): Promise { try { + let integration: typeof userIntegrations.$inferSelect | null = null; + let username: string | null = null; + + // Try to get integration record if integrationId provided, otherwise fetch by userId + if (integrationId) { + const integrationRecords = await db.select() + .from(userIntegrations) + .where( + and( + eq(userIntegrations.id, integrationId), + eq(userIntegrations.userId, userId), + eq(userIntegrations.integrationType, 'twitter'), + isNull(userIntegrations.deletedAt) + ) + ) + .limit(1); + + if (integrationRecords.length > 0) { + integration = integrationRecords[0]; + username = (integration.config as any)?.twitter?.username || null; + } + } else { + // Fallback: fetch integration by userId + const integrationRecords = await db.select() + .from(userIntegrations) + .where( + and( + eq(userIntegrations.userId, userId), + eq(userIntegrations.integrationType, 'twitter'), + isNull(userIntegrations.indexId), + 
isNull(userIntegrations.deletedAt) + ) + ) + .limit(1); + + if (integrationRecords.length > 0) { + integration = integrationRecords[0]; + username = (integration.config as any)?.twitter?.username || null; + } + } + + // Backward compatibility: if no integration found, check socials.x + if (!username) { + const userRecords = await db.select() + .from(users) + .where(and(eq(users.id, userId), isNull(users.deletedAt))) + .limit(1); + + if (userRecords.length === 0) { + return { intentsGenerated: 0, locationUpdated: false, success: false, error: 'User not found' }; + } + + const user = userRecords[0]; + const twitterUrl = user.socials?.x; + + if (!twitterUrl) { + return { intentsGenerated: 0, locationUpdated: false, success: false, error: 'No Twitter URL found' }; + } + + username = extractTwitterUsername(twitterUrl); + if (!username) { + return { intentsGenerated: 0, locationUpdated: false, success: false, error: 'Invalid Twitter username format' }; + } + } + // Get user from database const userRecords = await db.select() .from(users) @@ -25,22 +90,11 @@ export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | nu } const user = userRecords[0]; - const twitterUrl = user.socials?.x; - - if (!twitterUrl) { - return { intentsGenerated: 0, locationUpdated: false, success: false, error: 'No Twitter URL found' }; - } - - // Extract username from URL or handle - const username = extractTwitterUsername(twitterUrl); - if (!username) { - return { intentsGenerated: 0, locationUpdated: false, success: false, error: 'Invalid Twitter username format' }; - } // Determine sync behavior: // - If sinceTimestamp is null (profile update): fetch all tweets (no timestamp filter) - // - If sinceTimestamp is provided: use it (worker sync with 4-hour lookback) - // - Otherwise: default to 4-hour lookback + // - If sinceTimestamp is provided: use it (explicit timestamp) + // - Otherwise: use integration's lastSyncAt if available, or undefined (fetch all for first sync) let syncSince: Date | undefined; let fetchAllTweets = false; @@ -50,14 +104,20 @@ export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | nu syncSince = undefined; log.info('Syncing Twitter user (profile update, fetching all tweets)', { userId, username }); } else if (sinceTimestamp) { - // Explicit timestamp provided (worker sync) + // Explicit timestamp provided syncSince = sinceTimestamp; log.info('Syncing Twitter user', { userId, username, syncSince: syncSince.toISOString() }); } else { - // Default: 4-hour lookback (worker sync) - const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); - syncSince = fourHoursAgo; - log.info('Syncing Twitter user (default)', { userId, username, syncSince: syncSince.toISOString() }); + // Worker mode: always use integration's lastSyncAt if available + if (integration?.lastSyncAt) { + syncSince = new Date(integration.lastSyncAt); + log.info('Syncing Twitter user (using lastSyncAt)', { userId, username, syncSince: syncSince.toISOString() }); + } else { + // First sync: fetch all tweets + fetchAllTweets = true; + syncSince = undefined; + log.info('Syncing Twitter user (first sync, fetching all tweets)', { userId, username }); + } } // Fetch profile from Snowflake @@ -111,6 +171,13 @@ export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | nu instruction: 'Generate intents from Twitter tweets', }, 6); + // Update integration's lastSyncAt + if (integration) { + await db.update(userIntegrations) + .set({ lastSyncAt: new Date() }) + 
.where(eq(userIntegrations.id, integration.id)); + } + log.info('Twitter sync complete', { userId, username, tweetsProcessed: tweets.length, locationUpdated }); return { @@ -132,11 +199,11 @@ export async function syncTwitterUser(userId: string, sinceTimestamp?: Date | nu /** * Sync multiple Twitter users in bulk (optimized for batch processing) * Fetches profiles and tweets for all users in one query each - * @param userBatch Array of user records from database (must have id, socials, location, onboarding fields) - * @param sinceTimestamp Optional timestamp to filter tweets (defaults to 4 hours ago) + * @param integrationBatch Array of integration records with joined user records (from syncAllTwitterUsers) + * @param sinceTimestamp Optional explicit timestamp (if provided, overrides lastSyncAt) */ export async function syncTwitterUsersBulk( - userBatch: Array, + integrationBatch: Array<{ integration: typeof userIntegrations.$inferSelect; user: typeof users.$inferSelect }>, sinceTimestamp?: Date ): Promise<{ usersProcessed: number; intentsGenerated: number; locationUpdated: number; errors: number }> { const stats = { @@ -148,17 +215,37 @@ export async function syncTwitterUsersBulk( try { // Extract usernames and create mapping - const usernameToUser = new Map(); + const usernameToData = new Map(); const usernames: string[] = []; - for (const user of userBatch) { - const twitterUrl = user.socials?.x; - if (!twitterUrl) continue; + for (const { integration, user } of integrationBatch) { + const config = integration.config as any; + const username = config?.twitter?.username; + + if (!username) { + stats.errors++; + log.warn('Twitter integration missing username in config', { userId: user.id, integrationId: integration.id }); + continue; + } - const username = extractTwitterUsername(twitterUrl); - if (!username) continue; + // Worker mode: always use integration's lastSyncAt if available + // If explicit sinceTimestamp provided, use that instead + let effectiveSinceTimestamp: Date | undefined; + let fetchAll = false; + + if (sinceTimestamp) { + // Explicit timestamp provided (overrides lastSyncAt) + effectiveSinceTimestamp = sinceTimestamp; + } else if (integration.lastSyncAt) { + // Use integration's lastSyncAt (incremental sync) + effectiveSinceTimestamp = new Date(integration.lastSyncAt); + } else { + // First sync: fetch all tweets + fetchAll = true; + effectiveSinceTimestamp = undefined; + } - usernameToUser.set(username, user); + usernameToData.set(username, { user, integration, lastSyncAt: effectiveSinceTimestamp, fetchAll }); usernames.push(username); } @@ -166,30 +253,71 @@ export async function syncTwitterUsersBulk( return stats; } + // Group integrations by sync timestamp for efficient bulk fetching + const syncTimestampGroups = new Map(); + const fetchAllUsernames: string[] = []; + + for (const username of usernames) { + const data = usernameToData.get(username); + if (!data) continue; + + if (data.fetchAll) { + // First sync: fetch all tweets + fetchAllUsernames.push(username); + } else if (data.lastSyncAt) { + // Incremental sync: group by timestamp + const syncKey = data.lastSyncAt.toISOString(); + if (!syncTimestampGroups.has(syncKey)) { + syncTimestampGroups.set(syncKey, []); + } + syncTimestampGroups.get(syncKey)!.push(username); + } + } + log.info('Syncing Twitter users bulk', { - userCount: usernames.length, - syncSince: sinceTimestamp?.toISOString() + userCount: usernames.length, + incrementalSyncGroups: syncTimestampGroups.size, + firstSyncCount: fetchAllUsernames.length 
}); // Fetch all profiles in one query const profilesMap = await fetchTwitterProfilesBulk(usernames); - // Determine sync timestamp (4 hours ago or sinceTimestamp) - const fourHoursAgo = new Date(Date.now() - 4 * 60 * 60 * 1000); - const syncSince = sinceTimestamp || fourHoursAgo; - - // Fetch all tweets in one query - // Use TWITTER_TWEETS_3_DAY for worker sync - const tweetsMap = await fetchTwitterTweetsBulk(usernames, syncSince, 100, true); + // Fetch tweets for incremental sync groups (by timestamp) + const tweetsMap = new Map(); + for (const [syncKey, groupUsernames] of syncTimestampGroups.entries()) { + const sampleData = usernameToData.get(groupUsernames[0]); + const syncSince = sampleData?.lastSyncAt; + + if (syncSince) { + // Use TWITTER_TWEETS_3_DAY for worker sync + const groupTweetsMap = await fetchTwitterTweetsBulk(groupUsernames, syncSince, 100, true); + + // Merge into main tweets map + for (const [username, tweets] of groupTweetsMap.entries()) { + tweetsMap.set(username, tweets); + } + } + } + + // Fetch all tweets for first-time syncs (no timestamp filter) + if (fetchAllUsernames.length > 0) { + const fetchAllTweetsMap = await fetchTwitterTweetsBulk(fetchAllUsernames, undefined, 100, true); + for (const [username, tweets] of fetchAllTweetsMap.entries()) { + tweetsMap.set(username, tweets); + } + } // Process each user - const userIdsToUpdate: string[] = []; + const integrationsToUpdate: Array<{ integrationId: string }> = []; const locationUpdates: Array<{ userId: string; location: string }> = []; const intentJobs: Array<{ userId: string; tweetObjects: any[] }> = []; for (const username of usernames) { - const user = usernameToUser.get(username); - if (!user) continue; + const data = usernameToData.get(username); + if (!data) continue; + + const { user, integration } = data; try { const profile = profilesMap.get(username); @@ -219,7 +347,7 @@ export async function syncTwitterUsersBulk( if (tweets.length === 0) { // Update last sync time even if no new tweets - userIdsToUpdate.push(user.id); + integrationsToUpdate.push({ integrationId: integration.id }); stats.usersProcessed++; log.info('No new tweets found', { userId: user.id, username }); continue; @@ -241,7 +369,7 @@ export async function syncTwitterUsersBulk( })); intentJobs.push({ userId: user.id, tweetObjects }); - userIdsToUpdate.push(user.id); + integrationsToUpdate.push({ integrationId: integration.id }); stats.usersProcessed++; log.info('Queued intent generation', { userId: user.id, username, tweetCount: tweets.length }); } catch (error) { @@ -288,6 +416,22 @@ export async function syncTwitterUsersBulk( } } + // Batch update integration lastSyncAt + if (integrationsToUpdate.length > 0) { + const now = new Date(); + for (const { integrationId } of integrationsToUpdate) { + try { + await db.update(userIntegrations) + .set({ lastSyncAt: now }) + .where(eq(userIntegrations.id, integrationId)); + } catch (error) { + log.error('Failed to update integration lastSyncAt', { + integrationId, + error: (error as Error).message + }); + } + } + } log.info('Twitter bulk sync complete', { usersProcessed: stats.usersProcessed, diff --git a/protocol/src/lib/integrations/social-sync.ts b/protocol/src/lib/integrations/social-sync.ts index a2aacafba..0eaadd743 100644 --- a/protocol/src/lib/integrations/social-sync.ts +++ b/protocol/src/lib/integrations/social-sync.ts @@ -2,9 +2,11 @@ import { log } from '../log'; import { syncTwitterUser, syncTwitterUsersBulk } from './providers/twitter'; import { enrichUserProfile } from 
'./providers/profile-enrich'; import db from '../db'; -import { users } from '../schema'; +import { users, userIntegrations } from '../schema'; import { isNotNull, isNull, and, eq } from 'drizzle-orm'; import crypto from 'crypto'; +import { extractTwitterUsername } from '../snowflake'; +import { IntegrationConfigType } from '../schema'; export interface SocialSyncResult { twitter: { @@ -30,37 +32,40 @@ export async function syncAllTwitterUsers(): Promise user.socials?.x - ); + log.info('Starting Twitter sync', { userCount: twitterIntegrations.length, batchSize: BATCH_SIZE }); - log.info('Starting Twitter sync', { userCount: twitterUsers.length, batchSize: BATCH_SIZE }); - - // Process users in batches - for (let i = 0; i < twitterUsers.length; i += BATCH_SIZE) { - const batch = twitterUsers.slice(i, i + BATCH_SIZE); - log.info(`Processing Twitter sync batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(twitterUsers.length / BATCH_SIZE)}`, { + // Process integrations in batches + for (let i = 0; i < twitterIntegrations.length; i += BATCH_SIZE) { + const batch = twitterIntegrations.slice(i, i + BATCH_SIZE); + log.info(`Processing Twitter sync batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(twitterIntegrations.length / BATCH_SIZE)}`, { batchStart: i + 1, - batchEnd: Math.min(i + BATCH_SIZE, twitterUsers.length), - totalUsers: twitterUsers.length, + batchEnd: Math.min(i + BATCH_SIZE, twitterIntegrations.length), + totalUsers: twitterIntegrations.length, }); // Process batch using bulk operations + // Pass undefined to use each integration's lastSyncAt try { - const batchResult = await syncTwitterUsersBulk(batch, FOUR_HOURS_AGO); + const batchResult = await syncTwitterUsersBulk(batch, undefined); stats.usersProcessed += batchResult.usersProcessed; stats.intentsGenerated += batchResult.intentsGenerated; stats.locationUpdated += batchResult.locationUpdated; @@ -180,6 +185,70 @@ function generateEnrichmentHash(name: string, email: string): string { .digest('hex'); } +/** + * Ensure Twitter integration record exists for a user + * Creates or updates integration record when user adds/updates Twitter URL + */ +async function ensureTwitterIntegration(userId: string, twitterUrl: string): Promise { + try { + const username = extractTwitterUsername(twitterUrl); + if (!username) { + log.warn('Invalid Twitter URL format', { userId, twitterUrl }); + return null; + } + + // Check if integration record already exists + const existing = await db.select() + .from(userIntegrations) + .where( + and( + eq(userIntegrations.userId, userId), + eq(userIntegrations.integrationType, 'twitter'), + isNull(userIntegrations.indexId), // Twitter is user-level, not index-level + isNull(userIntegrations.deletedAt) + ) + ) + .limit(1); + + const config: IntegrationConfigType = { + twitter: { username } + }; + + if (existing.length > 0) { + // Update existing integration + const [updated] = await db.update(userIntegrations) + .set({ + config, + status: 'connected', + updatedAt: new Date(), + deletedAt: null, // Restore if it was soft-deleted + }) + .where(eq(userIntegrations.id, existing[0].id)) + .returning(); + + log.info('Updated Twitter integration', { userId, integrationId: updated.id, username }); + return updated; + } else { + // Create new integration + const [created] = await db.insert(userIntegrations) + .values({ + userId, + integrationType: 'twitter', + status: 'connected', + config, + connectedAt: new Date(), + }) + .returning(); + + log.info('Created Twitter integration', { userId, integrationId: created.id, 
username }); + return created; + } + } catch (error) { + log.error('Failed to ensure Twitter integration', { userId, error: (error as Error).message }); + return null; + } +} + /** * Check if user meets enrichment criteria and trigger enrichment * - Don't enrich if user has customized their intro @@ -312,13 +381,107 @@ export function checkAndTriggerSocialSync( const oldTwitter = oldSocials?.x; const newTwitter = newSocials?.x; - // Check if Twitter changed - only trigger Twitter sync on Twitter changes - if (newTwitter && newTwitter !== oldTwitter) { - triggerSocialSync(userId, 'twitter'); - } + // Handle Twitter integration record creation/update/deletion + setImmediate(async () => { + try { + if (newTwitter && newTwitter !== oldTwitter) { + // Twitter URL added or changed - create/update integration record + await ensureTwitterIntegration(userId, newTwitter); + triggerSocialSync(userId, 'twitter'); + } else if (!newTwitter && oldTwitter) { + // Twitter URL removed - soft-delete integration record + await db.update(userIntegrations) + .set({ deletedAt: new Date() }) + .where( + and( + eq(userIntegrations.userId, userId), + eq(userIntegrations.integrationType, 'twitter'), + isNull(userIntegrations.indexId), + isNull(userIntegrations.deletedAt) + ) + ); + log.info('Soft-deleted Twitter integration', { userId }); + } + } catch (error) { + log.error('Error managing Twitter integration', { userId, error: (error as Error).message }); + } + }); // Check enrichment eligibility when socials are updated // (enrichment triggers based on name/email/intro condition, not social changes) checkAndTriggerEnrichment(userId); } +/** + * Migrate existing users with Twitter URLs to create integration records + * This should be run once to migrate existing data + */ +export async function migrateTwitterUsersToIntegrations(): Promise<{ migrated: number; errors: number }> { + let migrated = 0; + let errors = 0; + + try { + log.info('Starting Twitter users migration to integrations'); + + // Get all users with Twitter URL + const allUsersWithTwitter = await db.select({ + id: users.id, + socials: users.socials, + }) + .from(users) + .where( + and( + isNull(users.deletedAt), + isNotNull(users.socials) + ) + ); + + // Get all existing Twitter integrations + const existingIntegrations = await db.select({ + userId: userIntegrations.userId, + }) + .from(userIntegrations) + .where( + and( + eq(userIntegrations.integrationType, 'twitter'), + isNull(userIntegrations.indexId), + isNull(userIntegrations.deletedAt) + ) + ); + + const existingUserIds = new Set(existingIntegrations.map(i => i.userId)); + + // Filter users who have Twitter URL but no integration + const usersToMigrate = allUsersWithTwitter.filter( + (row) => row.socials && (row.socials as any).x && !existingUserIds.has(row.id) + ); + + log.info('Found users to migrate', { count: usersToMigrate.length }); + + for (const row of usersToMigrate) { + try { + const twitterUrl = row.socials?.x; + if (!twitterUrl) continue; + const integration = await ensureTwitterIntegration(row.id, twitterUrl); + if (integration) { + migrated++; + if (migrated % 100 === 0) { + log.info('Migration progress', { migrated, total: usersToMigrate.length }); + } + } else { + errors++; + } + } catch (error) { + errors++; + log.error('Migration error for user', { userId: row.id, error: (error as Error).message }); + } + } + + log.info('Twitter users migration complete', { migrated, errors, total: usersToMigrate.length }); + return { migrated, errors }; + } catch (error) { + 
log.error('Twitter users migration failed', { error: (error as Error).message }); + return { migrated, errors }; + } +} + diff --git a/protocol/src/lib/schema.ts b/protocol/src/lib/schema.ts index ba3c55b7a..2ea5650b5 100644 --- a/protocol/src/lib/schema.ts +++ b/protocol/src/lib/schema.ts @@ -58,10 +58,16 @@ export interface SlackConfig { selectedChannels?: string[]; // Array of channel IDs to sync } +// Twitter-specific configuration +export interface TwitterConfig { + username: string; // Twitter username extracted from URL +} + // Integration configuration type export interface IntegrationConfigType { directorySync?: DirectorySyncConfig; slack?: SlackConfig; + twitter?: TwitterConfig; } // Tables From 7531828658b5e8b1950a81ae4d4a2f040480ce45 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 18:08:57 -0500 Subject: [PATCH 004/327] Remove tsconfig.json from types directory Deleted the tsconfig.json file from protocol/src/types, possibly as part of a cleanup or restructuring of TypeScript configuration. --- frontend/Dockerfile | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 3429fa6e6..1fb969616 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -2,6 +2,9 @@ FROM node:24.1.0-alpine AS builder WORKDIR /app +# Enable corepack for Yarn +RUN corepack enable + # Accept build arguments ARG NEXT_PUBLIC_PRIVY_APP_ID ARG NEXT_PUBLIC_PRIVY_CLIENT_ID @@ -16,10 +19,10 @@ ENV NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL ENV NEXT_PUBLIC_STATIC_URL=$NEXT_PUBLIC_STATIC_URL ENV NEXT_PUBLIC_GLOBAL_INDEX_ID=$NEXT_PUBLIC_GLOBAL_INDEX_ID -COPY package*.json ./ -RUN npm install +COPY package.json yarn.lock ./ +RUN yarn install --frozen-lockfile COPY . . -RUN npm run build +RUN yarn build # Production stage FROM node:24.1.0-alpine AS runner From c264fee1ac5ee3685a6aa0b1e130b0c3a022a11d Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 18:16:33 -0500 Subject: [PATCH 005/327] Update frontend Docker build context and file paths The GitHub Actions workflow now specifies the Dockerfile location and build context explicitly. The Dockerfile updates COPY instructions to use the correct frontend subdirectory paths and adds protocol/src/types to the build context for type sharing. 
--- .github/workflows/build-frontend.yml | 2 +- frontend/Dockerfile | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-frontend.yml b/.github/workflows/build-frontend.yml index 0a1ee2915..5556a5336 100644 --- a/.github/workflows/build-frontend.yml +++ b/.github/workflows/build-frontend.yml @@ -59,7 +59,7 @@ jobs: DOCKER_TAG: indexnetwork/website:${{ steps.build-time.outputs.time }} DOCKER_REGISTRY: 236785930124.dkr.ecr.us-east-1.amazonaws.com run: | - docker build --build-arg NEXT_PUBLIC_PRIVY_APP_ID=${{ secrets.NEXT_PUBLIC_PRIVY_APP_ID }} --build-arg NEXT_PUBLIC_PRIVY_CLIENT_ID=${{ secrets.NEXT_PUBLIC_PRIVY_CLIENT_ID }} --build-arg NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} --build-arg NEXT_PUBLIC_STATIC_URL=${{ secrets.NEXT_PUBLIC_STATIC_URL }} --build-arg NEXT_PUBLIC_GLOBAL_INDEX_ID=${{ secrets.NEXT_PUBLIC_GLOBAL_INDEX_ID }} -t $DOCKER_TAG ./frontend + docker build --build-arg NEXT_PUBLIC_PRIVY_APP_ID=${{ secrets.NEXT_PUBLIC_PRIVY_APP_ID }} --build-arg NEXT_PUBLIC_PRIVY_CLIENT_ID=${{ secrets.NEXT_PUBLIC_PRIVY_CLIENT_ID }} --build-arg NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} --build-arg NEXT_PUBLIC_STATIC_URL=${{ secrets.NEXT_PUBLIC_STATIC_URL }} --build-arg NEXT_PUBLIC_GLOBAL_INDEX_ID=${{ secrets.NEXT_PUBLIC_GLOBAL_INDEX_ID }} -f frontend/Dockerfile -t $DOCKER_TAG . docker tag $DOCKER_TAG $DOCKER_REGISTRY/$DOCKER_TAG docker push $DOCKER_REGISTRY/$DOCKER_TAG docker tag $DOCKER_TAG $DOCKER_REGISTRY/indexnetwork/website:latest-${GITHUB_REF#refs/heads/} diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 1fb969616..d0b7a89eb 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -19,9 +19,10 @@ ENV NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL ENV NEXT_PUBLIC_STATIC_URL=$NEXT_PUBLIC_STATIC_URL ENV NEXT_PUBLIC_GLOBAL_INDEX_ID=$NEXT_PUBLIC_GLOBAL_INDEX_ID -COPY package.json yarn.lock ./ +COPY frontend/package.json frontend/yarn.lock ./ RUN yarn install --frozen-lockfile -COPY . . +COPY frontend/ ./ +COPY protocol/src/types ./src/types RUN yarn build # Production stage From d981b4116c361bfdcf98d1e10eea88b16d93d3d7 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 18:48:56 -0500 Subject: [PATCH 006/327] Add env flag to disable intent events and update embedding dimensions Introduces a DISABLE_INTENT_EVENTS environment variable to conditionally disable intent event handling in IntentEvents.onCreated. Also updates the embedding vector dimensions in the intents schema from 3072 to 2000 to match new requirements. 
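For reference, a minimal usage sketch of the new flag (the helper name here is hypothetical; the real guard is inlined in IntentEvents.onCreated). Note that the check compares against the literal string 'disabled', so DISABLE_INTENT_EVENTS=true leaves the handler active:

    // Sketch only: mirrors the guard added to events.ts in this commit.
    // Export DISABLE_INTENT_EVENTS=disabled to short-circuit intent event handling.
    export function intentEventsDisabled(): boolean {
      return process.env.DISABLE_INTENT_EVENTS === 'disabled';
    }

    // e.g. at the top of an event handler:
    //   if (intentEventsDisabled()) return;
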
--- protocol/src/lib/events.ts | 7 +++++++ protocol/src/lib/schema.ts | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/protocol/src/lib/events.ts b/protocol/src/lib/events.ts index 884536d93..928ef3fbf 100644 --- a/protocol/src/lib/events.ts +++ b/protocol/src/lib/events.ts @@ -37,6 +37,13 @@ export class IntentEvents { * Triggered when a new intent is created */ static async onCreated(event: IntentEvent): Promise { + + const intentEventsDisabled = process.env.DISABLE_INTENT_EVENTS === 'disabled'; + if (intentEventsDisabled) { + console.log('IntentEvents.onCreated disabled'); + return; + } + try { // Get all eligible indexes for this user const eligibleIndexes = await db.select({ diff --git a/protocol/src/lib/schema.ts b/protocol/src/lib/schema.ts index 2ea5650b5..73d654298 100644 --- a/protocol/src/lib/schema.ts +++ b/protocol/src/lib/schema.ts @@ -103,8 +103,8 @@ export const intents = pgTable('intents', { // Polymorphic nullable source (file | integration | link) sourceId: uuid('source_id'), sourceType: sourceType('source_type'), - // Vector embedding for semantic search (3072 dimensions for text-embedding-3-large) - embedding: vector('embedding', { dimensions: 3072 }), + // Vector embedding for semantic search (2000 dimensions for text-embedding-3-large) + embedding: vector('embedding', { dimensions: 2000 }), }); export const indexes = pgTable('indexes', { From 72eb5aae7cd413ed76df6042834693c48a1c4bca Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Sun, 30 Nov 2025 23:23:44 -0500 Subject: [PATCH 007/327] Add 'enrichment' to intent source types Introduces 'enrichment' as a valid sourceType for intents in the schema and intent service. Updates profile enrichment logic to use 'enrichment' sourceType and sets sourceId to undefined for enrichment-based intents. 
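For illustration, a minimal sketch of how an enrichment-derived intent is recorded after this change (the helper and its argument values are placeholders; the real call lives in enrichUserProfile):

    import { IntentService } from './intent-service'; // adjust the relative path to the caller's location

    // Enrichment-based intents carry sourceType 'enrichment' and no sourceId,
    // since there is no backing file, integration, or link record.
    async function recordEnrichmentIntent(userId: string, payload: string, confidence: number): Promise<void> {
      await IntentService.createIntent({
        payload,
        userId,
        sourceId: undefined,
        sourceType: 'enrichment',
        confidence,
        inferenceType: 'implicit',
      });
    }
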
--- protocol/src/lib/integrations/providers/profile-enrich.ts | 4 ++-- protocol/src/lib/intent-service.ts | 2 +- protocol/src/lib/schema.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/protocol/src/lib/integrations/providers/profile-enrich.ts b/protocol/src/lib/integrations/providers/profile-enrich.ts index b7abd60c2..d04b7d968 100644 --- a/protocol/src/lib/integrations/providers/profile-enrich.ts +++ b/protocol/src/lib/integrations/providers/profile-enrich.ts @@ -111,8 +111,8 @@ export async function enrichUserProfile(userId: string, generateIntents: boolean await IntentService.createIntent({ payload: intentData.payload, userId, - sourceId: userId, - sourceType: 'integration', + sourceId: undefined, // No specific integration record for enrichment-based intents + sourceType: 'enrichment', confidence: intentData.confidence, inferenceType: intentData.type, }); diff --git a/protocol/src/lib/intent-service.ts b/protocol/src/lib/intent-service.ts index 2a0c947eb..6a30565c1 100644 --- a/protocol/src/lib/intent-service.ts +++ b/protocol/src/lib/intent-service.ts @@ -12,7 +12,7 @@ export interface CreateIntentOptions { isIncognito?: boolean; indexIds?: string[]; sourceId?: string; - sourceType?: 'file' | 'integration' | 'link' | 'discovery_form'; + sourceType?: 'file' | 'integration' | 'link' | 'discovery_form' | 'enrichment'; confidence: number; // 0-1, required inferenceType: 'explicit' | 'implicit'; // required createdAt?: Date; diff --git a/protocol/src/lib/schema.ts b/protocol/src/lib/schema.ts index 73d654298..45b52d924 100644 --- a/protocol/src/lib/schema.ts +++ b/protocol/src/lib/schema.ts @@ -7,7 +7,7 @@ export const connectionAction = pgEnum('connection_action', [ 'REQUEST', 'SKIP', 'CANCEL', 'ACCEPT', 'DECLINE', 'OWNER_APPROVE', 'OWNER_DENY' ]); // Polymorphic source type for intents -export const sourceType = pgEnum('source_type', ['file', 'integration', 'link', 'discovery_form']); +export const sourceType = pgEnum('source_type', ['file', 'integration', 'link', 'discovery_form', 'enrichment']); // Onboarding state type export interface OnboardingState { From b1c2f6661d05b0e3303bb41298107c274ccf8939 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Mon, 1 Dec 2025 18:26:45 +0300 Subject: [PATCH 008/327] feat: implement dynamic email subjects and updated templates --- .../src/agents/external/vibe_checker/index.ts | 42 ++++++++++++++--- protocol/src/lib/email/email-handlers.ts | 4 +- .../email/templates/connection-accepted.ts | 18 +++---- .../lib/email/templates/connection-request.ts | 12 ++--- .../lib/email/templates/weekly-newsletter.ts | 8 ++-- protocol/src/lib/synthesis.ts | 47 +++++++++++-------- protocol/src/routes/synthesis.ts | 7 +-- 7 files changed, 90 insertions(+), 48 deletions(-) diff --git a/protocol/src/agents/external/vibe_checker/index.ts b/protocol/src/agents/external/vibe_checker/index.ts index edb0d9b62..17d77fb0f 100644 --- a/protocol/src/agents/external/vibe_checker/index.ts +++ b/protocol/src/agents/external/vibe_checker/index.ts @@ -5,6 +5,7 @@ import { format } from 'timeago.js'; export interface VibeCheckResult { success: boolean; synthesis?: string; + subject?: string; error?: string; timing?: { startTime: Date; endTime: Date; durationMs: number }; } @@ -72,7 +73,7 @@ export async function vibeCheck( other_user_id: data.id, other_user_name: data.name, intent_pairs_count: data.intentPairs.length - })([systemMsg, userMsg], { reasoning: { exclude: true, effort: 'minimal' } }), + })([systemMsg, userMsg], { 
reasoning: { exclude: true, effort: 'minimal' }, response_format: { type: "json_object" } }), new Promise((_, reject) => setTimeout(() => reject(new Error('Vibe check timeout')), timeout) ) @@ -82,13 +83,28 @@ export async function vibeCheck( other_user_id: data.id, other_user_name: data.name, intent_pairs_count: data.intentPairs.length - })([systemMsg, userMsg], { reasoning: { exclude: true, effort: 'minimal' } }); + })([systemMsg, userMsg], { reasoning: { exclude: true, effort: 'minimal' }, response_format: { type: "json_object" } } as any); - const synthesis = (response.content as string).trim(); + let synthesis = ""; + let subject = ""; + + try { + let contentStr = response.content as string; + // Strip markdown code blocks if present + contentStr = contentStr.replace(/^```json\s*/, '').replace(/^```\s*/, '').replace(/\s*```$/, ''); + + const content = JSON.parse(contentStr); + synthesis = content.body || ""; + subject = content.subject || ""; + } catch (e) { + // Fallback for non-JSON response (shouldn't happen with response_format) + synthesis = (response.content as string).trim(); + } return { success: true, synthesis, + subject, timing: getTiming(startTime) }; } catch (error) { @@ -120,18 +136,32 @@ function buildSystemMessage( role: "system", content: `You are a collaboration synthesis generator. Create a warm, practical paragraph explaining why two people are mutual matches based on what they're explicitly looking for. -Style: +Also generate a short, punchy email subject line for this connection request. + +Style for Body: - Warm and friendly, not formal (we're introducing humans, not robots) - Grounded in stated needs (state what they're explicitly looking for, not speculative "could do" scenarios) - Direct and concise - Add a small human touchโ€”a light joke, casual aside, or relatable moment. Keep it natural, like you're telling a friend about this match. +Style for Subject: +- Include the personโ€™s name (${target}) +- Highlight strongest mutual-intent synergy +- Stay under 12 words +- Sound warm, professional, and action-oriented +- Avoid robotic "Label: Topic" formats (e.g., "Shared focus: AI"). Use natural phrases instead. +- Examples: + - "${target} - exploring shared work on scalable coordination" + - "${target} - shared intent around decentralized networking" + - "${target} - collaborating on privacy-preserving AI" + Format: -- Markdown with 2-3 inline hyperlinks: [descriptive phrase](https://index.network/intents/ID) +- Return a JSON object with "subject" and "body" fields. +- Body Markdown with 2-3 inline hyperlinks: [descriptive phrase](https://index.network/intents/ID) - ONLY hyperlink ${isThirdPerson ? `${initiator}'s` : 'your'} intents - NEVER link ${target}'s intents - Link natural phrases like "UX designers crafting interfaces" not "UX designers (link)" - Place links in beginning/middle of paragraph, not at the end -- No bold, italic, or title${characterLimit ? `\n- Maximum ${characterLimit} characters` : ''} +- No bold, italic, or title${characterLimit ? 
`\n- Maximum ${characterLimit} characters for body` : ''} Time Awareness: - Each intent includes a timestamp (e.g., "2 months ago", "3 days ago") diff --git a/protocol/src/lib/email/email-handlers.ts b/protocol/src/lib/email/email-handlers.ts index 345e1d18b..36188b2c5 100644 --- a/protocol/src/lib/email/email-handlers.ts +++ b/protocol/src/lib/email/email-handlers.ts @@ -71,7 +71,7 @@ export async function sendConnectionRequestEmail(initiatorUserId: string, receiv console.log('initiatorUserId', initiatorUserId); // Generate synthesis for the receiver - const synthesisMarkdown = await synthesizeVibeCheck( + const { synthesis: synthesisMarkdown, subject } = await synthesizeVibeCheck( receiverUserId, initiatorUserId, { vibeOptions: { characterLimit: 500 } } @@ -81,7 +81,7 @@ export async function sendConnectionRequestEmail(initiatorUserId: string, receiv const { marked } = await import('marked'); const synthesis = await marked.parse(synthesisMarkdown); - const template = connectionRequestTemplate(initiator[0].name, receiver[0].name, synthesis); + const template = connectionRequestTemplate(initiator[0].name, receiver[0].name, synthesis, subject); await sendEmail({ to: receiver[0].email, subject: template.subject, diff --git a/protocol/src/lib/email/templates/connection-accepted.ts b/protocol/src/lib/email/templates/connection-accepted.ts index a7404e76c..fa67c2c85 100644 --- a/protocol/src/lib/email/templates/connection-accepted.ts +++ b/protocol/src/lib/email/templates/connection-accepted.ts @@ -1,31 +1,31 @@ export const connectionAcceptedTemplate = (senderName: string, recipientName: string, synthesis?: string) => ({ - subject: `โœ… Youโ€™re connected! ${senderName} โ†” ${recipientName}`, - html: ` + subject: `${senderName} <> ${recipientName} - somethingโ€™s here`, + html: `

Hey ${senderName} and ${recipientName},

-

you both said yes — love when that happens.

+

You both said yes, love when that happens.

-

From what you’re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange — it’s yours now.

+

From what you’re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, it’s yours now.

${synthesis ? `

Quick recap:

${synthesis}
` : ''} -

No formal intros needed — just reply here and pick it up from wherever feels right.

+

No formal intros needed - just reply here and pick it up from wherever feels right.

—Your discovery agent, quietly cheering from the background

`, - text: `Hey ${senderName} and ${recipientName}, + text: `Hey ${senderName} and ${recipientName}, -you both said yes โ€” love when that happens. +You both said yes, love when that happens. -From what youโ€™re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange โ€” itโ€™s yours now. +From what youโ€™re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, itโ€™s yours now. ${synthesis ? `Quick recap: ${synthesis} -` : ''}No formal intros needed โ€” just reply here and pick it up from wherever feels right. +` : ''}No formal intros needed - just reply here and pick it up from wherever feels right. โ€”Your discovery agent, quietly cheering from the background` }); diff --git a/protocol/src/lib/email/templates/connection-request.ts b/protocol/src/lib/email/templates/connection-request.ts index b26b1286c..772ede8bf 100644 --- a/protocol/src/lib/email/templates/connection-request.ts +++ b/protocol/src/lib/email/templates/connection-request.ts @@ -1,6 +1,6 @@ -export const connectionRequestTemplate = (fromUserName: string, toUserName: string, synthesis?: string) => ({ - subject: `โœจ ${fromUserName} wants to connect with you`, - html: ` +export const connectionRequestTemplate = (fromUserName: string, toUserName: string, synthesis?: string, subject?: string) => ({ + subject: subject || `โœจ ${fromUserName} wants to connect with you`, + html: `

Hey ${toUserName},

You’ve got a new connection request on Index, ${fromUserName} wants to connect with you.

@@ -14,11 +14,11 @@ export const connectionRequestTemplate = (fromUserName: string, toUserName: stri
${synthesis}
` : ''} -

If you’re curious, I’ll make the connection. If not, everything stays quiet.

+

If you want to move it forward, I’ll make the introduction. If not, everything stays quiet.

—Index

`, - text: `Hey ${toUserName}, + text: `Hey ${toUserName}, Youโ€™ve got a new connection request on Index, ${fromUserName} wants to connect with you. @@ -27,7 +27,7 @@ Youโ€™ve got a new connection request on Index, ${fromUserName} wants to connect ${synthesis ? `What could happen between you two: ${synthesis} -` : ''}If youโ€™re curious, Iโ€™ll make the connection. If not, everything stays quiet. +` : ''}If you want to move it forward, Iโ€™ll make the introduction. If not, everything stays quiet. โ€”Index` }); diff --git a/protocol/src/lib/email/templates/weekly-newsletter.ts b/protocol/src/lib/email/templates/weekly-newsletter.ts index 8fd0526b9..a68699106 100644 --- a/protocol/src/lib/email/templates/weekly-newsletter.ts +++ b/protocol/src/lib/email/templates/weekly-newsletter.ts @@ -7,7 +7,7 @@ export interface Match { export const weeklyNewsletterTemplate = (recipientName: string, matches: Match[]) => { const matchesHtml = matches.map((match) => `
-

${match.name}${match.role ? ` — ${match.role}` : ''}

+

${match.name}${match.role ? ` — ${match.role}` : ''}

${match.reasoning}

`).join(''); @@ -28,13 +28,15 @@ ${match.reasoning}

Each one can shift something forward - choose your move, and I’ll take the next step with you.

${matchesHtml}

—Index, keeping your next moves within reach

diff --git a/protocol/src/lib/synthesis.ts b/protocol/src/lib/synthesis.ts index 21d13e221..7165215c4 100644 --- a/protocol/src/lib/synthesis.ts +++ b/protocol/src/lib/synthesis.ts @@ -7,7 +7,7 @@ import { eq, isNull, and, sql, inArray } from 'drizzle-orm'; import crypto from 'crypto'; import { getAccessibleIntents } from './intent-access'; -interface SynthesisOptions extends VibeCheckOptions {} +interface SynthesisOptions extends VibeCheckOptions { } function createCacheHash(data: any, options?: any): string { return crypto @@ -26,7 +26,7 @@ export async function synthesizeVibeCheck( indexIds?: string[]; vibeOptions?: SynthesisOptions; } -): Promise { +): Promise<{ synthesis: string; subject?: string }> { try { const { initiatorId, intentIds, indexIds, vibeOptions } = opts || {}; @@ -37,12 +37,12 @@ export async function synthesizeVibeCheck( .from(usersTable) .where(inArray(usersTable.id, userIds)); - if (!users.length) return ""; + if (!users.length) return { synthesis: "" }; const targetUser = users.find(u => u.id === targetUserId); const initiatorUser = initiatorId ? users.find(u => u.id === initiatorId) : undefined; - - if (!targetUser) return ""; + + if (!targetUser) return { synthesis: "" }; // Get context intents using secure access control const contextIntents = await getAccessibleIntents(contextUserId, { @@ -51,10 +51,10 @@ export async function synthesizeVibeCheck( includeOwnIntents: true }); - + const contextIntentIds = contextIntents.intents.map(i => i.id); - - if (!contextIntentIds.length) return ""; + + if (!contextIntentIds.length) return { synthesis: "" }; // Get top 3 stakes connecting context and target user intents // Optimized: use uuid[] instead of text[], leverage GIN index, remove redundant checks @@ -91,7 +91,7 @@ export async function synthesizeVibeCheck( .orderBy(sql`${intentStakes.stake} DESC`) .limit(3); - if (!stakes.length) return ""; + if (!stakes.length) return { synthesis: "" }; // Fetch intent details and build pairs const allIntentIds = stakes.flatMap(s => s.stakeIntents); @@ -111,12 +111,12 @@ export async function synthesizeVibeCheck( const [id1, id2] = stake.stakeIntents; const intent1 = intentDetails.find(i => i.id === id1); const intent2 = intentDetails.find(i => i.id === id2); - + const contextIntent = intent1?.userId === contextUserId ? intent1 : intent2; const targetIntent = intent1?.userId === targetUserId ? intent1 : intent2; - + if (!contextIntent || !targetIntent) return null; - + return { stake: Number(stake.stake), contextUserIntent: { @@ -146,20 +146,29 @@ export async function synthesizeVibeCheck( const cacheData = initiatorId ? 
{ ...vibeData, initiatorId } : vibeData; const cacheKey = createCacheHash(cacheData, vibeOptions); const cached = await cache.hget('synthesis', cacheKey); - if (cached) return cached; + if (cached) { + try { + // Try parsing as JSON first (new format) + return JSON.parse(cached); + } catch { + // Fallback for old string format + return { synthesis: cached }; + } + } // Generate synthesis const result = await vibeCheck(vibeData, vibeOptions); - + if (result.success && result.synthesis) { - await cache.hset('synthesis', cacheKey, result.synthesis); - return result.synthesis; + const cacheValue = { synthesis: result.synthesis, subject: result.subject }; + await cache.hset('synthesis', cacheKey, JSON.stringify(cacheValue)); + return cacheValue; } - return ""; + return { synthesis: "" }; } catch (error) { console.error('Synthesis error:', error); - return ""; + return { synthesis: "" }; } } @@ -224,7 +233,7 @@ export async function synthesizeIntro( const result = await introMaker(introData); return result.success && result.synthesis ? result.synthesis : ""; - + } catch (error) { console.error('Intro synthesis error:', error); return ""; diff --git a/protocol/src/routes/synthesis.ts b/protocol/src/routes/synthesis.ts index 289322c07..6bf4609ef 100644 --- a/protocol/src/routes/synthesis.ts +++ b/protocol/src/routes/synthesis.ts @@ -43,9 +43,9 @@ router.post('/vibecheck', // Use generic validation function const { validIndexIds, error } = await validateAndGetAccessibleIndexIds(contextUserId, indexIds); if (error) { - return res.status(error.status).json({ + return res.status(error.status).json({ error: error.message, - invalidIds: error.invalidIds + invalidIds: error.invalidIds }); } @@ -54,7 +54,7 @@ router.post('/vibecheck', return res.status(400).json({ error: 'No accessible indexes found for synthesis' }); } - const synthesis = await synthesizeVibeCheck( + const { synthesis, subject } = await synthesizeVibeCheck( initiatorId || contextUserId, targetUserId, { @@ -68,6 +68,7 @@ router.post('/vibecheck', return res.json({ synthesis, + subject, targetUserId, contextUserId, }); From 32d7e211c0549602566b85889419fa3f99d67bbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Mon, 1 Dec 2025 18:27:06 +0300 Subject: [PATCH 009/327] test: add email testing infrastructure and scripts --- protocol/email-debug.md | 143 ++++++++++++++++++++++++++++ protocol/env.example | 4 +- protocol/src/cli/test-emails.ts | 140 +++++++++++++++++++++++++++ protocol/src/cli/test-vibe-check.ts | 55 +++++++++++ protocol/src/lib/email/email.ts | 18 +++- 5 files changed, 354 insertions(+), 6 deletions(-) create mode 100644 protocol/email-debug.md create mode 100644 protocol/src/cli/test-emails.ts create mode 100644 protocol/src/cli/test-vibe-check.ts diff --git a/protocol/email-debug.md b/protocol/email-debug.md new file mode 100644 index 000000000..cd6283ca0 --- /dev/null +++ b/protocol/email-debug.md @@ -0,0 +1,143 @@ +# Email Test Debug Output + +================================================================================ +Connection Request +================================================================================ +To: bob@example.com +Subject: Alice - Bob needs your coordination tools + +--- TEXT CONTENT --- +Hey Bob, + +Youโ€™ve got a new connection request on Index, Alice wants to connect with you. 
+ +๐Ÿ‘‰ Go to Index to approve: https://index.network/inbox + +What could happen between you two: +Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges. + +If you want to move it forward, Iโ€™ll make the introduction. If not, everything stays quiet. + +โ€”Index + +--- HTML CONTENT --- + +
+

Hey Bob,

+

You’ve got a new connection request on Index, Alice wants to connect with you.

+ + + + +

What could happen between you two:

+
Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges.
+ + +

If you want to move it forward, I’ll make the introduction. If not, everything stays quiet.

+

—Index

+
+ + + +================================================================================ +Connection Accepted +================================================================================ +To: alice@example.com, bob@example.com +Subject: Alice <> Bob - somethingโ€™s here + +--- TEXT CONTENT --- +Hey Alice and Bob, + +You both said yes, love when that happens. + +From what youโ€™re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, itโ€™s yours now. + +Quick recap: +Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges. + +No formal intros needed - just reply here and pick it up from wherever feels right. + +โ€”Your discovery agent, quietly cheering from the background + +--- HTML CONTENT --- + +
+

Hey Alice and Bob,

+

You both said yes, love when that happens.

+ +

From what you’re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, it’s yours now.

+ + +

Quick recap:

+
Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges.
+ + +

No formal intros needed - just reply here and pick it up from wherever feels right.

+

—Your discovery agent, quietly cheering from the background

+
+ + + +================================================================================ +Weekly Newsletter +================================================================================ +To: bob@example.com +Subject: Youโ€™ve got 2 conversations waiting in your Index Inbox + +--- TEXT CONTENT --- +Hey Bob, + +Indexโ€™s agents surfaced a few people this week whose work lines up unusually well with the things youโ€™re pushing right now - fundraising, agent deployments, protocol scaling, semantic web thinking, and the early shape of a discovery network. + +Each one can shift something forward - choose your move, and Iโ€™ll take the next step with you. + +๐Ÿ‘‰ Go to your Inbox: https://index.network/inbox + + +Alice โ€” Software Engineer +Matches your interest in decentralized protocols. + + +Charlie โ€” Product Designer +Working on similar UI patterns for agentic workflows. + + +๐Ÿ‘‰ Go to your Inbox: https://index.network/inbox + +โ€”Index, keeping your next moves within reach + +--- HTML CONTENT --- + +
+

Hey Bob,

+

Index’s agents surfaced a few people this week whose work lines up unusually well with the things you’re pushing right now - fundraising, agent deployments, protocol scaling, semantic web thinking, and the early shape of a discovery network.

+

Each one can shift something forward - choose your move, and I’ll take the next step with you.

+ +
👉
 Go to your Inbox
+ + +
+

Alice — Software Engineer

+

Matches your interest in decentralized protocols.

+
+ +
+

Charlie — Product Designer

+

Working on similar UI patterns for agentic workflows.

+
+ + +
👉
 Go to your Inbox
+ +

—Index, keeping your next moves within reach

+
+ + diff --git a/protocol/env.example b/protocol/env.example index 639ead215..5256210e5 100644 --- a/protocol/env.example +++ b/protocol/env.example @@ -33,6 +33,8 @@ NODE_ENV=development # default: development # Email (Resend). If not set, emails are skipped. # RESEND_API_KEY= +# ENABLE_EMAIL_TESTING=true +# TESTING_EMAIL_ADDRESS= # Redis cache. Use REDIS_URL or individual fields (default localhost:6379, db=0). # REDIS_URL= @@ -113,4 +115,4 @@ LANGFUSE_BASE_URL=https://us.cloud.langfuse.com # JWT is not used; Privy provides auth. # JWT_SECRET= -# JWT_EXPIRES_IN= +# JWT_EXPIRES_IN= \ No newline at end of file diff --git a/protocol/src/cli/test-emails.ts b/protocol/src/cli/test-emails.ts new file mode 100644 index 000000000..df16c0e13 --- /dev/null +++ b/protocol/src/cli/test-emails.ts @@ -0,0 +1,140 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { writeFile } from 'fs/promises'; + +config({ path: resolve(__dirname, '../../.env.development') }); + +import { connectionRequestTemplate } from '../lib/email/templates/connection-request'; +import { connectionAcceptedTemplate } from '../lib/email/templates/connection-accepted'; +import { weeklyNewsletterTemplate } from '../lib/email/templates/weekly-newsletter'; +import { sendEmail } from '../lib/email/email'; + +import { vibeCheck, OtherUserData } from '../agents/external/vibe_checker/index'; + +async function logEmailToFile(title: string, to: string | string[], subject: string, html: string, text: string) { + const separator = '='.repeat(80); + const content = ` +${separator} +${title} +${separator} +To: ${Array.isArray(to) ? to.join(', ') : to} +Subject: ${subject} + +--- TEXT CONTENT --- +${text} + +--- HTML CONTENT --- +${html} + +`; + await writeFile('email-debug.md', content, { flag: 'a' }); +} + +async function main() { + console.log('Starting email test script (Mock Mode with Real AI)...'); + + // Clear previous debug file + await writeFile('email-debug.md', '# Email Test Debug Output\n'); + + const user1 = { name: 'Alice', email: 'alice@example.com' }; + const user2 = { name: 'Bob', email: 'bob@example.com' }; + + // Mock Data for Vibe Check + const mockData: OtherUserData = { + id: 'user-123', + name: 'Alice', + intro: 'Building scalable coordination tools for DAOs.', + intentPairs: [ + { + stake: 10, + contextUserIntent: { + id: 'intent-1', + payload: 'I am looking for scalable coordination tools', + createdAt: new Date() + }, + targetUserIntent: { + id: 'intent-2', + payload: 'I am building scalable coordination tools', + createdAt: new Date() + } + } + ], + initiatorName: 'Bob' + }; + + try { + // Generate Synthesis using Real AI + console.log('Generating synthesis via vibeCheck...'); + const vibeResult = await vibeCheck(mockData); + + if (!vibeResult.success) { + throw new Error(`Vibe check failed: ${vibeResult.error}`); + } + + const synthesis = vibeResult.synthesis || "Fallback synthesis"; + const subject = vibeResult.subject || "Fallback subject"; + + console.log(`Generated Subject: ${subject}`); + + // Test 1: Connection Request + console.log('\n--- Testing Connection Request Email ---'); + const reqTemplate = connectionRequestTemplate(user1.name, user2.name, synthesis, subject); + await logEmailToFile('Connection Request', user2.email, reqTemplate.subject, reqTemplate.html, reqTemplate.text); + + await sendEmail({ + to: user2.email, + subject: reqTemplate.subject, + html: reqTemplate.html, + text: reqTemplate.text + }); + console.log('โœ… Connection Request Email Processed'); + + // Test 2: 
Connection Accepted + console.log('\n--- Testing Connection Accepted Email ---'); + const accTemplate = connectionAcceptedTemplate(user1.name, user2.name, synthesis); + await logEmailToFile('Connection Accepted', [user1.email, user2.email], accTemplate.subject, accTemplate.html, accTemplate.text); + + await sendEmail({ + to: [user1.email, user2.email], + subject: accTemplate.subject, + html: accTemplate.html, + text: accTemplate.text + }); + console.log('โœ… Connection Accepted Email Processed'); + + // Test 3: Weekly Newsletter + console.log('\n--- Testing Weekly Newsletter Email ---'); + const mockMatches = [ + { + name: user1.name, + role: 'Software Engineer', + reasoning: 'Matches your interest in decentralized protocols.' + }, + { + name: 'Charlie', + role: 'Product Designer', + reasoning: 'Working on similar UI patterns for agentic workflows.' + } + ]; + + const newsTemplate = weeklyNewsletterTemplate(user2.name, mockMatches); + await logEmailToFile('Weekly Newsletter', user2.email, newsTemplate.subject, newsTemplate.html, newsTemplate.text); + + await sendEmail({ + to: user2.email, + subject: newsTemplate.subject, + html: newsTemplate.html, + text: newsTemplate.text + }); + console.log('โœ… Weekly Newsletter Email Processed'); + + console.log('\n๐ŸŽ‰ Debug output written to protocol/email-debug.md'); + + } catch (error) { + console.error('Test failed:', error); + } + + process.exit(0); +} + +main(); diff --git a/protocol/src/cli/test-vibe-check.ts b/protocol/src/cli/test-vibe-check.ts new file mode 100644 index 000000000..ffba59102 --- /dev/null +++ b/protocol/src/cli/test-vibe-check.ts @@ -0,0 +1,55 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +config({ path: resolve(__dirname, '../../.env.development') }); + +import { vibeCheck, OtherUserData } from '../agents/external/vibe_checker/index'; + +async function main() { + console.log('Starting vibeCheck test...'); + + if (!process.env.OPENROUTER_API_KEY) { + console.error('Error: OPENROUTER_API_KEY is missing in .env.development'); + process.exit(1); + } + + const mockData: OtherUserData = { + id: 'user-123', + name: 'Alice', + intro: 'Building scalable coordination tools for DAOs.', + intentPairs: [ + { + stake: 10, + contextUserIntent: { + id: 'intent-1', + payload: 'I am looking for scalable coordination tools', + createdAt: new Date() + }, + targetUserIntent: { + id: 'intent-2', + payload: 'I am building scalable coordination tools', + createdAt: new Date() + } + } + ], + initiatorName: 'Bob' + }; + + try { + console.log('Calling vibeCheck with mock data...'); + const result = await vibeCheck(mockData); + + console.log('\n--- Vibe Check Result ---'); + console.log('Success:', result.success); + if (result.success) { + console.log('Subject:', result.subject); + console.log('Synthesis:', result.synthesis); + } else { + console.error('Error:', result.error); + } + + } catch (error) { + console.error('Test failed:', error); + } +} + +main(); diff --git a/protocol/src/lib/email/email.ts b/protocol/src/lib/email/email.ts index 54535ee6c..37429ab93 100644 --- a/protocol/src/lib/email/email.ts +++ b/protocol/src/lib/email/email.ts @@ -8,18 +8,26 @@ export const sendEmail = async (options: { html: string; text: string; }) => { - if (!process.env.RESEND_API_KEY || !resend) { - console.warn('RESEND_API_KEY not configured, email not sent'); + // SAFETY: Override recipient for testing + const isTestMode = process.env.ENABLE_EMAIL_TESTING === 'true'; + const recipient = isTestMode ? 
'yanki@index.network' : options.to; + + if (!process.env.RESEND_API_KEY || !resend || process.env.RESEND_API_KEY === 'DISABLED') { + console.warn('RESEND_API_KEY not configured or disabled, email not sent'); + return; + } + + if (!isTestMode) { + console.log('Email is disabled for now: not from mainnet yet'); return; } - console.log('Email is disabled for now: not from mainnet yet'); - return; + console.log(`[TEST MODE] Sending email to ${recipient} (Original: ${options.to})`); try { const result = await resend!.emails.send({ from: 'Index Network ', - to: options.to, + to: recipient, replyTo: 'hello@index.network', subject: options.subject, html: options.html, From f1a9f96e5ec75d9badca47639ae75a6d047b99f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Mon, 1 Dec 2025 22:42:07 +0300 Subject: [PATCH 010/327] chore: remove debug files and add CLI test suite --- protocol/email-debug.md | 143 ---------------------- protocol/snowflake.log | 11 -- protocol/src/cli/db-seed.ts | 33 ++---- protocol/src/cli/test-data.ts | 20 ++++ protocol/src/cli/test-suite.ts | 204 ++++++++++++++++++++++++++++++++ protocol/src/lib/email/email.ts | 43 ++++++- 6 files changed, 273 insertions(+), 181 deletions(-) delete mode 100644 protocol/email-debug.md delete mode 100644 protocol/snowflake.log create mode 100644 protocol/src/cli/test-data.ts create mode 100644 protocol/src/cli/test-suite.ts diff --git a/protocol/email-debug.md b/protocol/email-debug.md deleted file mode 100644 index cd6283ca0..000000000 --- a/protocol/email-debug.md +++ /dev/null @@ -1,143 +0,0 @@ -# Email Test Debug Output - -================================================================================ -Connection Request -================================================================================ -To: bob@example.com -Subject: Alice - Bob needs your coordination tools - ---- TEXT CONTENT --- -Hey Bob, - -Youโ€™ve got a new connection request on Index, Alice wants to connect with you. - -๐Ÿ‘‰ Go to Index to approve: https://index.network/inbox - -What could happen between you two: -Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges. - -If you want to move it forward, Iโ€™ll make the introduction. If not, everything stays quiet. - -โ€”Index - ---- HTML CONTENT --- - -
-

Hey Bob,

-

You’ve got a new connection request on Index, Alice wants to connect with you.

- - - - -

What could happen between you two:

-
Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges.
- - -

If you want to move it forward, I’ll make the introduction. If not, everything stays quiet.

-

—Index

-
- - - -================================================================================ -Connection Accepted -================================================================================ -To: alice@example.com, bob@example.com -Subject: Alice <> Bob - somethingโ€™s here - ---- TEXT CONTENT --- -Hey Alice and Bob, - -You both said yes, love when that happens. - -From what youโ€™re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, itโ€™s yours now. - -Quick recap: -Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges. - -No formal intros needed - just reply here and pick it up from wherever feels right. - -โ€”Your discovery agent, quietly cheering from the background - ---- HTML CONTENT --- - -
-

Hey Alice and Bob,

-

You both said yes, love when that happens.

- -

From what you’re each exploring, this felt like a connection worth bringing into the real world. Whether it turns into a conversation, a collaboration, or just a useful exchange, it’s yours now.

- - -

Quick recap:

-
Bob is explicitly looking for [scalable coordination tools](https://index.network/intents/intent-1) right now, and Alice is deeply immersed in building exactly that for DAOs. It seems like the stars have aligned for this one; they both have a fresh, shared focus on solving coordination challenges.
- - -

No formal intros needed - just reply here and pick it up from wherever feels right.

-

—Your discovery agent, quietly cheering from the background

-
- - - -================================================================================ -Weekly Newsletter -================================================================================ -To: bob@example.com -Subject: Youโ€™ve got 2 conversations waiting in your Index Inbox - ---- TEXT CONTENT --- -Hey Bob, - -Indexโ€™s agents surfaced a few people this week whose work lines up unusually well with the things youโ€™re pushing right now - fundraising, agent deployments, protocol scaling, semantic web thinking, and the early shape of a discovery network. - -Each one can shift something forward - choose your move, and Iโ€™ll take the next step with you. - -๐Ÿ‘‰ Go to your Inbox: https://index.network/inbox - - -Alice โ€” Software Engineer -Matches your interest in decentralized protocols. - - -Charlie โ€” Product Designer -Working on similar UI patterns for agentic workflows. - - -๐Ÿ‘‰ Go to your Inbox: https://index.network/inbox - -โ€”Index, keeping your next moves within reach - ---- HTML CONTENT --- - -
-

Hey Bob,

-

Index’s agents surfaced a few people this week whose work lines up unusually well with the things you’re pushing right now - fundraising, agent deployments, protocol scaling, semantic web thinking, and the early shape of a discovery network.

-

Each one can shift something forward - choose your move, and I’ll take the next step with you.

- -
👉
 Go to your Inbox
- - -
-

Alice — Software Engineer

-

Matches your interest in decentralized protocols.

-
- -
-

Charlie — Product Designer

-

Working on similar UI patterns for agentic workflows.

-
- - -
👉
 Go to your Inbox
- -

—Index, keeping your next moves within reach

-
- - diff --git a/protocol/snowflake.log b/protocol/snowflake.log deleted file mode 100644 index d8ad32dd1..000000000 --- a/protocol/snowflake.log +++ /dev/null @@ -1,11 +0,0 @@ -{"level":"INFO","message":"[5:33:06.844 PM]: Creating new connection object"} -{"level":"INFO","message":"[5:33:06.866 PM]: Creating Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] with host: JXLVOQE-TLB06923.snowflakecomputing.com, account: JXLVOQE-TLB06923, accessUrl: https://JXLVOQE-TLB06923.snowflakecomputing.com, user: index, password is provided, role: undefined, database: DATA_COLLECTOR_ICEBERG, schema: PUBLIC, warehouse: twitter_index, region: undefined, authenticator: SNOWFLAKE, ocsp mode: FAIL_OPEN, os: darwin, os version: 25.1.0"} -{"level":"INFO","message":"[5:33:06.867 PM]: Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] additional details: passcode in password is provided, passcode is not provided, private key is not provided, application: undefined, client name: snowflake-sdk, client version: 1.15.0, retry timeout: 300, private key path: undefined, private key pass is not provided, client store temporary credential: false, browser response timeout: 120000"} -{"level":"INFO","message":"[5:33:06.867 PM]: Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] - connection object created successfully."} -{"level":"INFO","message":"[5:33:06.867 PM]: Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] - connecting. Associated Snowflake domain: GLOBAL"} -{"level":"INFO","message":"[5:33:06.868 PM]: Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] - authentication successful using: SNOWFLAKE"} -{"level":"INFO","message":"[5:33:06.868 PM]: Trying to initialize Easy Logging"} -{"level":"INFO","message":"[5:33:06.871 PM]: No client config detected."} -{"level":"INFO","message":"[5:33:06.871 PM]: No config file path found. 
Client config will not be used."} -{"level":"INFO","message":"[5:33:06.871 PM]: Easy Logging is disabled as no config has been found"} -{"level":"INFO","message":"[5:33:06.873 PM]: Connection[id: d6607740-5fa2-47d6-a562-aba5fb8a8732] - connected successfully after 6.107833000001847 milliseconds"} diff --git a/protocol/src/cli/db-seed.ts b/protocol/src/cli/db-seed.ts index a26d81654..38059c944 100644 --- a/protocol/src/cli/db-seed.ts +++ b/protocol/src/cli/db-seed.ts @@ -24,26 +24,7 @@ type GlobalOpts = { const INDEX_ID = '5aff6cd6-d64e-4ef9-8bcf-6c89815f771c'; const SEMANTIC_RELEVANCY_AGENT_ID = '028ef80e-9b1c-434b-9296-bb6130509482'; -const PRIVY_TEST_ACCOUNTS = [ - { name: 'Casey Harper', email: 'test-6285@privy.io', phoneNumber: '+1 555 555 1625', otpCode: '607027' }, - { name: 'Devon Brooks', email: 'test-9716@privy.io', phoneNumber: '+1 555 555 2920', otpCode: '670543' }, - { name: 'Morgan Li', email: 'test-1761@privy.io', phoneNumber: '+1 555 555 5724', otpCode: '888893' }, - { name: 'Riley Nguyen', email: 'test-5331@privy.io', phoneNumber: '+1 555 555 6283', otpCode: '094228' }, - { name: 'Taylor Singh', email: 'test-6462@privy.io', phoneNumber: '+1 555 555 8175', otpCode: '066860' }, - { name: 'Quinn Ramirez', email: 'test-7106@privy.io', phoneNumber: '+1 555 555 8469', otpCode: '991478' }, - { name: 'Emerson Blake', email: 'test-6945@privy.io', phoneNumber: '+1 555 555 9096', otpCode: '510460' }, - { name: 'Peyton Alvarez', email: 'test-2676@privy.io', phoneNumber: '+1 555 555 9419', otpCode: '503536' }, - { name: 'Sydney Clarke', email: 'test-7561@privy.io', phoneNumber: '+1 555 555 9497', otpCode: '737681' }, - { name: 'Hayden Moore', email: 'test-1093@privy.io', phoneNumber: '+1 555 555 9779', otpCode: '934435' }, -]; - -const INTENTS = [ - 'Looking for AI researchers to collaborate on machine learning projects', - 'Seeking blockchain developers for DeFi partnerships', - 'Connecting with designers for UI/UX collaborations', - 'Building cross-functional teams for product development', - 'Open to mentoring junior developers in web3 space', -]; +import { PRIVY_TEST_ACCOUNTS, INTENTS } from './test-data'; async function ensurePrivyIdentity(email: string): Promise { let privyUser = await privyClient.getUserByEmail(email); @@ -57,7 +38,7 @@ async function ensurePrivyIdentity(email: string): Promise { async function createUser(account: typeof PRIVY_TEST_ACCOUNTS[0]): Promise { const privyId = await ensurePrivyIdentity(account.email); - + try { const [user] = await db.insert(users).values({ privyId, @@ -109,7 +90,7 @@ async function seedDatabase(): Promise<{ ok: boolean; error?: string }> { title: 'Mock Demo Network', prompt: 'Share collaboration opportunities', }); - } catch {} + } catch { } // Create users and intents const createdUsers = []; @@ -128,7 +109,7 @@ async function seedDatabase(): Promise<{ ok: boolean; error?: string }> { prompt: 'everything', autoAssign: true, }); - } catch {} + } catch { } // Create intent const payload = INTENTS[i % INTENTS.length]; @@ -148,13 +129,13 @@ async function seedDatabase(): Promise<{ ok: boolean; error?: string }> { reasoning: `${createdUsers[i].name} and ${createdUsers[j].name} should connect`, agentId: SEMANTIC_RELEVANCY_AGENT_ID, }); - } catch {} + } catch { } } } console.log(`โœ… Created ${createdUsers.length} users with connected intents`); console.log('\nLogin credentials:'); - PRIVY_TEST_ACCOUNTS.forEach(acc => + PRIVY_TEST_ACCOUNTS.forEach(acc => console.log(`${acc.name}: ${acc.email} | ${acc.phoneNumber} | OTP: ${acc.otpCode}`) ); 
@@ -188,7 +169,7 @@ async function main(): Promise { } const result = await seedDatabase(); - + if (!result.ok) { console.error('โŒ Seed failed:', result.error); process.exit(1); diff --git a/protocol/src/cli/test-data.ts b/protocol/src/cli/test-data.ts new file mode 100644 index 000000000..d35cef9f0 --- /dev/null +++ b/protocol/src/cli/test-data.ts @@ -0,0 +1,20 @@ +export const PRIVY_TEST_ACCOUNTS = [ + { name: 'Casey Harper', email: 'test-6285@privy.io', phoneNumber: '+1 555 555 1625', otpCode: '607027' }, + { name: 'Devon Brooks', email: 'test-9716@privy.io', phoneNumber: '+1 555 555 2920', otpCode: '670543' }, + { name: 'Morgan Li', email: 'test-1761@privy.io', phoneNumber: '+1 555 555 5724', otpCode: '888893' }, + { name: 'Riley Nguyen', email: 'test-5331@privy.io', phoneNumber: '+1 555 555 6283', otpCode: '094228' }, + { name: 'Taylor Singh', email: 'test-6462@privy.io', phoneNumber: '+1 555 555 8175', otpCode: '066860' }, + { name: 'Quinn Ramirez', email: 'test-7106@privy.io', phoneNumber: '+1 555 555 8469', otpCode: '991478' }, + { name: 'Emerson Blake', email: 'test-6945@privy.io', phoneNumber: '+1 555 555 9096', otpCode: '510460' }, + { name: 'Peyton Alvarez', email: 'test-2676@privy.io', phoneNumber: '+1 555 555 9419', otpCode: '503536' }, + { name: 'Sydney Clarke', email: 'test-7561@privy.io', phoneNumber: '+1 555 555 9497', otpCode: '737681' }, + { name: 'Hayden Moore', email: 'test-1093@privy.io', phoneNumber: '+1 555 555 9779', otpCode: '934435' }, +]; + +export const INTENTS = [ + 'Looking for AI researchers to collaborate on machine learning projects', + 'Seeking blockchain developers for DeFi partnerships', + 'Connecting with designers for UI/UX collaborations', + 'Building cross-functional teams for product development', + 'Open to mentoring junior developers in web3 space', +]; diff --git a/protocol/src/cli/test-suite.ts b/protocol/src/cli/test-suite.ts new file mode 100644 index 000000000..6835cb3ce --- /dev/null +++ b/protocol/src/cli/test-suite.ts @@ -0,0 +1,204 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { eq, and, desc } from 'drizzle-orm'; +import { v4 as uuidv4 } from 'uuid'; + +// Load environment variables immediately +config({ path: resolve(__dirname, '../../.env.development') }); + +// Enable email testing mode +process.env.ENABLE_EMAIL_TESTING = 'true'; +if (!process.env.TESTING_EMAIL_ADDRESS) { + console.warn('โš ๏ธ TESTING_EMAIL_ADDRESS not set. Emails will not be sent.'); +} else { + console.log(`๐Ÿ“ง Email testing enabled. 
All emails will be sent to: ${process.env.TESTING_EMAIL_ADDRESS}`); +} + +import { PRIVY_TEST_ACCOUNTS } from './test-data'; + +// Types for our test context +interface TestContext { + users: Map; // email -> user record + createdIntents: Map; // description -> intentId + createdFiles: string[]; + createdLinks: string[]; +} + +const ctx: TestContext = { + users: new Map(), + createdIntents: new Map(), + createdFiles: [], + createdLinks: [] +}; + +async function main() { + // Dynamic imports + type DbModule = typeof import('../lib/db'); + const { default: db, closeDb } = await import('../lib/db.js') as unknown as DbModule; + const { users, files, indexLinks, intents, intentStakes } = await import('../lib/schema.js'); + const { IntentService } = await import('../lib/intent-service.js'); + const { discoverUsers } = await import('../lib/discover.js'); + const { sendConnectionRequestEmail } = await import('../lib/email/email-handlers.js'); + + console.log('\n๐Ÿš€ Starting Expanded CLI Test Suite...\n'); + + if (process.env.NODE_ENV === 'production') { + console.error('โŒ Cannot run test suite in production environment'); + process.exit(1); + } + + try { + // --- PREPARATION --- + console.log('--- 1. Preparation: Fetching Test Users ---'); + for (const account of PRIVY_TEST_ACCOUNTS) { + const [user] = await db.select().from(users).where(eq(users.email, account.email)).limit(1); + if (user) { + ctx.users.set(account.email, user); + console.log(`โœ… Loaded user: ${user.name} (${user.email})`); + } else { + console.warn(`โš ๏ธ User not found: ${account.email}. Run 'yarn db:seed' first.`); + } + } + + if (ctx.users.size < 2) { + throw new Error('Need at least 2 test users to run full suite.'); + } + + const alice = ctx.users.get('alice@example.com') || Array.from(ctx.users.values())[0]; + const bob = ctx.users.get('bob@example.com') || Array.from(ctx.users.values())[1]; + + console.log(`\nTesting with:\n- Alice: ${alice.name} (${alice.id})\n- Bob: ${bob.name} (${bob.id})\n`); + + // --- SCENARIO 1: DISCOVERY FORM (Alice) --- + console.log('--- 2. Scenario: Discovery Form (Alice) ---'); + + // 1. Create a dummy file record + const fileId = uuidv4(); + await db.insert(files).values({ + id: fileId, + name: 'project_proposal.pdf', + size: BigInt(1024 * 1024), + type: 'application/pdf', + userId: alice.id, + }); + ctx.createdFiles.push(fileId); + console.log(`โœ… Simulated file upload: ${fileId}`); + + // 2. Create a dummy link record + const linkId = uuidv4(); + await db.insert(indexLinks).values({ + id: linkId, + userId: alice.id, + url: 'https://example.com/alice-portfolio', + lastStatus: 'ok' + }); + ctx.createdLinks.push(linkId); + console.log(`โœ… Simulated link submission: ${linkId}`); + + // 3. Create Intent from Discovery Form + const alicePayload = "I am looking for a co-founder to build a decentralized social graph."; + const aliceIntent = await IntentService.createIntent({ + payload: alicePayload, + userId: alice.id, + confidence: 1.0, + inferenceType: 'explicit', + sourceType: 'discovery_form', + sourceId: fileId, // Linked to the file + indexIds: ['5aff6cd6-d64e-4ef9-8bcf-6c89815f771c'] + }); + ctx.createdIntents.set('alice_discovery', aliceIntent.id); + console.log(`โœ… Created Intent for Alice: "${alicePayload}" (ID: ${aliceIntent.id})`); + + + // --- SCENARIO 2: LIBRARY UPLOAD (Bob) --- + console.log('\n--- 3. Scenario: Library Upload (Bob) ---'); + + // 1. 
Create a dummy file record for Bob + const bobFileId = uuidv4(); + await db.insert(files).values({ + id: bobFileId, + name: 'technical_architecture.docx', + size: BigInt(500 * 1024), + type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + userId: bob.id, + }); + ctx.createdFiles.push(bobFileId); + console.log(`โœ… Simulated library file upload: ${bobFileId}`); + + // 2. Create Intent from File + const bobPayload = "I am an experienced backend engineer interested in social graphs and decentralized identity."; + const bobIntent = await IntentService.createIntent({ + payload: bobPayload, + userId: bob.id, + confidence: 0.9, + inferenceType: 'implicit', + sourceType: 'file', + sourceId: bobFileId, + indexIds: ['5aff6cd6-d64e-4ef9-8bcf-6c89815f771c'] + }); + ctx.createdIntents.set('bob_library', bobIntent.id); + console.log(`โœ… Created Intent for Bob: "${bobPayload}" (ID: ${bobIntent.id})`); + + + // --- SCENARIO 3: MATCHING & DISCOVERY --- + console.log('\n--- 4. Scenario: Matching & Discovery ---'); + + // Force a stake between them to ensure discovery works (simulating the background agent) + // In a real scenario, the 'semantic-relevancy' agent would do this. + const stakeId = uuidv4(); + await db.insert(intentStakes).values({ + id: stakeId, + intents: [aliceIntent.id, bobIntent.id], // Order doesn't strictly matter for the array check usually, but let's be safe + stake: BigInt(85), + reasoning: "Alice needs a co-founder for a social graph, and Bob is an engineer interested in social graphs.", + agentId: '028ef80e-9b1c-434b-9296-bb6130509482' + }); + console.log(`โœ… Injected mock stake between Alice and Bob (ID: ${stakeId})`); + + // Run discovery for Alice + console.log(`Running discovery for Alice...`); + const discoveryResults = await discoverUsers({ + authenticatedUserId: alice.id, + limit: 10 + }); + + const bobFound = discoveryResults.results.find(r => r.user.id === bob.id); + + if (bobFound) { + console.log(`โœ… Alice discovered Bob!`); + console.log(` Total Stake: ${bobFound.totalStake}`); + console.log(` Reasoning: ${bobFound.intents[0].reasonings[0]}`); + } else { + console.error(`โŒ Alice did NOT discover Bob. Discovery results:`, JSON.stringify(discoveryResults.results.map(r => r.user.name), null, 2)); + // Don't fail hard, proceed to email test if possible + } + + + // --- SCENARIO 4: EMAIL & CONNECTION --- + console.log('\n--- 5. Scenario: Email & Connection ---'); + + console.log(`Sending connection request from Alice to Bob...`); + console.log(`(Emails should be redirected to ${process.env.TESTING_EMAIL_ADDRESS || 'yanki@index.network'})`); + + try { + await sendConnectionRequestEmail(alice.id, bob.id); + console.log(`โœ… sendConnectionRequestEmail completed without error.`); + console.log(`๐Ÿ‘‰ CHECK YOUR EMAIL (${process.env.TESTING_EMAIL_ADDRESS || 'yanki@index.network'}) for a message with subject related to "Connection Request"`); + } catch (error) { + console.error(`โŒ Failed to send email:`, error); + } + + console.log('\n=================================================='); + console.log(`Test Suite Completed Successfully.`); + console.log('=================================================='); + + } catch (error) { + console.error('Test suite failed:', error); + } finally { + // Cleanup (Optional - maybe we want to keep data for inspection?) + // For now, let's keep it. 
+ await closeDb(); + } +} + +main(); diff --git a/protocol/src/lib/email/email.ts b/protocol/src/lib/email/email.ts index 37429ab93..5ab27fb77 100644 --- a/protocol/src/lib/email/email.ts +++ b/protocol/src/lib/email/email.ts @@ -10,7 +10,39 @@ export const sendEmail = async (options: { }) => { // SAFETY: Override recipient for testing const isTestMode = process.env.ENABLE_EMAIL_TESTING === 'true'; - const recipient = isTestMode ? 'yanki@index.network' : options.to; + const recipient = isTestMode ? process.env.TESTING_EMAIL_ADDRESS : options.to; + + if (isTestMode && !recipient) { + console.warn('TESTING_EMAIL_ADDRESS not set. Skipping email sending.'); + return; + } + + if (isTestMode) { + const { appendFile } = await import('fs/promises'); + const { resolve } = await import('path'); + const debugPath = resolve(process.cwd(), 'email-debug.md'); + const separator = '='.repeat(80); + const timestamp = new Date().toISOString(); + const content = ` + ${separator} + [${timestamp}] Email Sent + ${separator} + To: ${Array.isArray(recipient) ? recipient.join(', ') : recipient} + Subject: ${options.subject} + + --- TEXT CONTENT --- + ${options.text} + + --- HTML CONTENT --- + ${options.html} + `; + try { + await appendFile(debugPath, content); + console.log(`๐Ÿ“ Email logged to ${debugPath}`); + } catch (err) { + console.error('Failed to log email to file:', err); + } + } if (!process.env.RESEND_API_KEY || !resend || process.env.RESEND_API_KEY === 'DISABLED') { console.warn('RESEND_API_KEY not configured or disabled, email not sent'); @@ -22,6 +54,15 @@ export const sendEmail = async (options: { return; } + if (!recipient) { + if (isTestMode) { + console.warn('TESTING_EMAIL_ADDRESS not set. Skipping email sending (logged to file).'); + return; + } + console.error('No recipient defined for email.'); + return; + } + console.log(`[TEST MODE] Sending email to ${recipient} (Original: ${options.to})`); try { From 83b2f44df3e993c7c703958fed76bbc01bec758b Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Mon, 1 Dec 2025 16:14:40 -0500 Subject: [PATCH 011/327] Refactor Slack message processing to per-message basis Changed Slack integration to process each message individually instead of batching by user. This simplifies user resolution, index membership, and intent generation, improving reliability and granularity of processing. --- .../src/lib/integrations/providers/slack.ts | 147 ++++++------------ 1 file changed, 48 insertions(+), 99 deletions(-) diff --git a/protocol/src/lib/integrations/providers/slack.ts b/protocol/src/lib/integrations/providers/slack.ts index a57fc594a..f396d0ddb 100644 --- a/protocol/src/lib/integrations/providers/slack.ts +++ b/protocol/src/lib/integrations/providers/slack.ts @@ -117,7 +117,7 @@ interface SlackApiResponse { /** * Initialize Slack integration sync. - * Fetches messages, processes per user, resolves users, adds to index, and queues intent generation. + * Fetches messages, processes each message individually, resolves users, adds to index, and queues intent generation. 
*/ export async function initSlack( integrationId: string, @@ -400,9 +400,9 @@ export async function initSlack( }); } - // Process this page of messages per user - if (pageMessages.length > 0) { - const result = await processMessagesPerUser(pageMessages, integration.id, integration.indexId!); + // Process each message individually + for (const message of pageMessages) { + const result = await processMessage(message, integration.id, integration.indexId!); totalIntentsGenerated += result.intentsGenerated; totalUsersProcessed += result.usersProcessed; totalNewUsersCreated += result.newUsersCreated; @@ -456,112 +456,61 @@ export async function initSlack( } /** - * Process messages per user - extract users, resolve them, add to index, queue intents + * Process a single message - resolve user, add to index, queue intent generation */ -async function processMessagesPerUser( - messages: SlackMessage[], +async function processMessage( + message: SlackMessage, integrationId: string, indexId: string ): Promise<{ intentsGenerated: number; usersProcessed: number; newUsersCreated: number }> { - // Extract unique users from messages - const userMap = new Map(); + if (!message.user_profile) { + return { intentsGenerated: 0, usersProcessed: 0, newUsersCreated: 0 }; + } - for (const message of messages) { - if (!message.user_profile) continue; - const slackUserId = message.user; - if (userMap.has(slackUserId)) continue; - - userMap.set(slackUserId, { + try { + // Resolve user + const resolvedUser = await resolveIntegrationUser({ email: message.user_profile.email, + providerId: message.user, name: message.user_profile.name, + provider: 'slack', avatar: message.user_profile.avatar, - providerId: slackUserId + updateEmptyFields: true }); - } - - if (userMap.size === 0) { - return { intentsGenerated: 0, usersProcessed: 0, newUsersCreated: 0 }; - } - - // Resolve each user and add to index - const resolvedUsers = new Map(); - let newUsersCreated = 0; - - for (const [providerId, userInfo] of userMap.entries()) { - try { - const resolvedUser = await resolveIntegrationUser({ - email: userInfo.email, - providerId, - name: userInfo.name, - provider: 'slack', - avatar: userInfo.avatar, - updateEmptyFields: true - }); - - if (!resolvedUser) { - log.error('Failed to resolve user', { providerId, email: userInfo.email }); - continue; - } - - await ensureIndexMembership(resolvedUser.id, indexId); - - if (resolvedUser.isNewUser) { - newUsersCreated++; - } - - resolvedUsers.set(providerId, resolvedUser); - } catch (error) { - log.error('Failed to resolve user', { - providerId, - error: error instanceof Error ? 
error.message : String(error) - }); - } - } - - if (resolvedUsers.size === 0) { - return { intentsGenerated: 0, usersProcessed: 0, newUsersCreated }; - } - - // Queue intent generation per user - let createdAt: Date | undefined; - if (messages.length > 0 && messages[0]?.metadata?.createdAt) { - createdAt = messages[0].metadata.createdAt; - } - - let intentsGenerated = 0; - for (const [providerId, user] of resolvedUsers.entries()) { - try { - // Filter messages to only include messages from this user - const userMessages = messages.filter(msg => msg.user === providerId); - - if (userMessages.length === 0) { - continue; // Skip if no messages for this user - } - - await addGenerateIntentsJob({ - userId: user.id, - sourceId: integrationId, - sourceType: 'integration', - objects: userMessages, - instruction: `Generate intents based on Slack messages`, - indexId, - intentCount: MAX_INTENTS_PER_USER, - ...(createdAt && { createdAt }) - }, 6); - intentsGenerated++; - } catch (error) { - log.error('Failed to queue intent generation', { - userId: user.id, - error: error instanceof Error ? error.message : String(error) - }); + + if (!resolvedUser) { + log.error('Failed to resolve user', { providerId: message.user, email: message.user_profile.email }); + return { intentsGenerated: 0, usersProcessed: 0, newUsersCreated: 0 }; } + + await ensureIndexMembership(resolvedUser.id, indexId); + + const newUsersCreated = resolvedUser.isNewUser ? 1 : 0; + + // Queue intent generation for this message + await addGenerateIntentsJob({ + userId: resolvedUser.id, + sourceId: integrationId, + sourceType: 'integration', + objects: [message], + instruction: `Generate intents based on Slack messages`, + indexId, + intentCount: MAX_INTENTS_PER_USER, + ...(message.metadata?.createdAt && { createdAt: message.metadata.createdAt }) + }, 6); + + return { + intentsGenerated: 1, + usersProcessed: 1, + newUsersCreated + }; + } catch (error) { + log.error('Failed to process message', { + providerId: message.user, + error: error instanceof Error ? error.message : String(error) + }); + return { intentsGenerated: 0, usersProcessed: 0, newUsersCreated: 0 }; } - - return { - intentsGenerated, - usersProcessed: resolvedUsers.size, - newUsersCreated - }; } /** From 39f161a1d1592783689b8c84053a114007f811e3 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Mon, 1 Dec 2025 21:25:03 -0500 Subject: [PATCH 012/327] Refactor discover pagination and reduce inbox limit Reduced the inbox discover API limit from 50 to 25 for improved performance. Refactored discoverUsers to apply pagination after sorting by totalStake, ensuring correct result ordering and pagination logic. Removed unnecessary console logs from BaseContextBroker for cleaner output. 
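
The pagination refactor described in this commit boils down to: collect all candidate results, order them by `totalStake` descending, then slice out the requested page. A minimal sketch of that pattern is below; the names (`DiscoverResult`, `paginateByStake`) are illustrative, not the exact identifiers used in `discover.ts`, and the real function also filters stakes and builds richer pagination metadata.

```typescript
// Sketch only: sort the full candidate set by total stake, then slice the page.
interface DiscoverResult {
  totalStake: number;
}

function paginateByStake<T extends DiscoverResult>(
  results: T[],
  page: number,
  limit: number
): { results: T[]; hasNext: boolean; hasPrev: boolean } {
  // Order all candidates before paginating so page boundaries follow stake order.
  const sorted = [...results].sort((a, b) => b.totalStake - a.totalStake);
  const start = (page - 1) * limit;
  const end = start + limit;
  return {
    results: sorted.slice(start, end),
    hasNext: end < sorted.length,
    hasPrev: page > 1,
  };
}
```

The earlier query-level `.limit()`/`.offset()` ran before the stake-based sort, so page boundaries did not follow stake order; slicing after the in-memory sort fixes the ordering, at the cost of loading all candidate stakes before paginating.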
--- frontend/src/app/inbox/page.tsx | 2 +- protocol/src/agents/context_brokers/base.ts | 2 -- protocol/src/lib/discover.ts | 26 ++++++++++----------- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/frontend/src/app/inbox/page.tsx b/frontend/src/app/inbox/page.tsx index 6f0030f90..2066cde69 100644 --- a/frontend/src/app/inbox/page.tsx +++ b/frontend/src/app/inbox/page.tsx @@ -103,7 +103,7 @@ export default function InboxPage() { indexIds: apiIndexIds, intentIds: apiIntentIds, excludeDiscovered: true, - limit: 50 + limit: 25 }); // Transform discover data diff --git a/protocol/src/agents/context_brokers/base.ts b/protocol/src/agents/context_brokers/base.ts index 9dc2b4f11..0f09fd5ed 100644 --- a/protocol/src/agents/context_brokers/base.ts +++ b/protocol/src/agents/context_brokers/base.ts @@ -133,7 +133,6 @@ export abstract class BaseContextBroker { .limit(10); console.log(`Found ${similarIntents.length} similar intents using vector search`); - console.log(similarIntents); // Filter by similarity threshold (equivalent to 0.7 LLM score) const relatedIntents = similarIntents @@ -149,7 +148,6 @@ export abstract class BaseContextBroker { score: intent.similarity })); - console.log('Related intents (vector similarity):', relatedIntents.length); return relatedIntents; } catch (error) { diff --git a/protocol/src/lib/discover.ts b/protocol/src/lib/discover.ts index ff20dd265..84f86a35e 100644 --- a/protocol/src/lib/discover.ts +++ b/protocol/src/lib/discover.ts @@ -190,10 +190,7 @@ export async function discoverUsers(filters: DiscoverFilters): Promise<{ ) ) // Group by stake to get all intents for each stake - .groupBy(intentStakes.id, intentStakes.stake, intentStakes.reasoning) - // Add pagination - .limit(limit) - .offset((page - 1) * limit); + .groupBy(intentStakes.id, intentStakes.stake, intentStakes.reasoning); // This query finds stakes with the authenticated user's intents // Then we process them to find discovered users @@ -202,7 +199,7 @@ export async function discoverUsers(filters: DiscoverFilters): Promise<{ // - Stakes that contain intents from other users (discovered users) // - Index coherence (all intents in stake exist in same index) // - Groups by stake to get all intents per stake - // - Includes pagination + // Note: Pagination is applied as post-filter after sorting by totalStake const results = await mainQuery; @@ -352,20 +349,21 @@ export async function discoverUsers(filters: DiscoverFilters): Promise<{ }; }); - // Sort by bucket first (newer first), then by stake within bucket - bucketedResults.sort((a, b) => { - // First sort by bucket (newer first) - if (a.bucket !== b.bucket) return a.bucket - b.bucket; - // Then by stake within bucket - return b.totalStake - a.totalStake; - }); + // Sort by totalStake descending + bucketedResults.sort((a, b) => b.totalStake - a.totalStake); + + // Apply pagination as post-filter + const startIndex = (page - 1) * limit; + const endIndex = startIndex + limit; + const paginatedResults = bucketedResults.slice(startIndex, endIndex); + const totalResults = bucketedResults.length; return { - results: bucketedResults, + results: paginatedResults, pagination: { page, limit, - hasNext: results.length === limit, + hasNext: endIndex < totalResults, hasPrev: page > 1 } }; From 133bafd8393007838288f97cc13aef7c72a5e502 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Tue, 2 Dec 2025 15:48:50 +0300 Subject: [PATCH 013/327] fix: prevent duplicate matches in weekly newsletter, refine role display, and update 
connection request email link styling. --- protocol/src/lib/email/templates/connection-request.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/protocol/src/lib/email/templates/connection-request.ts b/protocol/src/lib/email/templates/connection-request.ts index 772ede8bf..9f8674c03 100644 --- a/protocol/src/lib/email/templates/connection-request.ts +++ b/protocol/src/lib/email/templates/connection-request.ts @@ -6,7 +6,8 @@ export const connectionRequestTemplate = (fromUserName: string, toUserName: stri

You’ve got a new connection request on Index, ${fromUserName} wants to connect with you.

${synthesis ? ` From ceb25d651083d0602cfc768507e84c862a4448b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Tue, 2 Dec 2025 15:49:27 +0300 Subject: [PATCH 014/327] refactor: Improve discover filter route error handling and refine weekly newsletter match deduplication and user role display. --- protocol/src/routes/discover.ts | 89 ++++++++++++++++----------------- 1 file changed, 44 insertions(+), 45 deletions(-) diff --git a/protocol/src/routes/discover.ts b/protocol/src/routes/discover.ts index 02b0ea075..35d6de71b 100644 --- a/protocol/src/routes/discover.ts +++ b/protocol/src/routes/discover.ts @@ -13,9 +13,8 @@ import { getUploadsPath } from '../lib/paths'; import { processUploadedFiles } from '../lib/uploads'; import { crawlLinksForIndex } from '../lib/crawl/web_crawler'; import { analyzeObjects } from '../agents/core/intent_inferrer'; -import { IntentService } from '../lib/intent-service'; -import { createUploadClient, cleanupUploadedFiles } from '../lib/uploads'; -import { DiscoverResponse, DiscoverFilters, DiscoveryRequestResponse } from '../types'; +import { CreatedIntent, IntentService } from '../lib/intent-service'; +import { createUploadClient } from '../lib/uploads'; const router = Router(); @@ -61,7 +60,7 @@ router.post('/new', [body('payload').optional().isString()], async (req: AuthRequest, res: Response) => { const uploadedFiles = req.files as Express.Multer.File[]; - + try { const errors = validationResult(req); if (!errors.isEmpty()) { @@ -84,7 +83,7 @@ router.post('/new', const savedFileIds: string[] = []; const savedLinkIds: string[] = []; let combinedContent = ''; - + // 1. Save uploaded files to database if (uploadedFiles && uploadedFiles.length > 0) { for (const file of uploadedFiles) { @@ -125,7 +124,7 @@ router.post('/new', for (const url of urls) { const linkReference = `[LINK_${linkCounter}]`; instructionText = instructionText.replace(url, linkReference); - + try { // Save link to database const linkRecord = await db.insert(indexLinks) @@ -139,20 +138,20 @@ router.post('/new', try { const crawlResult = await crawlLinksForIndex([url]); const crawledFiles = crawlResult.files || []; - + if (crawledFiles.length > 0 && crawledFiles[0].content) { // Save crawled content to file const linksDir = getUploadsPath('links', userId); if (!fs.existsSync(linksDir)) fs.mkdirSync(linksDir, { recursive: true }); const filepath = path.join(linksDir, `${linkRecord[0].id}.md`); await fs.promises.writeFile(filepath, crawledFiles[0].content); - + combinedContent += `Content of ${linkReference}\n${crawledFiles[0].content.substring(0, 5000)}\n\n`; - + await db.update(indexLinks) .set({ lastSyncAt: new Date(), lastStatus: 'ok', lastError: null }) .where(eq(indexLinks.id, linkRecord[0].id)); - + console.log(`โœ… URL crawled successfully: ${url}`); } else { await db.update(indexLinks) @@ -180,16 +179,16 @@ router.post('/new', // 4. 
Generate intents from combined content let generatedIntents: any[] = []; - + // If payload is short, no files, and no URLs, create intent directly const hasFiles = uploadedFiles && uploadedFiles.length > 0; const hasUrls = savedLinkIds.length > 0; const isShortPayload = payload && payload.length < 100; - + if (isShortPayload && !hasFiles && !hasUrls) { console.log(`๐Ÿ“ Creating intent directly (short payload, no attachments/URLs)`); try { - const createdIntent = await IntentService.createIntent({ + const createdIntent: CreatedIntent = await IntentService.createIntent({ payload: payload.trim(), userId: userId, sourceId: undefined, @@ -206,7 +205,7 @@ router.post('/new', } } else if (combinedContent.trim()) { console.log(`๐Ÿค– Generating intents from ${combinedContent.length} characters`); - + // Create objects for intent generation const contentObjects = []; if (combinedContent) { @@ -220,10 +219,10 @@ router.post('/new', 1, // generate 1 intent 60000 // 60 second timeout ); - + if (intentResult.success && intentResult.intents.length > 0) { console.log(`โœ… Generated ${intentResult.intents.length} intents`); - + // Save each generated intent to database using IntentService for (const generatedIntent of intentResult.intents) { // Determine source: use first file if exists, otherwise first link @@ -326,7 +325,7 @@ Response:{ */ // ๐Ÿš€ Route: Get paired users' staked intents -router.post("/filter", +router.post("/filter", authenticatePrivy, [ body('intentIds').optional().isArray(), @@ -362,34 +361,34 @@ router.post("/filter", const authenticatedUserId = req.user!.id; - // Use the library function to discover users - const { results: formattedResults, pagination } = await discoverUsers({ - authenticatedUserId, - intentIds, - userIds, - indexIds, - sources, - excludeDiscovered, - page, - limit - }); - - return res.json({ - results: formattedResults, - pagination, - filters: { - intentIds: intentIds || null, - userIds: userIds || null, - indexIds: indexIds || null, - sources: sources || null, - excludeDiscovered: excludeDiscovered - } - }); - } catch (err) { - console.error("Discover filter error:", err); - return res.status(500).json({ error: "Failed to fetch discovery data" }); - } -}); + // Use the library function to discover users + const { results: formattedResults, pagination } = await discoverUsers({ + authenticatedUserId, + intentIds, + userIds, + indexIds, + sources, + excludeDiscovered, + page, + limit + }); + + return res.json({ + results: formattedResults, + pagination, + filters: { + intentIds: intentIds || null, + userIds: userIds || null, + indexIds: indexIds || null, + sources: sources || null, + excludeDiscovered: excludeDiscovered + } + }); + } catch (err) { + console.error("Discover filter error:", err); + return res.status(500).json({ error: "Failed to fetch discovery data" }); + } + }); From 7ca119c84eec57d332b2107aff61d3f39fd3a261 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yank=C4=B1=20Ekin=20Y=C3=BCksel?= Date: Tue, 2 Dec 2025 15:52:52 +0300 Subject: [PATCH 015/327] feat: prevent duplicate matches, improve user role display, and add an email testing control for the weekly newsletter job. 
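
The duplicate-match prevention added in this commit amounts to tracking, per recipient, which counterpart user IDs have already produced a match. A minimal sketch of that pattern follows; the types are illustrative (the real job also carries the counterpart's role and the stake's reasoning into the email template).

```typescript
// Sketch only: keep at most one match per counterpart user for each recipient.
interface Match {
  name: string;
  role: string;
  reasoning: string;
}

interface RecipientMatches {
  matches: Match[];
  matchedUserIds: Set<string>;
}

function addMatch(
  byRecipient: Map<string, RecipientMatches>,
  recipientId: string,
  counterpartId: string,
  match: Match
): void {
  let entry = byRecipient.get(recipientId);
  if (!entry) {
    entry = { matches: [], matchedUserIds: new Set() };
    byRecipient.set(recipientId, entry);
  }
  // Skip counterparts that already appear in this recipient's match list.
  if (entry.matchedUserIds.has(counterpartId)) return;
  entry.matches.push(match);
  entry.matchedUserIds.add(counterpartId);
}
```

Because several stakes in the same week can pair the same two users, keying the dedup on the counterpart's user ID (rather than the stake ID) is what keeps a person from showing up twice in one recipient's newsletter.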
--- protocol/src/jobs/weekly-newsletter.ts | 37 +++++++++++++++----------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/protocol/src/jobs/weekly-newsletter.ts b/protocol/src/jobs/weekly-newsletter.ts index d3d973dae..81a68a325 100644 --- a/protocol/src/jobs/weekly-newsletter.ts +++ b/protocol/src/jobs/weekly-newsletter.ts @@ -53,7 +53,6 @@ async function hasConnectionEvent(user1Id: string, user2Id: string) { export async function sendWeeklyNewsletter(now: Date = new Date()) { console.log('Starting weekly newsletter job...'); - try { // Optimization: Only run if it's possible to be Monday 9 AM anywhere on Earth // Window: Sunday 19:00 UTC (UTC+14) to Monday 21:00 UTC (UTC-12) @@ -78,7 +77,7 @@ export async function sendWeeklyNewsletter(now: Date = new Date()) { console.log(`Found ${recentStakes.length} stakes from the last 7 days.`); - const userMatches = new Map(); + const userMatches = new Map }>(); for (const stake of recentStakes) { const participants = await getUsersForStake(stake.id, stake.intents); @@ -96,23 +95,31 @@ export async function sendWeeklyNewsletter(now: Date = new Date()) { // 3. Add to user matches // For P1, match is P2 if (!userMatches.has(p1.userId)) { - userMatches.set(p1.userId, { user: p1, matches: [] }); + userMatches.set(p1.userId, { user: p1, matches: [], matchedUserIds: new Set() }); + } + const p1Data = userMatches.get(p1.userId)!; + if (!p1Data.matchedUserIds.has(p2.userId)) { + p1Data.matches.push({ + name: p2.userName, + role: p2.userRole || 'Index User', // Removed aggressive truncation + reasoning: stake.reasoning + }); + p1Data.matchedUserIds.add(p2.userId); } - userMatches.get(p1.userId)!.matches.push({ - name: p2.userName, - role: p2.userRole?.split('\n')[0].substring(0, 50) || 'Index User', // Simple truncation for role - reasoning: stake.reasoning - }); // For P2, match is P1 if (!userMatches.has(p2.userId)) { - userMatches.set(p2.userId, { user: p2, matches: [] }); + userMatches.set(p2.userId, { user: p2, matches: [], matchedUserIds: new Set() }); + } + const p2Data = userMatches.get(p2.userId)!; + if (!p2Data.matchedUserIds.has(p1.userId)) { + p2Data.matches.push({ + name: p1.userName, + role: p1.userRole || 'Index User', // Removed aggressive truncation + reasoning: stake.reasoning + }); + p2Data.matchedUserIds.add(p1.userId); } - userMatches.get(p2.userId)!.matches.push({ - name: p1.userName, - role: p1.userRole?.split('\n')[0].substring(0, 50) || 'Index User', - reasoning: stake.reasoning - }); } // 4. Send emails @@ -145,7 +152,7 @@ export async function sendWeeklyNewsletter(now: Date = new Date()) { const template = weeklyNewsletterTemplate(data.user.userName, data.matches); - if (process.env.NODE_ENV === 'development') { + if (process.env.NODE_ENV === 'development' && process.env.ENABLE_EMAIL_TESTING !== 'true') { console.log(`[DEV] Would send email to ${data.user.userEmail}:`); console.log(`Subject: ${template.subject}`); console.log(`Body preview: ${template.text.substring(0, 200)}...`); From 6c990e065d05f40d3717146316625677c881a719 Mon Sep 17 00:00:00 2001 From: seref <1573640+serefyarar@users.noreply.github.com> Date: Tue, 2 Dec 2025 15:52:12 -0500 Subject: [PATCH 016/327] Remove protocol connections doc and hide 'Create Index' button Deleted the FRONTEND_PROTOCOL_CONNECTIONS.md documentation file. In the Header component, commented out the 'Create Index' button to hide it from the dropdown menu. 
--- FRONTEND_PROTOCOL_CONNECTIONS.md | 892 ----------------------------- frontend/src/components/Header.tsx | 4 +- 2 files changed, 2 insertions(+), 894 deletions(-) delete mode 100644 FRONTEND_PROTOCOL_CONNECTIONS.md diff --git a/FRONTEND_PROTOCOL_CONNECTIONS.md b/FRONTEND_PROTOCOL_CONNECTIONS.md deleted file mode 100644 index 66f1ba901..000000000 --- a/FRONTEND_PROTOCOL_CONNECTIONS.md +++ /dev/null @@ -1,892 +0,0 @@ -# Frontend-Protocol Connections Documentation - -This document maps all connections between frontend services and protocol API routes, including endpoints, request/response types, and shared domain models. - -**Analysis Date:** November 26, 2025 -**Frontend Services Analyzed:** 7 -**Protocol Routes Analyzed:** 15 -**Total Connections Identified:** 58 - ---- - -## Table of Contents - -1. [Admin Service](#1-admin-service) -2. [Connections Service](#2-connections-service) -3. [Indexes Service](#3-indexes-service) -4. [Integrations Service](#4-integrations-service) -5. [Links Service](#5-links-service) -6. [LMSR Service](#6-lmsr-service) -7. [Synthesis Service](#7-synthesis-service) -8. [Shared Type Definitions](#shared-type-definitions) -9. [Type Mismatches & Issues](#type-mismatches--issues) - ---- - -## 1. Admin Service - -**Frontend File:** [`frontend/src/services/admin.ts`](frontend/src/services/admin.ts:1) -**Protocol File:** [`protocol/src/routes/admin.ts`](protocol/src/routes/admin.ts:1) - -### 1.1 Get Pending Connections - -**Frontend Method:** [`getPendingConnections(indexId: string)`](frontend/src/services/admin.ts:4) -**Protocol Endpoint:** `GET /admin/:indexId/pending-connections` -**Protocol Handler:** [`Line 12-128`](protocol/src/routes/admin.ts:12) - -**Request:** -- Path Parameter: `indexId` (UUID) - -**Response Type (Frontend):** -```typescript -{ - connections: Array<{ - id: string; - initiator: { - id: string; - name: string; - avatar: string | null; - }; - receiver: { - id: string; - name: string; - avatar: string | null; - }; - createdAt: string; - }>; -} -``` - -**Response Type (Protocol):** -```typescript -{ - connections: Array<{ - id: string; - initiator: { id: string; name: string; avatar: string }; - receiver: { id: string; name: string; avatar: string }; - createdAt: Date; - }>; -} -``` - -### 1.2 Approve Connection - -**Frontend Method:** [`approveConnection(indexId: string, initiatorUserId: string, receiverUserId: string)`](frontend/src/services/admin.ts:24) -**Protocol Endpoint:** `POST /admin/:indexId/approve-connection` -**Protocol Handler:** [`Line 132-211`](protocol/src/routes/admin.ts:132) - -**Request:** -- Path Parameter: `indexId` (UUID) -- Body: `{ initiatorUserId: string, receiverUserId: string }` - -**Response Type (Frontend & Protocol):** -```typescript -{ - message: string; - event: { - id: string; - initiatorUserId: string; - receiverUserId: string; - eventType: string; - createdAt: string; - }; -} -``` - -### 1.3 Deny Connection - -**Frontend Method:** [`denyConnection(indexId: string, initiatorUserId: string, receiverUserId: string)`](frontend/src/services/admin.ts:41) -**Protocol Endpoint:** `POST /admin/:indexId/deny-connection` -**Protocol Handler:** [`Line 215-294`](protocol/src/routes/admin.ts:215) - -**Request:** -- Path Parameter: `indexId` (UUID) -- Body: `{ initiatorUserId: string, receiverUserId: string }` - -**Response Type:** Same as Approve Connection - -### 1.4 Get Pending Count - -**Frontend Method:** [`getPendingCount(indexId: string)`](frontend/src/services/admin.ts:58) -**Protocol Endpoint:** `GET 
/admin/:indexId/pending-count` -**Protocol Handler:** [`Line 298-382`](protocol/src/routes/admin.ts:298) - -**Request:** -- Path Parameter: `indexId` (UUID) - -**Response Type (Frontend & Protocol):** -```typescript -{ - count: number; -} -``` - ---- - -## 2. Connections Service - -**Frontend File:** [`frontend/src/services/connections.ts`](frontend/src/services/connections.ts:1) -**Protocol File:** [`protocol/src/routes/connections.ts`](protocol/src/routes/connections.ts:1) - -### 2.1 Get Connections by User - -**Frontend Method:** [`getConnectionsByUser(type: 'inbox' | 'pending' | 'history', indexIds?: string[])`](frontend/src/services/connections.ts:9) -**Protocol Endpoint:** `POST /connections/by-user` -**Protocol Handler:** [`Line 14-148`](protocol/src/routes/connections.ts:14) - -**Request:** -```typescript -{ - type: 'inbox' | 'pending' | 'history'; - indexIds?: string[]; -} -``` - -**Response Type (Frontend - defined in types.ts):** -```typescript -// ConnectionsByUserResponse -{ - connections: Array<{ - user: { - id: string; - name: string; - avatar: string | null; - }; - status: string; - isInitiator: boolean; - lastUpdated: string; - }>; -} -``` - -### 2.2 Connection Actions - -**Frontend Methods:** -- [`requestConnection(targetUserId: string)`](frontend/src/services/connections.ts:22) -- [`skipConnection(targetUserId: string)`](frontend/src/services/connections.ts:29) -- [`acceptConnection(targetUserId: string)`](frontend/src/services/connections.ts:36) -- [`declineConnection(targetUserId: string)`](frontend/src/services/connections.ts:43) -- [`cancelConnection(targetUserId: string)`](frontend/src/services/connections.ts:50) - -**Protocol Endpoint:** `POST /connections/actions` -**Protocol Handler:** [`Line 152-277`](protocol/src/routes/connections.ts:152) - -**Request:** -```typescript -{ - targetUserId: string; - action: 'REQUEST' | 'SKIP' | 'CANCEL' | 'ACCEPT' | 'DECLINE'; -} -``` - -**Response Type (Frontend - defined in types.ts):** -```typescript -// ConnectionEvent -{ - event: { - id: string; - initiatorUserId: string; - receiverUserId: string; - eventType: string; - createdAt: string; - }; -} -``` - ---- - -## 3. 
Indexes Service - -**Frontend File:** [`frontend/src/services/indexes.ts`](frontend/src/services/indexes.ts:1) -**Protocol File:** [`protocol/src/routes/indexes.ts`](protocol/src/routes/indexes.ts:1) - -### 3.1 Get Indexes - -**Frontend Method:** [`getIndexes(page: number, limit: number)`](frontend/src/services/indexes.ts:34) -**Protocol Endpoint:** `GET /indexes?page=&limit=` -**Protocol Handler:** [`Line 134-253`](protocol/src/routes/indexes.ts:134) - -**Response Type (Frontend - uses PaginatedResponse):** -```typescript -PaginatedResponse -``` - -### 3.2 Discover Public Indexes - -**Frontend Method:** [`discoverPublicIndexes(page: number, limit: number)`](frontend/src/services/indexes.ts:40) -**Protocol Endpoint:** `GET /indexes/discover/public?page=&limit=` -**Protocol Handler:** [`Line 29-130`](protocol/src/routes/indexes.ts:29) - -**Response Type:** -```typescript -PaginatedResponse -``` - -### 3.3 Get Index - -**Frontend Method:** [`getIndex(id: string)`](frontend/src/services/indexes.ts:46) -**Protocol Endpoint:** `GET /indexes/:id` -**Protocol Handler:** [`Line 306-392`](protocol/src/routes/indexes.ts:306) - -**Response Type:** -```typescript -{ - index: Index; -} -``` - -### 3.4 Get Index by Share Code - -**Frontend Method:** [`getIndexByShareCode(code: string)`](frontend/src/services/indexes.ts:55) -**Protocol Endpoint:** `GET /indexes/share/:code` -**Protocol Handler:** [`Line 1281-1339`](protocol/src/routes/indexes.ts:1281) - -### 3.5 Get Public Index by ID - -**Frontend Method:** [`getPublicIndexById(id: string)`](frontend/src/services/indexes.ts:64) -**Protocol Endpoint:** `GET /indexes/public/:id` -**Protocol Handler:** [`Line 1222-1277`](protocol/src/routes/indexes.ts:1222) - -### 3.6 Create Index - -**Frontend Method:** [`createIndex(data: CreateIndexRequest)`](frontend/src/services/indexes.ts:73) -**Protocol Endpoint:** `POST /indexes` -**Protocol Handler:** [`Line 396-475`](protocol/src/routes/indexes.ts:396) - -**Request Type:** -```typescript -CreateIndexRequest { - title: string; - prompt?: string; - joinPolicy?: 'anyone' | 'invite_only'; -} -``` - -### 3.7 Update Index - -**Frontend Method:** [`updateIndex(id: string, data: UpdateIndexRequest)`](frontend/src/services/indexes.ts:82) -**Protocol Endpoint:** `PUT /indexes/:id` -**Protocol Handler:** [`Line 479-569`](protocol/src/routes/indexes.ts:479) - -**Request Type:** -```typescript -UpdateIndexRequest { - title?: string; - prompt?: string; - permissions?: { - joinPolicy?: 'anyone' | 'invite_only'; - allowGuestVibeCheck?: boolean; - }; -} -``` - -### 3.8 Delete Index - -**Frontend Method:** [`deleteIndex(id: string)`](frontend/src/services/indexes.ts:91) -**Protocol Endpoint:** `DELETE /indexes/:id` -**Protocol Handler:** [`Line 574-602`](protocol/src/routes/indexes.ts:574) - -### 3.9 Member Management - -**Frontend Methods:** -- [`addMember(indexId: string, userId: string, permissions: string[])`](frontend/src/services/indexes.ts:97) -- [`removeMember(indexId: string, userId: string)`](frontend/src/services/indexes.ts:109) -- [`updateMemberPermissions(indexId: string, userId: string, permissions: string[])`](frontend/src/services/indexes.ts:114) -- [`getMembers(indexId: string, searchQuery?: string)`](frontend/src/services/indexes.ts:125) - -**Protocol Endpoints:** -- `POST /indexes/:id/members` [`Line 606-707`](protocol/src/routes/indexes.ts:606) -- `DELETE /indexes/:id/members/:userId` [`Line 711-756`](protocol/src/routes/indexes.ts:711) -- `PATCH /indexes/:id/members/:userId` [`Line 
801-901`](protocol/src/routes/indexes.ts:801) -- `GET /indexes/:id/members` [`Line 1068-1113`](protocol/src/routes/indexes.ts:1068) - -**Member Type:** -```typescript -{ - id: string; - name: string; - email: string; - avatar?: string; - permissions: string[]; - createdAt?: string; - updatedAt?: string; -} -``` - -### 3.10 Permissions & Access - -**Frontend Methods:** -- [`updatePermissions(indexId: string, permissions)`](frontend/src/services/indexes.ts:137) -- [`regenerateInvitationLink(indexId: string)`](frontend/src/services/indexes.ts:146) -- [`searchUsers(query: string, indexId?: string)`](frontend/src/services/indexes.ts:156) -- [`joinIndex(indexId: string)`](frontend/src/services/indexes.ts:166) -- [`acceptInvitation(code: string)`](frontend/src/services/indexes.ts:181) - -**Protocol Endpoints:** -- `PATCH /indexes/:id/permissions` [`Line 905-993`](protocol/src/routes/indexes.ts:905) -- `PATCH /indexes/:id/regenerate-invitation` [`Line 997-1064`](protocol/src/routes/indexes.ts:997) -- `GET /indexes/search-users` [`Line 257-302`](protocol/src/routes/indexes.ts:257) -- `POST /indexes/:id/join` [`Line 1343-1434`](protocol/src/routes/indexes.ts:1343) -- `POST /indexes/invitation/:code/accept` [`Line 1439-1550`](protocol/src/routes/indexes.ts:1439) - -### 3.11 Member Intents - -**Frontend Methods:** -- [`getMemberIntents(indexId: string)`](frontend/src/services/indexes.ts:197) -- [`removeMemberIntent(indexId: string, intentId: string)`](frontend/src/services/indexes.ts:223) - -**Protocol Endpoints:** -- `GET /indexes/:id/member-intents` [`Line 1726-1764`](protocol/src/routes/indexes.ts:1726) -- `DELETE /indexes/:id/member-intents/:intentId` [`Line 1913-1962`](protocol/src/routes/indexes.ts:1913) - ---- - -## 4. Integrations Service - -**Frontend File:** [`frontend/src/services/integrations.ts`](frontend/src/services/integrations.ts:1) -**Protocol File:** [`protocol/src/routes/integrations.ts`](protocol/src/routes/integrations.ts:1) - -### 4.1 Get Integrations - -**Frontend Method:** [`getIntegrations(indexId?: string)`](frontend/src/services/integrations.ts:67) -**Protocol Endpoint:** `GET /integrations?indexId=` -**Protocol Handler:** [`Line 30-108`](protocol/src/routes/integrations.ts:30) - -**Response Type:** -```typescript -{ - integrations: IntegrationResponse[]; - availableTypes: AvailableIntegrationType[]; -} - -// IntegrationResponse -{ - id: string; - type: string; - name: string; - connected: boolean; - connectedAt?: string | null; - lastSyncAt?: string | null; - indexId?: string | null; - status?: string; -} - -// AvailableIntegrationType -{ - type: string; - name: string; - toolkit: string; -} -``` - -### 4.2 Connect Integration - -**Frontend Method:** [`connectIntegration(integrationType: string, data: ConnectIntegrationRequest)`](frontend/src/services/integrations.ts:79) -**Protocol Endpoint:** `POST /integrations/connect/:integrationType` -**Protocol Handler:** [`Line 112-246`](protocol/src/routes/integrations.ts:112) - -**Request Type:** -```typescript -ConnectIntegrationRequest { - indexId?: string; - enableUserAttribution?: boolean; -} -``` - -**Response Type:** -```typescript -ConnectIntegrationResponse { - redirectUrl: string; - integrationId: string; -} -``` - -### 4.3 Integration Status - -**Frontend Method:** [`getIntegrationStatus(integrationId: string)`](frontend/src/services/integrations.ts:87) -**Protocol Endpoint:** `GET /integrations/:integrationId/status` -**Protocol Handler:** [`Line 250-342`](protocol/src/routes/integrations.ts:250) - -**Response Type:** 
-```typescript -IntegrationStatusResponse { - status: 'pending' | 'connected'; - connectedAt?: string; -} -``` - -### 4.4 Disconnect Integration - -**Frontend Method:** [`disconnectIntegration(integrationId: string)`](frontend/src/services/integrations.ts:92) -**Protocol Endpoint:** `DELETE /integrations/:integrationId` -**Protocol Handler:** [`Line 346-413`](protocol/src/routes/integrations.ts:346) - -### 4.5 Directory Sync - -**Frontend Methods:** -- [`getDirectorySources(integrationId: string)`](frontend/src/services/integrations.ts:97) -- [`getDirectorySourceSchema(integrationId: string, sourceId: string, subSourceId?: string)`](frontend/src/services/integrations.ts:101) -- [`getDirectoryConfig(integrationId: string)`](frontend/src/services/integrations.ts:106) -- [`saveDirectoryConfig(integrationId: string, config)`](frontend/src/services/integrations.ts:110) -- [`syncDirectory(integrationId: string)`](frontend/src/services/integrations.ts:114) - -**Protocol Endpoints:** -- `GET /integrations/:integrationId/directory/sources` [`Line 419-475`](protocol/src/routes/integrations.ts:419) -- `GET /integrations/:integrationId/directory/sources/:sourceId/schema` [`Line 479-539`](protocol/src/routes/integrations.ts:479) -- `GET /integrations/:integrationId/directory/config` [`Line 543-582`](protocol/src/routes/integrations.ts:543) -- `POST /integrations/:integrationId/directory/config` [`Line 586-655`](protocol/src/routes/integrations.ts:586) -- `POST /integrations/:integrationId/directory/sync` [`Line 659-727`](protocol/src/routes/integrations.ts:659) - -**Directory Sync Types:** -```typescript -DirectorySyncConfig { - enabled: boolean; - source: { - id: string; - name: string; - subId?: string; - subName?: string; - }; - columnMappings: { - email: string; - name?: string; - intro?: string; - location?: string; - twitter?: string; - linkedin?: string; - github?: string; - website?: string; - }; - lastSyncAt?: string; - lastSyncStatus?: 'success' | 'error' | 'partial'; - lastSyncError?: string; - memberCount?: number; -} -``` - ---- - -## 5. 
Links Service - -**Frontend File:** [`frontend/src/services/links.ts`](frontend/src/services/links.ts:1) -**Protocol File:** [`protocol/src/routes/links.ts`](protocol/src/routes/links.ts:1) - -### 5.1 Get Links - -**Frontend Method:** [`getLinks()`](frontend/src/services/links.ts:24) -**Protocol Endpoint:** `GET /links` -**Protocol Handler:** [`Line 65-74`](protocol/src/routes/links.ts:65) - -**Response Type:** -```typescript -{ - links: LinkRecord[]; -} - -// LinkRecord -{ - id: string; - url: string; - createdAt?: string; - lastSyncAt?: string | null; - lastStatus?: string | null; - lastError?: string | null; - contentUrl?: string; -} -``` - -### 5.2 Create Link - -**Frontend Method:** [`createLink(url: string)`](frontend/src/services/links.ts:30) -**Protocol Endpoint:** `POST /links` -**Protocol Handler:** [`Line 78-103`](protocol/src/routes/links.ts:78) - -**Request:** -```typescript -{ - url: string; -} -``` - -**Response Type:** -```typescript -{ - link: LinkRecord; -} -``` - -### 5.3 Delete Link - -**Frontend Method:** [`deleteLink(linkId: string)`](frontend/src/services/links.ts:36) -**Protocol Endpoint:** `DELETE /links/:linkId` -**Protocol Handler:** [`Line 107-119`](protocol/src/routes/links.ts:107) - -### 5.4 Get Link Content - -**Frontend Method:** [`getLinkContent(linkId: string)`](frontend/src/services/links.ts:41) -**Protocol Endpoint:** `GET /links/:linkId/content` -**Protocol Handler:** [`Line 122-141`](protocol/src/routes/links.ts:122) - -**Response Type:** -```typescript -LinkContentResponse { - content?: string; - pending?: boolean; - url?: string; - lastStatus?: string | null; - lastSyncAt?: string | null; -} -``` - ---- - -## 6. LMSR Service - -**Frontend File:** [`frontend/src/services/lmsr.ts`](frontend/src/services/lmsr.ts:1) -**Protocol File:** None (Frontend-only simulation) - -This service implements client-side LMSR (Logarithmic Market Scoring Rule) market calculations for the simulation/demo feature. It does not make any API calls to the protocol. - -**Key Types:** -```typescript -MarketState { - intentPairId: string; - q: number; - price: number; - liquidity: number; - volume: number; - yesShares: number; - noShares: number; -} - -MarketAction { - type: 'BUY' | 'SELL'; - amount: number; - agentId: string; - confidence: number; - outcome: 'YES' | 'NO'; -} - -Agent { - id: string; - name: string; - description: string; - icon: React.ReactNode; - color: string; - target: string[]; - budget: number; - stakedAmount?: number; - position?: 'YES' | 'NO'; - stakedIn?: string; - triggers?: Array<{ - type: string; - condition: (result: SearchResult) => boolean; - }>; - audience?: string[]; -} -``` - ---- - -## 7. 
Synthesis Service - -**Frontend File:** [`frontend/src/services/synthesis.ts`](frontend/src/services/synthesis.ts:1) -**Protocol File:** [`protocol/src/routes/synthesis.ts`](protocol/src/routes/synthesis.ts:1) - -### 7.1 Generate VibeCheck - -**Frontend Method:** [`generateVibeCheck(request: SynthesisRequest)`](frontend/src/services/synthesis.ts:23) -**Protocol Endpoint:** `POST /synthesis/vibecheck` -**Protocol Handler:** [`Line 14-79`](protocol/src/routes/synthesis.ts:14) - -**Request Type:** -```typescript -SynthesisRequest { - targetUserId: string; - initiatorId?: string; - intentIds?: string[]; - indexIds?: string[]; - options?: { - characterLimit?: number; - [key: string]: unknown; - }; -} -``` - -**Response Type:** -```typescript -SynthesisResponse { - synthesis: string; - targetUserId: string; - contextUserId: string; - connectingStakes: number; -} -``` - ---- - -## Shared Type Definitions - -### Core Domain Models - -#### Index -**Defined in:** [`frontend/src/lib/types.ts`](frontend/src/lib/types.ts:1) - -```typescript -Index { - id: string; - title: string; - prompt?: string | null; - permissions?: { - joinPolicy?: 'anyone' | 'invite_only'; - invitationLink?: { code: string } | null; - allowGuestVibeCheck?: boolean; - requireApproval?: boolean; - }; - createdAt: string; - updatedAt: string; - user?: { - id: string; - name: string; - avatar?: string | null; - }; - _count?: { - members: number; - files?: number; - }; -} -``` - -#### ConnectionEvent -**Defined in:** [`frontend/src/lib/types.ts`](frontend/src/lib/types.ts:1) - -```typescript -ConnectionEvent { - id: string; - initiatorUserId: string; - receiverUserId: string; - eventType: 'REQUEST' | 'ACCEPT' | 'DECLINE' | 'SKIP' | 'CANCEL' | 'OWNER_APPROVE' | 'OWNER_DENY'; - createdAt: string; -} -``` - -#### PaginatedResponse -**Defined in:** [`frontend/src/lib/types.ts`](frontend/src/lib/types.ts:1) - -```typescript -PaginatedResponse { - [data]: T[]; - pagination: { - current: number; - total: number; - count: number; - totalCount: number; - }; -} -``` - -#### APIResponse -**Defined in:** [`frontend/src/lib/types.ts`](frontend/src/lib/types.ts:1) - -```typescript -APIResponse { - [key: string]: T | unknown; -} -``` - -### Request Types - -#### CreateIndexRequest -```typescript -{ - title: string; - prompt?: string; - joinPolicy?: 'anyone' | 'invite_only'; -} -``` - -#### UpdateIndexRequest -```typescript -{ - title?: string; - prompt?: string; - permissions?: { - joinPolicy?: 'anyone' | 'invite_only'; - allowGuestVibeCheck?: boolean; - }; -} -``` - ---- - -## Type Mismatches & Issues - -### 1. Date vs String Inconsistency - -**Issue:** Protocol returns `Date` objects for timestamps, but frontend expects `string`. - -**Affected Fields:** -- `createdAt`, `updatedAt`, `archivedAt`, `connectedAt`, `lastSyncAt` - -**Example:** -- **Protocol:** [`admin.ts:118`](protocol/src/routes/admin.ts:118) returns `createdAt: event.createdAt` (Date) -- **Frontend:** [`admin.ts:18`](frontend/src/services/admin.ts:18) expects `createdAt: string` - -**Recommendation:** Use JSON serialization which automatically converts Date to ISO string, or explicitly convert dates in protocol responses. - -### 2. Member Type Mismatch - -**Issue:** Frontend `Member` interface includes `email` field, but protocol routes don't always return it. 
- -**Frontend Definition:** [`indexes.ts:15`](frontend/src/services/indexes.ts:15) -```typescript -interface Member { - id: string; - name: string; - email: string; // โš ๏ธ Not always returned by protocol - avatar?: string; - permissions: string[]; -} -``` - -**Protocol:** [`indexes.ts:1095-1106`](protocol/src/routes/indexes.ts:1095) returns members without email field. - -**Recommendation:** Make `email` optional in frontend or ensure protocol always includes it. - -### 3. Avatar Field Nullability - -**Issue:** Inconsistent handling of `avatar` field - sometimes `string | null`, sometimes `string`. - -**Frontend:** Uses `avatar: string | null` in some places but `avatar?: string` in others -**Protocol:** Returns `avatar: string` from database but can be null - -**Recommendation:** Standardize to `avatar?: string | null` across both layers. - -### 4. ConnectionsByUserResponse Type - -**Issue:** Frontend defines this type in `types.ts` but uses inline types in service methods. - -**Frontend:** [`connections.ts:3`](frontend/src/services/connections.ts:3) imports type but also uses inline type at [`Line 9-18`](frontend/src/services/connections.ts:9) - -**Recommendation:** Use the imported type consistently. - -### 5. Pagination Response Structure - -**Issue:** Some endpoints return data under `indexes`, others under generic keys. - -**Examples:** -- `GET /indexes` returns `{ indexes: [...], pagination: {...} }` -- `GET /intents` returns `{ intents: [...], pagination: {...} }` - -**Frontend:** Uses generic `PaginatedResponse` which expects data under array key - -**Recommendation:** Standardize pagination response structure across all endpoints. - -### 6. Integration Config Type - -**Issue:** `DirectorySyncConfig` is defined in protocol schema but also redefined in frontend. - -**Protocol:** [`protocol/src/routes/integrations.ts:17`](protocol/src/routes/integrations.ts:17) imports from schema -**Frontend:** [`integrations.ts:35`](frontend/src/services/integrations.ts:35) defines inline - -**Recommendation:** Share type definitions between frontend and protocol layers. - ---- - -## Summary Statistics - -### Endpoints by Service - -| Service | Endpoints | Protocol Routes | -|---------|-----------|----------------| -| Admin | 4 | 4 | -| Connections | 6 | 2 | -| Indexes | 22 | 22 | -| Integrations | 9 | 9 | -| Links | 4 | 4 | -| LMSR | 0 | 0 | -| Synthesis | 1 | 1 | -| **Total** | **46** | **42** | - -### Type Definitions - -- **Shared Domain Models:** 8 (Index, ConnectionEvent, Member, IntegrationResponse, etc.) -- **Request Types:** 6 (CreateIndexRequest, UpdateIndexRequest, SynthesisRequest, etc.) -- **Response Types:** 4 (PaginatedResponse, APIResponse, ConnectionsByUserResponse, etc.) -- **Frontend-Only Types:** 4 (LMSR market types) -- **Type Mismatches Identified:** 6 - -### Files Analyzed - -**Frontend Services (7):** -1. [`admin.ts`](frontend/src/services/admin.ts:1) - 69 lines -2. [`connections.ts`](frontend/src/services/connections.ts:1) - 61 lines -3. [`indexes.ts`](frontend/src/services/indexes.ts:1) - 263 lines -4. [`integrations.ts`](frontend/src/services/integrations.ts:1) - 123 lines -5. [`links.ts`](frontend/src/services/links.ts:1) - 50 lines -6. [`lmsr.ts`](frontend/src/services/lmsr.ts:1) - 417 lines -7. [`synthesis.ts`](frontend/src/services/synthesis.ts:1) - 32 lines - -**Protocol Routes (15):** -1. [`admin.ts`](protocol/src/routes/admin.ts:1) - 386 lines -2. [`agents.ts`](protocol/src/routes/agents.ts:1) - 205 lines (not used by frontend services) -3. 
[`auth.ts`](protocol/src/routes/auth.ts:1) - 160 lines (used by auth context) -4. [`connections.ts`](protocol/src/routes/connections.ts:1) - 331 lines -5. [`discover.ts`](protocol/src/routes/discover.ts:1) - 395 lines (used by discovery form) -6. [`files.ts`](protocol/src/routes/files.ts:1) - 262 lines (used directly, no service wrapper) -7. [`indexes.ts`](protocol/src/routes/indexes.ts:1) - 1965 lines -8. [`integrations.ts`](protocol/src/routes/integrations.ts:1) - 730 lines -9. [`intents.ts`](protocol/src/routes/intents.ts:1) - 701 lines (used directly, no service wrapper) -10. [`links.ts`](protocol/src/routes/links.ts:1) - 143 lines -11. [`queue.ts`](protocol/src/routes/queue.ts:1) - 112 lines (used for queue status) -12. [`sync.ts`](protocol/src/routes/sync.ts:1) - 29 lines (used for manual sync) -13. [`synthesis.ts`](protocol/src/routes/synthesis.ts:1) - 81 lines -14. [`upload.ts`](protocol/src/routes/upload.ts:1) - 48 lines (avatar uploads) -15. [`users.ts`](protocol/src/routes/users.ts:1) - 145 lines (user profile management) - ---- - -## Recommendations - -### 1. Type Safety Improvements - -Create a shared type package between frontend and protocol: -- Extract common types to `@shared/types` package -- Use TypeScript's type generation from protocol to frontend -- Implement runtime type validation with libraries like Zod - -### 2. API Response Standardization - -Standardize all API responses to follow consistent structure: -```typescript -{ - data: T; - pagination?: PaginationInfo; - meta?: ResponseMeta; -} -``` - -### 3. Date Serialization - -Implement consistent date handling: -- Protocol: Use `.toISOString()` for all dates before returning -- Frontend: Parse ISO strings to Date objects when needed -- Consider using a date library like `date-fns` for consistency - -### 4. Documentation - -- Add JSDoc comments to all service methods -- Include request/response examples -- Document error responses -- Create OpenAPI/Swagger specification - -### 5. Testing - -- Add integration tests for each service-route connection -- Implement contract testing to catch type mismatches -- Add E2E tests for critical user flows - ---- - -**Document Version:** 1.0 -**Last Updated:** November 26, 2025 \ No newline at end of file diff --git a/frontend/src/components/Header.tsx b/frontend/src/components/Header.tsx index 3579493bf..4a074efcf 100644 --- a/frontend/src/components/Header.tsx +++ b/frontend/src/components/Header.tsx @@ -240,7 +240,7 @@ export default function Header({ showNavigation = true, onToggleSidebar, isSideb My Library - + */}
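
The removed FRONTEND_PROTOCOL_CONNECTIONS.md above recommends serializing protocol `Date` values to ISO strings before they reach the frontend. A minimal sketch of that recommendation, assuming a generic helper applied at the API boundary; `toIsoDates` is a hypothetical name, not an existing utility in the repo.

```typescript
// Sketch only: convert Date fields to ISO strings at the response boundary so the
// frontend's `createdAt: string` expectation holds. JSON.stringify already does this
// implicitly; an explicit pass just makes the contract visible in the route code.
function toIsoDates(row: Record<string, unknown>): Record<string, unknown> {
  const out: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(row)) {
    out[key] = value instanceof Date ? value.toISOString() : value;
  }
  return out;
}

// Hypothetical usage inside a route handler:
// res.json({ connections: events.map(toIsoDates) });
```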