VIBN Frontend for Coolify deployment

This commit is contained in:
2026-02-15 19:25:52 -08:00
commit 40bf8428cd
398 changed files with 76513 additions and 0 deletions

View File

@@ -0,0 +1,76 @@
/**
* Migration Script: Add Phase Tracking to Existing Projects
*
* Run with: node scripts/add-phase-tracking.js
*/
const admin = require('firebase-admin');
const fs = require('fs');
const path = require('path');
// Load environment variables
require('dotenv').config({ path: '.env.local' });
// Initialize Firebase Admin
// Initialize the Firebase Admin SDK exactly once, using service-account
// credentials from environment variables (.env.local). The private key
// is stored with literal "\n" sequences, so they are unescaped here.
if (!admin.apps.length) {
  admin.initializeApp({
    credential: admin.credential.cert({
      projectId: process.env.FIREBASE_PROJECT_ID,
      clientEmail: process.env.FIREBASE_CLIENT_EMAIL,
      privateKey: process.env.FIREBASE_PRIVATE_KEY?.replace(/\\n/g, '\n')
    })
  });
}

// Firestore handle shared by the migration below.
const db = admin.firestore();
/**
 * Backfill phase-tracking fields on every project document that does
 * not already have them. Idempotent: documents with `currentPhase`
 * set are left untouched.
 */
async function addPhaseTracking() {
  console.log('🔍 Finding projects without phase tracking...\n');

  const projectsSnapshot = await db.collection('projects').get();
  const counters = { updated: 0, skipped: 0 };

  for (const doc of projectsSnapshot.docs) {
    const data = doc.data();

    if (data.currentPhase) {
      // Already migrated — nothing to do for this project.
      console.log(`⏭️ Skipping ${data.name} (${doc.id}) - already has phase tracking`);
      counters.skipped += 1;
    } else {
      // Initialize every phase-tracking field in a single update.
      await doc.ref.update({
        currentPhase: 'gathering',
        phaseStatus: 'not_started',
        phaseData: {},
        phaseHistory: [],
        updatedAt: admin.firestore.FieldValue.serverTimestamp()
      });
      console.log(`✅ Updated ${data.name} (${doc.id}) - initialized with gathering phase`);
      counters.updated += 1;
    }
  }

  console.log(`\n📊 Migration Complete:`);
  console.log(` Updated: ${counters.updated} projects`);
  console.log(` Skipped: ${counters.skipped} projects`);
  console.log(` Total: ${projectsSnapshot.size} projects\n`);
}
// Run the migration and map success/failure onto the process exit code.
(async () => {
  try {
    await addPhaseTracking();
    console.log('✅ Migration successful!');
    process.exit(0);
  } catch (error) {
    console.error('❌ Migration failed:', error);
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,58 @@
/**
* Migration Script: Add Phase Tracking to Existing Projects
*
* Run with: npx tsx scripts/add-phase-tracking.ts
*/
import { getAdminDb } from '../lib/firebase/admin';
/**
 * Backfill phase-tracking fields on every project document that does
 * not already have them. Idempotent: documents with `currentPhase`
 * set are skipped.
 */
async function addPhaseTracking() {
  const adminDb = getAdminDb();
  console.log('🔍 Finding projects without phase tracking...\n');

  const projectsSnapshot = await adminDb.collection('projects').get();
  let updatedCount = 0;
  let skippedCount = 0;

  for (const doc of projectsSnapshot.docs) {
    const data = doc.data();

    if (data.currentPhase) {
      // Already migrated — leave the document untouched.
      console.log(`⏭️ Skipping ${data.name} (${doc.id}) - already has phase tracking`);
      skippedCount += 1;
    } else {
      // Initialize every phase-tracking field in a single update.
      await doc.ref.update({
        currentPhase: 'gathering',
        phaseStatus: 'not_started',
        phaseData: {},
        phaseHistory: [],
        updatedAt: new Date()
      });
      console.log(`✅ Updated ${data.name} (${doc.id}) - initialized with gathering phase`);
      updatedCount += 1;
    }
  }

  console.log(`\n📊 Migration Complete:`);
  console.log(` Updated: ${updatedCount} projects`);
  console.log(` Skipped: ${skippedCount} projects`);
  console.log(` Total: ${projectsSnapshot.size} projects\n`);
}
// Run the migration, mapping success/failure onto the process exit code.
void (async () => {
  try {
    await addPhaseTracking();
    console.log('✅ Migration successful!');
    process.exit(0);
  } catch (error) {
    console.error('❌ Migration failed:', error);
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,123 @@
# ============================================
# AlloyDB Setup Commands for Cloud Shell
# ============================================
# Copy-paste these one at a time into Cloud Shell
#
# NOTE(review): sslmode=disable relies on the connection staying on the
# VPC-private IP — confirm unencrypted connections are acceptable under
# your security policy.

# -------------------------------------------
# STEP 1: Get AlloyDB Private IP
# -------------------------------------------
ALLOYDB_IP=$(gcloud alloydb instances describe vibn-primary \
  --cluster=vibn \
  --region=northamerica-northeast1 \
  --format="value(ipAddress)")
echo "AlloyDB IP: $ALLOYDB_IP"

# -------------------------------------------
# STEP 2: Test Connection
# -------------------------------------------
psql "host=$ALLOYDB_IP port=5432 user=mark@getacquired.com dbname=postgres sslmode=disable" -c "\l"

# -------------------------------------------
# STEP 3: Create vibn Database
# -------------------------------------------
psql "host=$ALLOYDB_IP port=5432 user=mark@getacquired.com dbname=postgres sslmode=disable" -c "CREATE DATABASE vibn;"

# -------------------------------------------
# STEP 4: Enable Extensions
# -------------------------------------------
psql "host=$ALLOYDB_IP port=5432 user=mark@getacquired.com dbname=vibn sslmode=disable" <<EOF
CREATE EXTENSION IF NOT EXISTS vector;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
SELECT extname, extversion FROM pg_extension WHERE extname IN ('vector', 'uuid-ossp');
EOF

# -------------------------------------------
# STEP 5: Create a temporary SQL file for the schema
# -------------------------------------------
cat > /tmp/knowledge-chunks-schema.sql << 'EOFSCHEMA'
-- Enable required extensions (already done above, but safe to repeat)
CREATE EXTENSION IF NOT EXISTS vector;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Create the knowledge_chunks table
CREATE TABLE IF NOT EXISTS knowledge_chunks (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  project_id TEXT NOT NULL,
  knowledge_item_id TEXT NOT NULL,
  chunk_index INT NOT NULL,
  content TEXT NOT NULL,
  embedding VECTOR(768) NOT NULL,
  source_type TEXT,
  importance TEXT CHECK (importance IN ('primary', 'supporting', 'irrelevant') OR importance IS NULL),
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Standard indexes
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_project_id
  ON knowledge_chunks (project_id);
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_knowledge_item_id
  ON knowledge_chunks (knowledge_item_id);
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_project_knowledge
  ON knowledge_chunks (project_id, knowledge_item_id);
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_item_index
  ON knowledge_chunks (knowledge_item_id, chunk_index);

-- Vector similarity index using IVFFlat
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_embedding
  ON knowledge_chunks
  USING ivfflat (embedding vector_cosine_ops)
  WITH (lists = 100);

-- Auto-update trigger
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
  NEW.updated_at = NOW();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- FIX: drop first so re-running this script does not fail with
-- "trigger already exists" (everything else is IF NOT EXISTS).
DROP TRIGGER IF EXISTS update_knowledge_chunks_updated_at ON knowledge_chunks;
CREATE TRIGGER update_knowledge_chunks_updated_at
  BEFORE UPDATE ON knowledge_chunks
  FOR EACH ROW
  EXECUTE FUNCTION update_updated_at_column();
EOFSCHEMA

# -------------------------------------------
# STEP 6: Run the Schema File
# -------------------------------------------
psql "host=$ALLOYDB_IP port=5432 user=mark@getacquired.com dbname=vibn sslmode=disable" -f /tmp/knowledge-chunks-schema.sql

# -------------------------------------------
# STEP 7: Verify Everything
# -------------------------------------------
psql "host=$ALLOYDB_IP port=5432 user=mark@getacquired.com dbname=vibn sslmode=disable" <<EOF
-- Check table exists
\dt knowledge_chunks
-- Check indexes
\di
-- Count rows (should be 0)
SELECT COUNT(*) FROM knowledge_chunks;
-- Test vector operations
-- FIX: the previous check was "SELECT 1", which exercised nothing
-- vector-related. This actually evaluates a pgvector distance.
SELECT '[1,2,3]'::vector <-> '[3,2,1]'::vector AS l2_distance;
EOF

echo ""
echo "✅ AlloyDB setup complete!"
echo ""
echo "Connection string for your .env.local:"
echo "ALLOYDB_HOST=$ALLOYDB_IP"
echo "ALLOYDB_PORT=5432"
echo "ALLOYDB_USER=mark@getacquired.com"
echo "ALLOYDB_PASSWORD="
echo "ALLOYDB_DATABASE=vibn"
echo "ALLOYDB_SSL=false"

View File

@@ -0,0 +1,86 @@
/**
* Quick script to verify session associations in Firestore
*/
import { config } from 'dotenv';
import { resolve } from 'path';
// Load environment variables from .env.local
config({ path: resolve(__dirname, '../.env.local') });
import { getAdminDb } from '../lib/firebase/admin';
/**
 * Diagnostic: report which Firestore sessions are linked to a project,
 * list the unlinked ones, then dump every project document.
 * Exits the process with code 0 when finished.
 */
async function checkSessionLinks() {
  const db = getAdminDb();
  console.log('🔍 Checking session associations...\n');

  const sessionsSnapshot = await db.collection('sessions').get();
  console.log(`📊 Total sessions: ${sessionsSnapshot.size}\n`);

  // Partition sessions by whether they reference a project.
  const linked: any[] = [];
  const unlinked: any[] = [];
  for (const doc of sessionsSnapshot.docs) {
    const data = doc.data();
    const sessionInfo = {
      id: doc.id,
      workspaceName: data.workspaceName || 'Unknown',
      workspacePath: data.workspacePath,
      projectId: data.projectId,
      needsProjectAssociation: data.needsProjectAssociation,
      // Firestore Timestamps expose toDate(); fall back to the raw value.
      createdAt: data.createdAt?.toDate?.() || data.createdAt,
    };
    (data.projectId ? linked : unlinked).push(sessionInfo);
  }

  console.log(`✅ Linked to projects: ${linked.length}`);
  console.log(`⚠️ Not linked: ${unlinked.length}\n`);

  if (linked.length > 0) {
    console.log('📌 Linked Sessions:');
    for (const s of linked) {
      console.log(` - ${s.workspaceName} → Project: ${s.projectId?.substring(0, 8)}...`);
      console.log(` needsProjectAssociation: ${s.needsProjectAssociation}`);
    }
    console.log('');
  }

  if (unlinked.length > 0) {
    console.log('⚠️ Unlinked Sessions:');
    for (const s of unlinked) {
      console.log(` - ${s.workspaceName}`);
      console.log(` Path: ${s.workspacePath}`);
      console.log(` needsProjectAssociation: ${s.needsProjectAssociation}`);
    }
    console.log('');
  }

  // Dump every project for cross-referencing against the sessions above.
  const projectsSnapshot = await db.collection('projects').get();
  console.log(`📁 Total projects: ${projectsSnapshot.size}\n`);
  for (const doc of projectsSnapshot.docs) {
    const data = doc.data();
    console.log(` - ${data.productName || data.name || 'Unnamed'}`);
    console.log(` ID: ${doc.id}`);
    console.log(` Workspace: ${data.workspacePath || 'Not set'}`);
    console.log('');
  }

  process.exit(0);
}
// Entry point: run the check and convert any rejection into exit code 1.
void (async () => {
  try {
    await checkSessionLinks();
  } catch (error) {
    console.error('Error:', error);
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,161 @@
#!/usr/bin/env tsx
/**
* One-time migration script to process existing knowledge_items into AlloyDB
*
* This script:
* 1. Fetches all knowledge_items from Firestore
* 2. For each item, chunks and embeds it into AlloyDB
* 3. Shows progress and handles errors gracefully
*
* Usage:
* npx tsx scripts/migrate-existing-knowledge-to-alloydb.ts [projectId]
*
* - If projectId is provided, processes only that project
* - If omitted, processes ALL projects
*/
import { getAdminDb } from '../lib/firebase/admin';
import { writeKnowledgeChunksForItem, getChunkCountForKnowledgeItem } from '../lib/server/vector-memory';
/**
 * Minimal projection of a Firestore `knowledge_items` document —
 * exactly the fields this migration needs to chunk and embed an item.
 */
interface KnowledgeItem {
  id: string; // Firestore document id
  projectId: string;
  content: string; // raw text to chunk/embed (defaulted to '' when missing)
  sourceMeta?: {
    sourceType?: string;
    importance?: 'primary' | 'supporting' | 'irrelevant';
  };
}
/**
 * Fetch knowledge items from Firestore.
 *
 * @param projectId - When provided, restrict the query to that project;
 *                    otherwise every knowledge item is returned.
 * @returns Items mapped into the minimal {@link KnowledgeItem} shape.
 */
async function getAllKnowledgeItems(projectId?: string): Promise<KnowledgeItem[]> {
  const adminDb = getAdminDb();

  // The two original branches duplicated the snapshot -> items mapping;
  // the only real difference is the optional projectId filter.
  const collection = adminDb.collection('knowledge_items');
  if (projectId) {
    console.log(`[Migration] Fetching knowledge items for project ${projectId}...`);
  } else {
    console.log(`[Migration] Fetching ALL knowledge items...`);
  }

  const snapshot = await (projectId
    ? collection.where('projectId', '==', projectId)
    : collection
  ).get();

  const items: KnowledgeItem[] = [];
  snapshot.forEach((doc) => {
    const data = doc.data();
    items.push({
      id: doc.id,
      projectId: data.projectId,
      content: data.content || '', // guard against missing content field
      sourceMeta: data.sourceMeta,
    });
  });
  return items;
}
/**
 * Chunk and embed every fetched knowledge item into AlloyDB, skipping
 * items that already have chunks. Exits the process with code 0 on
 * full success and 1 if any item failed (or the fetch itself threw).
 */
async function migrateKnowledgeItems(projectId?: string) {
  const banner = '='.repeat(60);
  const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

  console.log('');
  console.log(banner);
  console.log('🚀 AlloyDB Knowledge Migration');
  console.log(banner);
  console.log('');

  try {
    const items = await getAllKnowledgeItems(projectId);
    console.log(`✅ Found ${items.length} knowledge items to process`);
    console.log('');

    if (items.length === 0) {
      console.log(' No knowledge items found. Nothing to migrate.');
      return;
    }

    let successCount = 0;
    let skipCount = 0;
    let errorCount = 0;

    for (const [index, item] of items.entries()) {
      const progress = `[${index + 1}/${items.length}]`;
      try {
        // Idempotency: items that already have chunks were processed
        // in a previous run and are skipped.
        const existingChunks = await getChunkCountForKnowledgeItem(item.id);
        if (existingChunks > 0) {
          console.log(`${progress} ⏭️ Skipping ${item.id} (already has ${existingChunks} chunks)`);
          skipCount++;
          continue;
        }

        console.log(`${progress} 🔄 Processing ${item.id}...`);
        await writeKnowledgeChunksForItem(item);
        const newChunks = await getChunkCountForKnowledgeItem(item.id);
        console.log(`${progress} ✅ Success! Created ${newChunks} chunks`);
        successCount++;

        // Short pause between items to stay under embedding rate limits.
        if (index < items.length - 1) {
          await sleep(100);
        }
      } catch (error) {
        console.error(`${progress} ❌ Failed to process ${item.id}:`, error);
        errorCount++;
      }
    }

    console.log('');
    console.log(banner);
    console.log('📊 Migration Complete');
    console.log(banner);
    console.log(`✅ Processed: ${successCount}`);
    console.log(`⏭️ Skipped (already exists): ${skipCount}`);
    console.log(`❌ Errors: ${errorCount}`);
    console.log(`📦 Total: ${items.length}`);
    console.log('');

    if (errorCount > 0) {
      console.log('⚠️ Some items failed. Check logs above for details.');
      process.exit(1);
    } else {
      console.log('🎉 All knowledge items successfully migrated to AlloyDB!');
      process.exit(0);
    }
  } catch (error) {
    console.error('');
    console.error('❌ Migration failed:', error);
    process.exit(1);
  }
}
// Parse command line arguments: an optional projectId restricts the run
// to a single project; otherwise every project is processed.
const projectId = process.argv[2];
console.log(
  projectId
    ? ` Processing single project: ${projectId}`
    : ' No projectId provided - processing ALL projects'
);

// Run migration (it handles its own errors and process.exit internally).
void migrateKnowledgeItems(projectId);

View File

@@ -0,0 +1,373 @@
// MUST load environment variables BEFORE any other imports
require('dotenv').config({ path: require('path').resolve(__dirname, '../.env.local') });
import { Client } from 'pg';
import admin from 'firebase-admin';
import { FieldValue } from 'firebase-admin/firestore';
// SECURITY FIX: the full PostgreSQL connection string — including the
// Railway password — was previously hard-coded here and is now in git
// history. Read it from the environment instead, and rotate the old
// credentials.
const PG_CONNECTION_STRING: string = process.env.PG_CONNECTION_STRING ?? '';
if (!PG_CONNECTION_STRING) {
  throw new Error('Missing PG_CONNECTION_STRING. Add it to your .env.local file.');
}

// Initialize Firebase Admin directly
if (!admin.apps.length) {
  // The private key is stored in .env.local with escaped "\n" sequences.
  const privateKey = process.env.FIREBASE_PRIVATE_KEY?.replace(/\\n/g, '\n');
  if (!process.env.FIREBASE_PROJECT_ID || !process.env.FIREBASE_CLIENT_EMAIL || !privateKey) {
    throw new Error('Missing Firebase Admin credentials. Check your .env.local file.');
  }
  admin.initializeApp({
    credential: admin.credential.cert({
      projectId: process.env.FIREBASE_PROJECT_ID,
      clientEmail: process.env.FIREBASE_CLIENT_EMAIL,
      privateKey: privateKey,
    }),
  });
  console.log('✅ Firebase Admin initialized successfully');
}

// Shared Admin SDK handles used by every migration step below.
const adminDb = admin.firestore();
const adminAuth = admin.auth();
// Row shapes of the legacy PostgreSQL tables as returned by `pg`.
// Field names mirror the old snake_case column names.

// `users` table row.
interface PgUser {
  id: number;
  email: string;
  name: string;
  created_at: Date;
  settings: any; // free-form JSON settings blob
}

// `projects` table row.
interface PgProject {
  id: number;
  client_id: number; // FK -> clients.id (used to resolve the owner)
  name: string;
  workspace_path: string;
  status: string;
  created_at: Date;
  updated_at: Date;
  metadata: any; // free-form JSON; `metadata.vision` is read during migration
}

// `sessions` table row (one recorded AI coding session).
interface PgSession {
  id: number;
  session_id: string; // original external session identifier
  project_id: number; // FK -> projects.id
  user_id: number; // FK -> users.id
  started_at: Date;
  last_updated: Date;
  ended_at: Date | null;
  status: string;
  conversation: any[];
  file_changes: any[]; // entries carry `path` or `file` keys
  message_count: number;
  user_message_count: number;
  assistant_message_count: number;
  file_change_count: number;
  duration_minutes: number;
  summary: string | null;
  tasks_identified: any[];
  decisions_made: any[];
  technologies_used: any[];
  metadata: any;
  total_tokens: number;
  prompt_tokens: number;
  completion_tokens: number;
  estimated_cost_usd: number; // NOTE(review): may arrive as a string from a NUMERIC column — the migration stringifies before parseFloat
  model: string;
}

// `work_completed` table row.
interface PgWorkCompleted {
  id: number;
  project_id: number; // FK -> projects.id
  session_id: number; // FK -> sessions.id
  title: string;
  description: string;
  completed_at: Date;
  files_modified: any[];
  lines_added: number;
  lines_removed: number;
  metadata: any;
}

// `clients` table row.
interface PgClient {
  id: number;
  owner_user_id: number; // FK -> users.id
  name: string;
  email: string | null;
  created_at: Date;
  metadata: any;
}
/**
 * Migrate PostgreSQL users into Firebase Auth + Firestore `users` docs.
 * Fills `userMapping` with old numeric id -> new Firebase uid. Errors
 * on a single user are logged and do not abort the run.
 */
async function migrateUsers(pgClient: Client, userMapping: Map<number, string>) {
  console.log('\n📋 Migrating Users...');
  const result = await pgClient.query<PgUser>('SELECT * FROM users');

  for (const pgUser of result.rows) {
    try {
      // Reuse an existing Auth account when the email is already registered.
      let firebaseUser;
      try {
        firebaseUser = await adminAuth.getUserByEmail(pgUser.email);
        console.log(` ✅ User already exists: ${pgUser.email}`);
      } catch {
        // getUserByEmail throws when no user exists — create one.
        // NOTE(review): this catch also swallows transient failures
        // (network, permissions) and falls through to createUser;
        // confirm that is acceptable for a one-off migration.
        firebaseUser = await adminAuth.createUser({
          email: pgUser.email,
          displayName: pgUser.name,
          emailVerified: true,
        });
        console.log(` ✨ Created Firebase Auth user: ${pgUser.email}`);
      }

      // Store mapping
      userMapping.set(pgUser.id, firebaseUser.uid);

      // Derive a URL-safe workspace slug from the email local part.
      const workspace = pgUser.email.split('@')[0].replace(/[^a-z0-9]/gi, '-').toLowerCase();
      await adminDb.collection('users').doc(firebaseUser.uid).set({
        uid: firebaseUser.uid,
        email: pgUser.email,
        displayName: pgUser.name,
        workspace: workspace,
        settings: pgUser.settings || {},
        createdAt: FieldValue.serverTimestamp(),
        updatedAt: FieldValue.serverTimestamp(),
        migratedFrom: 'postgresql',
        originalPgId: pgUser.id,
      });

      // FIX: the old log concatenated email and uid with no separator.
      console.log(` ✅ Migrated user: ${pgUser.email} → ${firebaseUser.uid}`);
    } catch (error) {
      console.error(` ❌ Error migrating user ${pgUser.email}:`, error);
    }
  }
}
/**
 * Migrate PostgreSQL clients into the Firestore `clients` collection.
 * Clients whose owner was not migrated are skipped.
 */
async function migrateClients(pgClient: Client, userMapping: Map<number, string>) {
  console.log('\n📋 Migrating Clients...');
  const result = await pgClient.query<PgClient>('SELECT * FROM clients');

  // FIX: the loop variable used to be named `pgClient`, shadowing the
  // PostgreSQL connection parameter of the same name. It only worked
  // because the query ran before the loop; any later use of the
  // connection inside the loop would have broken confusingly.
  for (const row of result.rows) {
    const firebaseUserId = userMapping.get(row.owner_user_id);
    if (!firebaseUserId) {
      console.log(` ⚠️ Skipping client ${row.name} - user not found`);
      continue;
    }
    try {
      const clientRef = adminDb.collection('clients').doc();
      await clientRef.set({
        id: clientRef.id,
        ownerId: firebaseUserId,
        name: row.name,
        email: row.email || null,
        createdAt: FieldValue.serverTimestamp(),
        metadata: row.metadata || {},
        migratedFrom: 'postgresql',
        originalPgId: row.id,
      });
      console.log(` ✅ Migrated client: ${row.name}`);
    } catch (error) {
      console.error(` ❌ Error migrating client ${row.name}:`, error);
    }
  }
}
/**
 * Migrate PostgreSQL projects into Firestore. The Firebase owner is
 * resolved via the project's client row; fills `projectMapping` with
 * old numeric id -> new Firestore document id.
 */
async function migrateProjects(pgClient: Client, userMapping: Map<number, string>, projectMapping: Map<number, string>) {
  console.log('\n📋 Migrating Projects...');
  const result = await pgClient.query<PgProject>('SELECT * FROM projects');

  for (const pgProject of result.rows) {
    try {
      // Resolve the owning user through the client record.
      // NOTE(review): one extra query per project (N+1); fine for a
      // one-off migration of a small table.
      const clientResult = await pgClient.query('SELECT owner_user_id FROM clients WHERE id = $1', [pgProject.client_id]);
      const firebaseUserId = userMapping.get(clientResult.rows[0]?.owner_user_id);
      if (!firebaseUserId) {
        console.log(` ⚠️ Skipping project ${pgProject.name} - user not found`);
        continue;
      }

      // Get the owner's workspace slug for the new document.
      const userDoc = await adminDb.collection('users').doc(firebaseUserId).get();
      const workspace = userDoc.data()?.workspace || 'default-workspace';

      const projectRef = adminDb.collection('projects').doc();
      await projectRef.set({
        id: projectRef.id,
        name: pgProject.name,
        slug: pgProject.name.toLowerCase().replace(/[^a-z0-9]/g, '-'),
        userId: firebaseUserId,
        workspace: workspace,
        productName: pgProject.name,
        productVision: pgProject.metadata?.vision || null,
        workspacePath: pgProject.workspace_path,
        status: pgProject.status,
        isForClient: true,
        hasLogo: false,
        hasDomain: false,
        hasWebsite: false,
        hasGithub: false,
        hasChatGPT: false,
        createdAt: FieldValue.serverTimestamp(),
        updatedAt: FieldValue.serverTimestamp(),
        metadata: pgProject.metadata || {},
        migratedFrom: 'postgresql',
        originalPgId: pgProject.id,
      });
      projectMapping.set(pgProject.id, projectRef.id);

      // FIX: the old log concatenated name and id with no separator.
      console.log(` ✅ Migrated project: ${pgProject.name} → ${projectRef.id}`);
    } catch (error) {
      console.error(` ❌ Error migrating project ${pgProject.name}:`, error);
    }
  }
}
/**
 * Migrate PostgreSQL sessions into the Firestore `sessions` collection.
 * Sessions whose user was not migrated are skipped; a missing project
 * mapping just leaves `projectId` null.
 */
async function migrateSessions(pgClient: Client, userMapping: Map<number, string>, projectMapping: Map<number, string>) {
  console.log('\n📋 Migrating Sessions...');
  const result = await pgClient.query<PgSession>('SELECT * FROM sessions ORDER BY started_at');

  for (const pgSession of result.rows) {
    try {
      const firebaseUserId = userMapping.get(pgSession.user_id);
      const firebaseProjectId = projectMapping.get(pgSession.project_id);
      if (!firebaseUserId) {
        console.log(` ⚠️ Skipping session ${pgSession.session_id} - user not found`);
        continue;
      }

      const sessionRef = adminDb.collection('sessions').doc();
      await sessionRef.set({
        id: sessionRef.id,
        userId: firebaseUserId,
        projectId: firebaseProjectId || null,
        // Session data
        startTime: pgSession.started_at,
        endTime: pgSession.ended_at || null,
        // FIX: guard against a NULL/undefined column (undefined * 60 is NaN).
        duration: (pgSession.duration_minutes || 0) * 60, // Convert to seconds
        // Project context
        workspacePath: null, // Not in old schema
        workspaceName: null,
        // AI usage
        model: pgSession.model,
        tokensUsed: pgSession.total_tokens,
        promptTokens: pgSession.prompt_tokens,
        completionTokens: pgSession.completion_tokens,
        // FIX: parseFloat(String(null)) is NaN; store 0 instead.
        cost: parseFloat(String(pgSession.estimated_cost_usd)) || 0,
        // Context
        // FIX: file_changes can be NULL in the old schema; every other
        // array column was already guarded with `|| []` but this one
        // would have thrown on .map().
        filesModified: (pgSession.file_changes || []).map((fc: any) => fc.path || fc.file),
        conversationSummary: pgSession.summary || null,
        conversation: pgSession.conversation || [],
        // Additional data from old schema
        messageCount: pgSession.message_count,
        userMessageCount: pgSession.user_message_count,
        assistantMessageCount: pgSession.assistant_message_count,
        fileChangeCount: pgSession.file_change_count,
        tasksIdentified: pgSession.tasks_identified || [],
        decisionsMade: pgSession.decisions_made || [],
        technologiesUsed: pgSession.technologies_used || [],
        status: pgSession.status,
        metadata: pgSession.metadata || {},
        createdAt: pgSession.started_at,
        updatedAt: pgSession.last_updated,
        migratedFrom: 'postgresql',
        originalPgId: pgSession.id,
        originalSessionId: pgSession.session_id,
      });
      console.log(` ✅ Migrated session: ${pgSession.session_id}`);
    } catch (error) {
      console.error(` ❌ Error migrating session ${pgSession.session_id}:`, error);
    }
  }
}
/**
 * Copy `work_completed` rows into the Firestore `workCompleted`
 * collection, skipping rows whose project was not migrated.
 */
async function migrateWorkCompleted(pgClient: Client, projectMapping: Map<number, string>) {
  console.log('\n📋 Migrating Work Completed...');
  const result = await pgClient.query<PgWorkCompleted>('SELECT * FROM work_completed ORDER BY completed_at');

  for (const work of result.rows) {
    // Skip rows that cannot be attached to a migrated project.
    const firebaseProjectId = projectMapping.get(work.project_id);
    if (!firebaseProjectId) {
      console.log(` ⚠️ Skipping work ${work.title} - project not found`);
      continue;
    }
    try {
      const workRef = adminDb.collection('workCompleted').doc();
      // Preserve a traceable reference to the legacy session, if any.
      const legacySessionId = work.session_id ? `pg-session-${work.session_id}` : null;
      await workRef.set({
        id: workRef.id,
        projectId: firebaseProjectId,
        sessionId: legacySessionId,
        title: work.title,
        description: work.description,
        completedAt: work.completed_at,
        filesModified: work.files_modified || [],
        linesAdded: work.lines_added || 0,
        linesRemoved: work.lines_removed || 0,
        metadata: work.metadata || {},
        createdAt: work.completed_at,
        migratedFrom: 'postgresql',
        originalPgId: work.id,
      });
      console.log(` ✅ Migrated work: ${work.title}`);
    } catch (error) {
      console.error(` ❌ Error migrating work ${work.title}:`, error);
    }
  }
}
/**
 * Entry point: connect to PostgreSQL, run every migration step in
 * dependency order, and always close the connection afterwards.
 */
async function main() {
  console.log('🚀 Starting PostgreSQL to Firebase migration...\n');

  const pgClient = new Client({ connectionString: PG_CONNECTION_STRING });

  try {
    console.log('📡 Connecting to PostgreSQL...');
    await pgClient.connect();
    console.log('✅ Connected to PostgreSQL\n');

    // Old numeric ids -> new Firebase ids, threaded through each step.
    const userMapping = new Map<number, string>();
    const projectMapping = new Map<number, string>();

    // Order matters: later steps look up ids produced by earlier ones.
    await migrateUsers(pgClient, userMapping);
    await migrateClients(pgClient, userMapping);
    await migrateProjects(pgClient, userMapping, projectMapping);
    await migrateSessions(pgClient, userMapping, projectMapping);
    await migrateWorkCompleted(pgClient, projectMapping);

    console.log('\n✅ Migration completed successfully!');
    console.log('\n📊 Summary:');
    console.log(` - Users migrated: ${userMapping.size}`);
    console.log(` - Projects migrated: ${projectMapping.size}`);
  } catch (error) {
    console.error('\n❌ Migration failed:', error);
    throw error;
  } finally {
    await pgClient.end();
    console.log('\n📡 Disconnected from PostgreSQL');
  }
}
// Run migration.
// FIX: the previous `main().catch(console.error)` logged the failure but
// still exited with code 0; exit non-zero so callers can detect failure,
// consistent with the other migration scripts.
main().catch((error) => {
  console.error(error);
  process.exit(1);
});

View File

@@ -0,0 +1,133 @@
// MUST load environment variables BEFORE any other imports
require('dotenv').config({ path: require('path').resolve(__dirname, '../.env.local') });
import admin from 'firebase-admin';
import { FieldValue } from 'firebase-admin/firestore';
// Initialize the Firebase Admin SDK exactly once, using service-account
// credentials from .env.local. "\n" sequences in the private key are
// unescaped because dotenv stores it on a single line.
if (!admin.apps.length) {
  const { FIREBASE_PROJECT_ID, FIREBASE_CLIENT_EMAIL, FIREBASE_PRIVATE_KEY } = process.env;
  const privateKey = FIREBASE_PRIVATE_KEY?.replace(/\\n/g, '\n');
  if (!FIREBASE_PROJECT_ID || !FIREBASE_CLIENT_EMAIL || !privateKey) {
    throw new Error('Missing Firebase Admin credentials. Check your .env.local file.');
  }
  admin.initializeApp({
    credential: admin.credential.cert({
      projectId: FIREBASE_PROJECT_ID,
      clientEmail: FIREBASE_CLIENT_EMAIL,
      privateKey,
    }),
  });
  console.log('✅ Firebase Admin initialized successfully');
}

// Shared Admin SDK handles used by the reassignment below.
const adminDb = admin.firestore();
const adminAuth = admin.auth();
/**
 * Reassign every document flagged `migratedFrom: 'postgresql'` to the
 * current production user, then delete the temporary migration account
 * (mark@example.com) if it still exists.
 */
async function reassignMigratedData() {
  console.log('🚀 Starting data reassignment...\n');
  try {
    // Get the current user (mark@getacquired.com)
    console.log('📋 Finding current user: mark@getacquired.com');
    const currentUser = await adminAuth.getUserByEmail('mark@getacquired.com');
    console.log(`✅ Current user found: ${currentUser.uid}\n`);

    // Get the migrated user (mark@example.com) — optional; reassignment
    // keys off the migratedFrom flag, not this account.
    console.log('📋 Finding migrated user: mark@example.com');
    let migratedUser;
    try {
      migratedUser = await adminAuth.getUserByEmail('mark@example.com');
      console.log(`✅ Migrated user found: ${migratedUser.uid}\n`);
    } catch (error) {
      console.log('⚠️ Migrated user not found, will look for any migrated data by flag\n');
    }

    // Reassign all collections
    const collections = ['sessions', 'projects', 'clients', 'workCompleted'];
    for (const collectionName of collections) {
      console.log(`\n📋 Processing ${collectionName}...`);

      const query = adminDb.collection(collectionName).where('migratedFrom', '==', 'postgresql');
      const snapshot = await query.get();

      if (snapshot.empty) {
        console.log(` No migrated ${collectionName} found`);
        continue;
      }
      console.log(` Found ${snapshot.size} migrated ${collectionName}`);

      // FIX: a Firestore WriteBatch is single-use. The old code kept
      // writing to the same batch after committing it every 500 docs,
      // which throws for any collection larger than 500 documents.
      // Start a fresh batch after each commit and track pending writes.
      let batch = adminDb.batch();
      let count = 0;
      let pending = 0;

      for (const doc of snapshot.docs) {
        const data = doc.data();
        const updates: any = {
          updatedAt: FieldValue.serverTimestamp(),
        };
        // Update userId field
        if (data.userId) {
          updates.userId = currentUser.uid;
        }
        // Update ownerId field (for clients)
        if (data.ownerId) {
          updates.ownerId = currentUser.uid;
        }
        batch.update(doc.ref, updates);
        count++;
        pending++;

        // Commit every 500 writes (Firestore's per-batch limit).
        if (pending === 500) {
          await batch.commit();
          console.log(` ✅ Committed ${count} updates...`);
          batch = adminDb.batch(); // batches cannot be reused after commit
          pending = 0;
        }
      }

      // Commit whatever is left in the final, partially-filled batch.
      if (pending > 0) {
        await batch.commit();
      }
      console.log(` ✅ Reassigned ${count} ${collectionName} to ${currentUser.email}`);
    }

    // Delete the temporary migrated user if it exists
    if (migratedUser) {
      console.log('\n📋 Cleaning up temporary migrated user account...');
      // Delete the user document, then the Auth account.
      await adminDb.collection('users').doc(migratedUser.uid).delete();
      await adminAuth.deleteUser(migratedUser.uid);
      console.log('✅ Temporary user account deleted');
    }

    console.log('\n✅ Data reassignment completed successfully!');
    console.log(`\n🎉 All migrated data is now assigned to: ${currentUser.email}`);
  } catch (error) {
    console.error('\n❌ Data reassignment failed:', error);
    throw error;
  }
}
// Run the reassignment and map success/failure onto the process exit code.
void (async () => {
  try {
    await reassignMigratedData();
    process.exit(0);
  } catch (error) {
    console.error(error);
    process.exit(1);
  }
})();

View File

@@ -0,0 +1,74 @@
#!/bin/bash
# Run this script in Google Cloud Shell to set up AlloyDB
# Cloud Shell has VPC access, so it can connect to AlloyDB directly

# Abort immediately if any command fails.
set -e

echo "🚀 AlloyDB Setup via Cloud Shell"
echo "=================================="
echo ""

# Set project
# Fixed identifiers for this deployment; edit before reusing elsewhere.
PROJECT_ID="gen-lang-client-0980079410"
REGION="northamerica-northeast1"
CLUSTER="vibn"
INSTANCE="vibn-primary"
USER="mark@getacquired.com"

echo "Project: $PROJECT_ID"
echo "Cluster: $CLUSTER"
echo "Instance: $INSTANCE"
echo ""

# Get AlloyDB private IP
echo "📍 Getting AlloyDB private IP..."
ALLOYDB_IP=$(gcloud alloydb instances describe $INSTANCE \
  --cluster=$CLUSTER \
  --region=$REGION \
  --project=$PROJECT_ID \
  --format="value(ipAddress)")
echo "✅ AlloyDB IP: $ALLOYDB_IP"
echo ""

# Install psql if needed (Cloud Shell images usually include it already).
if ! command -v psql &> /dev/null; then
  echo "📦 Installing PostgreSQL client..."
  sudo apt-get update && sudo apt-get install -y postgresql-client
fi
echo "✅ PostgreSQL client ready"
echo ""

# Connect and check databases
# NOTE(review): sslmode=disable assumes the VPC-private connection makes
# TLS unnecessary — confirm this against your security policy.
echo "🔍 Checking existing databases..."
psql "host=$ALLOYDB_IP port=5432 user=$USER dbname=postgres sslmode=disable" -c "\l"
echo ""

# Interactive confirmation before creating the database (-n 1 = one key).
read -p "Create 'vibn' database? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo "📝 Creating vibn database..."
  psql "host=$ALLOYDB_IP port=5432 user=$USER dbname=postgres sslmode=disable" \
    -c "CREATE DATABASE vibn;"
  echo "✅ Database created"
fi
echo ""

# Enable the pgvector and uuid-ossp extensions, then print their versions.
echo "🔌 Enabling extensions..."
psql "host=$ALLOYDB_IP port=5432 user=$USER dbname=vibn sslmode=disable" <<EOF
CREATE EXTENSION IF NOT EXISTS vector;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
SELECT extname, extversion FROM pg_extension WHERE extname IN ('vector', 'uuid-ossp');
EOF
echo ""
echo "✅ Extensions installed!"
echo ""
echo "📋 Next: Upload and run the schema file"
echo ""
echo "1. Upload lib/db/knowledge-chunks-schema.sql to Cloud Shell"
echo "2. Run: psql \"host=$ALLOYDB_IP port=5432 user=$USER dbname=vibn sslmode=disable\" -f knowledge-chunks-schema.sql"
echo ""
echo "Or copy-paste the SQL manually into psql"

180
scripts/setup-alloydb.sh Executable file
View File

@@ -0,0 +1,180 @@
#!/bin/bash
# AlloyDB Setup Script for Vibn
# This script helps you configure AlloyDB with a service account
#
# Steps performed:
#   1. Create a "vibn-alloydb-client" service account (idempotent)
#   2. Grant it the AlloyDB client + compute network user roles
#   3. Register it as an IAM-based database user on the cluster
#   4. Download a JSON key and optionally append config to .env.local
set -e  # Exit on error

echo "🚀 AlloyDB Setup for Vibn"
echo "=========================="
echo ""

# Get project ID from the active gcloud config
PROJECT_ID=$(gcloud config get-value project 2>/dev/null)
if [ -z "$PROJECT_ID" ]; then
  echo "❌ No GCP project configured. Run: gcloud config set project YOUR_PROJECT_ID"
  exit 1
fi
echo "📋 Project: $PROJECT_ID"
echo ""

# Prompt for cluster details
read -p "Enter your AlloyDB cluster name: " CLUSTER_NAME
read -p "Enter your AlloyDB region [us-central1]: " REGION
REGION=${REGION:-us-central1}
read -p "Enter your AlloyDB instance name [${CLUSTER_NAME}-primary]: " INSTANCE_NAME
INSTANCE_NAME=${INSTANCE_NAME:-${CLUSTER_NAME}-primary}

echo ""
echo "Configuration:"
echo "  Cluster: $CLUSTER_NAME"
echo "  Region: $REGION"
echo "  Instance: $INSTANCE_NAME"
echo ""
read -p "Continue? (y/n) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  exit 1
fi

# Create service account (skip if it already exists)
echo ""
echo "📝 Step 1: Creating service account..."
SA_NAME="vibn-alloydb-client"
SA_EMAIL="${SA_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"
if gcloud iam service-accounts describe "$SA_EMAIL" &>/dev/null; then
  echo "✅ Service account already exists: $SA_EMAIL"
else
  gcloud iam service-accounts create "$SA_NAME" \
    --display-name="Vibn AlloyDB Client" \
    --description="Service account for Vibn app to access AlloyDB"
  echo "✅ Created service account: $SA_EMAIL"
fi

# Grant permissions (idempotent — re-adding an existing binding is a no-op)
echo ""
echo "🔑 Step 2: Granting permissions..."
gcloud projects add-iam-policy-binding "$PROJECT_ID" \
  --member="serviceAccount:${SA_EMAIL}" \
  --role="roles/alloydb.client" \
  --condition=None \
  --quiet
gcloud projects add-iam-policy-binding "$PROJECT_ID" \
  --member="serviceAccount:${SA_EMAIL}" \
  --role="roles/compute.networkUser" \
  --condition=None \
  --quiet
echo "✅ Granted AlloyDB client and network user roles"

# Create IAM database user.
# FIX: AlloyDB database users are managed per-CLUSTER; `gcloud alloydb users`
# takes --cluster/--region but no --instance flag (the previous --instance
# argument made gcloud abort with "unrecognized arguments").
# NOTE(review): some GCP products expect a service account's DB username
# WITHOUT the ".gserviceaccount.com" suffix — confirm against the AlloyDB
# IAM authentication docs if the login is rejected.
echo ""
echo "👤 Step 3: Creating IAM database user..."
if gcloud alloydb users list \
  --cluster="$CLUSTER_NAME" \
  --region="$REGION" \
  --filter="name:${SA_EMAIL}" \
  --format="value(name)" 2>/dev/null | grep -q "${SA_EMAIL}"; then
  echo "✅ IAM user already exists"
else
  gcloud alloydb users create "$SA_EMAIL" \
    --cluster="$CLUSTER_NAME" \
    --region="$REGION" \
    --type=IAM_BASED
  echo "✅ Created IAM database user"
fi

# Download service account key
echo ""
echo "🔐 Step 4: Downloading service account key..."
KEY_FILE="$HOME/vibn-alloydb-key.json"

# Helper: create the key and lock its permissions down (it grants DB access).
# Extracted because the overwrite and fresh-download branches were duplicates.
download_key() {
  gcloud iam service-accounts keys create "$KEY_FILE" \
    --iam-account="$SA_EMAIL"
  chmod 600 "$KEY_FILE"
  echo "✅ Key saved to: $KEY_FILE"
}

if [ -f "$KEY_FILE" ]; then
  read -p "Key file already exists. Overwrite? (y/n) " -n 1 -r
  echo
  if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Skipping key download"
  else
    download_key
  fi
else
  download_key
fi

# Fully-qualified instance URI — the form the AlloyDB Auth Proxy expects
INSTANCE_URI="projects/${PROJECT_ID}/locations/${REGION}/clusters/${CLUSTER_NAME}/instances/${INSTANCE_NAME}"

echo ""
echo "🎉 Setup Complete!"
echo "=================="
echo ""
echo "Next steps:"
echo ""
echo "1. Add to your .env.local:"
echo "   ALLOYDB_HOST=127.0.0.1"
echo "   ALLOYDB_PORT=5432"
echo "   ALLOYDB_USER=${SA_EMAIL}"
echo "   ALLOYDB_PASSWORD="
echo "   ALLOYDB_DATABASE=vibn"
echo "   ALLOYDB_SSL=false"
echo "   GOOGLE_APPLICATION_CREDENTIALS=${KEY_FILE}"
echo ""
echo "2. Start AlloyDB Auth Proxy (in a separate terminal):"
echo "   alloydb-auth-proxy \\"
echo "     --credentials-file=${KEY_FILE} \\"
echo "     --port=5432 \\"
echo "     ${INSTANCE_URI}"
echo ""
echo "3. Create database and run schema:"
echo "   psql \"host=127.0.0.1 port=5432 user=${SA_EMAIL}\" -c 'CREATE DATABASE vibn;'"
echo "   psql \"host=127.0.0.1 port=5432 dbname=vibn user=${SA_EMAIL}\" \\"
echo "     -f lib/db/knowledge-chunks-schema.sql"
echo ""
echo "4. Test connection:"
echo "   npm run test:db"
echo ""

# Optionally append the config block to .env.local
read -p "Add these to .env.local now? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
  ENV_FILE=".env.local"
  # Backup existing .env.local before modifying it
  if [ -f "$ENV_FILE" ]; then
    cp "$ENV_FILE" "${ENV_FILE}.backup"
    echo "📦 Backed up existing .env.local"
  fi
  # Append AlloyDB config
  cat >> "$ENV_FILE" << EOF
# AlloyDB Configuration (added by setup script)
ALLOYDB_HOST=127.0.0.1
ALLOYDB_PORT=5432
ALLOYDB_USER=${SA_EMAIL}
ALLOYDB_PASSWORD=
ALLOYDB_DATABASE=vibn
ALLOYDB_SSL=false
GOOGLE_APPLICATION_CREDENTIALS=${KEY_FILE}
EOF
  echo "✅ Added AlloyDB config to .env.local"
fi

echo ""
echo "📚 For full guide, see: SETUP_ALLOYDB_SERVICE_ACCOUNT.md"

18
scripts/start-with-alloydb.sh Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# Start dev server with fresh AlloyDB access token
#
# AlloyDB IAM authentication accepts a gcloud OAuth access token as the
# database password, so we mint one, export it for the app, and launch dev.

echo "🔑 Generating AlloyDB access token..."
TOKEN="$(gcloud auth print-access-token)"

# An empty result means gcloud could not produce a token (not logged in, etc.)
if [ -z "$TOKEN" ]; then
  echo "❌ Failed to generate access token"
  echo "Make sure you're logged in: gcloud auth login"
  exit 1
fi

export ALLOYDB_PASSWORD="$TOKEN"

echo "✅ Token generated (expires in 1 hour)"
echo "🚀 Starting dev server..."
echo ""

npm run dev

152
scripts/test-alloydb.ts Normal file
View File

@@ -0,0 +1,152 @@
/**
* Test AlloyDB Connection
*
* Run with: npx tsx scripts/test-alloydb.ts
*/
import { getAlloyDbClient, checkAlloyDbHealth, executeQuery } from '../lib/db/alloydb';
/**
 * Runs a sequence of five smoke tests against the AlloyDB instance reached
 * through ../lib/db/alloydb: (1) health check, (2) server version,
 * (3) required extensions, (4) schema presence, (5) a vector-similarity query.
 * Prints progress/troubleshooting to the console and terminates the process:
 * exit code 0 on success, 1 on any failure.
 */
async function testConnection() {
  console.log('🧪 Testing AlloyDB Connection\n');
  console.log('='.repeat(50));

  try {
    // Test 1: Health check — basic connectivity through the shared client
    console.log('\n1⃣ Health Check...');
    const healthy = await checkAlloyDbHealth();

    if (!healthy) {
      // Unreachable DB: print the most likely causes and bail out early
      console.error('❌ Health check failed!');
      console.log('\nTroubleshooting:');
      console.log('  1. Is AlloyDB Auth Proxy running?');
      console.log('  2. Check environment variables in .env.local');
      console.log('  3. Verify service account has permissions');
      process.exit(1);
    }
    console.log('✅ Health check passed!');

    // Test 2: PostgreSQL version — only the first comma-separated token is shown
    console.log('\n2⃣ PostgreSQL Version...');
    const versionResult = await executeQuery<{ version: string }>('SELECT version()');
    console.log('✅ Version:', versionResult.rows[0].version.split(',')[0]);

    // Test 3: Extensions — pgvector powers similarity search, uuid-ossp the UUIDs
    console.log('\n3⃣ Checking Extensions...');
    const extResult = await executeQuery<{ extname: string }>(
      "SELECT extname FROM pg_extension WHERE extname IN ('vector', 'uuid-ossp')"
    );
    const installedExts = extResult.rows.map(r => r.extname);

    if (installedExts.includes('vector')) {
      console.log('✅ pgvector extension installed');
    } else {
      console.log('❌ pgvector extension NOT installed');
      console.log('   Run: CREATE EXTENSION vector;');
    }

    if (installedExts.includes('uuid-ossp')) {
      console.log('✅ uuid-ossp extension installed');
    } else {
      console.log('❌ uuid-ossp extension NOT installed');
      console.log('   Run: CREATE EXTENSION "uuid-ossp";');
    }

    // Test 4: Check for knowledge_chunks table (where embeddings are stored)
    console.log('\n4⃣ Checking Tables...');
    const tableResult = await executeQuery<{ table_name: string }>(
      `SELECT table_name
       FROM information_schema.tables
       WHERE table_schema = 'public'
       AND table_name = 'knowledge_chunks'`
    );

    if (tableResult.rows.length > 0) {
      console.log('✅ knowledge_chunks table exists');

      // Check indexes
      const indexResult = await executeQuery<{ indexname: string }>(
        `SELECT indexname
         FROM pg_indexes
         WHERE tablename = 'knowledge_chunks'`
      );
      console.log(`${indexResult.rows.length} indexes created:`);
      indexResult.rows.forEach(row => {
        console.log(`   - ${row.indexname}`);
      });

      // Count chunks — COUNT(*) comes back from pg as a string, hence parseInt
      const countResult = await executeQuery<{ count: string }>(
        'SELECT COUNT(*) as count FROM knowledge_chunks'
      );
      const count = parseInt(countResult.rows[0].count, 10);
      console.log(`✅ Total chunks: ${count}`);
    } else {
      console.log('⚠️ knowledge_chunks table NOT found');
      console.log('   Run the schema file:');
      console.log('   psql "host=127.0.0.1 port=5432 dbname=vibn user=YOUR_SA" \\');
      console.log('     -f lib/db/knowledge-chunks-schema.sql');
    }

    // Test 5: Test vector operations (if table exists and vector extension installed)
    if (tableResult.rows.length > 0 && installedExts.includes('vector')) {
      console.log('\n5⃣ Testing Vector Operations...');
      try {
        // Create a test embedding — 768 dims assumed to match the embedding
        // column definition; TODO confirm against knowledge-chunks-schema.sql
        const testEmbedding = Array.from({ length: 768 }, () => Math.random());

        // Test vector similarity query (should not error even with empty table);
        // <=> is pgvector's distance operator
        await executeQuery(
          `SELECT id
           FROM knowledge_chunks
           ORDER BY embedding <=> $1::vector
           LIMIT 1`,
          [JSON.stringify(testEmbedding)]
        );
        console.log('✅ Vector similarity queries working!');
      } catch (error) {
        // Vector failure is reported but does not abort the remaining summary
        console.log('❌ Vector operations failed:', error);
      }
    }

    console.log('\n' + '='.repeat(50));
    console.log('🎉 AlloyDB is ready to use!');
    console.log('='.repeat(50));
    console.log('\nNext steps:');
    console.log('  1. Start your app: npm run dev');
    console.log('  2. Import a knowledge item to test chunking');
    console.log('  3. Send a chat message to test vector search');
    console.log('');
    process.exit(0);
  } catch (error) {
    // Any unexpected failure (connection refused, auth, SQL error) lands here
    console.error('\n❌ Connection failed!');
    console.error('Error:', error instanceof Error ? error.message : String(error));
    console.log('\nTroubleshooting:');
    console.log('  1. Check .env.local has correct values');
    console.log('  2. Ensure AlloyDB Auth Proxy is running:');
    console.log('     alloydb-auth-proxy --credentials-file=~/vibn-alloydb-key.json --port=5432 YOUR_INSTANCE_URI');
    console.log('  3. Verify service account permissions');
    console.log('  4. Check network connectivity');
    console.log('');
    process.exit(1);
  }
}
// Entry point: echo the relevant connection environment, then run the suite.
console.log('Starting AlloyDB connection test...\n');
console.log('Environment:');

const { ALLOYDB_HOST, ALLOYDB_PORT, ALLOYDB_DATABASE, ALLOYDB_USER } = process.env;
console.log('  ALLOYDB_HOST:', ALLOYDB_HOST);
console.log('  ALLOYDB_PORT:', ALLOYDB_PORT);
console.log('  ALLOYDB_DATABASE:', ALLOYDB_DATABASE);
// Truncate the user (often a long service-account email) to keep output tidy
console.log('  ALLOYDB_USER:', ALLOYDB_USER?.substring(0, 30) + '...');
console.log('');

testConnection();

128
scripts/test-endpoints.sh Executable file
View File

@@ -0,0 +1,128 @@
#!/bin/bash
# Endpoint Health Check Script
# Tests all critical API endpoints to ensure they work after refactor
#
# Expects the Next.js dev server to already be running at $BASE_URL.
# Auth-protected routes are expected to answer 401, and parameter-required
# routes 400 — those codes prove the route exists and is wired correctly.
echo "🧪 Testing Vibn API Endpoints"
echo "======================================"
echo ""
# Colors (ANSI escape sequences, rendered via `echo -e` in the result lines)
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Base URL
BASE_URL="http://localhost:3000"
# Test counter — the three globals below are mutated by test_endpoint and
# summarized at the end of the script
TOTAL=0
PASSED=0
FAILED=0
# Run one HTTP request against the local server and record pass/fail.
#
# Args:
#   $1 METHOD          - HTTP method (GET, POST, ...)
#   $2 ROUTE           - request path appended to $BASE_URL
#   $3 EXPECTED_STATUS - HTTP status code that counts as a pass
#   $4 DESCRIPTION     - human-readable label printed in the report
#   $5 AUTH (optional) - bearer token sent as an Authorization header
#
# Side effects: increments the global TOTAL/PASSED/FAILED counters.
test_endpoint() {
    local METHOD=$1
    # BUG FIX: this local was previously named PATH, which shadowed the
    # shell's executable search path inside the function and could make the
    # `curl` lookup itself fail ("command not found").
    local ROUTE=$2
    local EXPECTED_STATUS=$3
    local DESCRIPTION=$4
    local AUTH=${5:-""}

    TOTAL=$((TOTAL + 1))

    # -s silences progress, -o /dev/null discards the body; we only capture
    # the numeric status code via -w
    if [ -z "$AUTH" ]; then
        STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X "$METHOD" "$BASE_URL$ROUTE" 2>/dev/null)
    else
        STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X "$METHOD" -H "Authorization: Bearer $AUTH" "$BASE_URL$ROUTE" 2>/dev/null)
    fi

    if [ "$STATUS" == "$EXPECTED_STATUS" ]; then
        echo -e "${GREEN}✓${NC} $DESCRIPTION"
        echo "   └─ $METHOD $ROUTE → $STATUS"
        PASSED=$((PASSED + 1))
    else
        echo -e "${RED}✗${NC} $DESCRIPTION"
        echo "   └─ $METHOD $ROUTE → Expected $EXPECTED_STATUS, got $STATUS"
        FAILED=$((FAILED + 1))
    fi
}
# --- Test groups -------------------------------------------------------------
# Expected-status convention: 200 = public route works; 401 = route exists and
# correctly demands auth; 400 = route exists and correctly validates params;
# 405 = route exists but rejects the method.
echo "1⃣ Frontend Pages"
echo "-----------------------------------"
test_endpoint "GET" "/" "200" "Home page"
echo ""
echo "2⃣ Project APIs (No Auth Required for Testing Structure)"
echo "-----------------------------------"
test_endpoint "POST" "/api/projects/create" "401" "Create project (should require auth)"
test_endpoint "GET" "/api/projects/phase" "405" "Phase endpoint (POST only)"
test_endpoint "GET" "/api/debug/first-project" "200" "Debug: First project"
echo ""
echo "3⃣ Knowledge & Context APIs"
echo "-----------------------------------"
test_endpoint "POST" "/api/projects/test-project/knowledge/upload-document" "401" "Upload document (should require auth)"
test_endpoint "POST" "/api/projects/test-project/knowledge/import-document" "401" "Import document (should require auth)"
test_endpoint "POST" "/api/projects/test-project/knowledge/import-ai-chat" "401" "Import AI chat (should require auth)"
test_endpoint "POST" "/api/projects/test-project/knowledge/batch-extract" "401" "Batch extract (should require auth)"
test_endpoint "GET" "/api/debug/knowledge-items" "200" "Debug: Knowledge items"
test_endpoint "GET" "/api/debug/context-sources" "200" "Debug: Context sources"
echo ""
echo "4⃣ AI Chat APIs"
echo "-----------------------------------"
test_endpoint "POST" "/api/ai/chat" "401" "AI chat (should require auth)"
test_endpoint "GET" "/api/ai/conversation" "400" "Get conversation (requires projectId)"
test_endpoint "POST" "/api/ai/conversation/reset" "400" "Reset conversation (requires projectId)"
echo ""
echo "5⃣ Extraction APIs"
echo "-----------------------------------"
test_endpoint "POST" "/api/projects/test-project/extract-from-chat" "401" "Extract from chat (should require auth)"
test_endpoint "POST" "/api/projects/test-project/aggregate" "401" "Aggregate extractions (should require auth)"
echo ""
echo "6⃣ GitHub Integration APIs"
echo "-----------------------------------"
test_endpoint "GET" "/api/github/repos" "401" "Get GitHub repos (should require auth)"
test_endpoint "POST" "/api/github/connect" "401" "Connect GitHub (should require auth)"
test_endpoint "GET" "/api/github/repo-tree" "400" "Get repo tree (requires params)"
test_endpoint "GET" "/api/github/file-content" "400" "Get file content (requires params)"
echo ""
echo "7⃣ Planning & Vision APIs"
echo "-----------------------------------"
test_endpoint "POST" "/api/projects/test-project/plan/mvp" "401" "Generate MVP plan (should require auth)"
test_endpoint "POST" "/api/projects/test-project/plan/marketing" "401" "Generate marketing plan (should require auth)"
test_endpoint "POST" "/api/vision/update" "400" "Update vision (requires projectId)"
echo ""
echo "8⃣ Utility APIs"
echo "-----------------------------------"
test_endpoint "POST" "/api/context/summarize" "400" "Summarize context (requires body)"
test_endpoint "GET" "/api/debug/env" "200" "Debug: Environment check"
test_endpoint "GET" "/api/diagnose" "200" "Diagnose system"
echo ""
echo ""
# --- Summary -----------------------------------------------------------------
# Counters were accumulated by test_endpoint; a non-zero FAILED count makes the
# script exit 1 so CI can detect the regression.
echo "======================================"
echo "📊 Test Results"
echo "======================================"
echo -e "Total:  $TOTAL"
echo -e "${GREEN}Passed: $PASSED${NC}"
echo -e "${RED}Failed: $FAILED${NC}"
echo ""
if [ $FAILED -eq 0 ]; then
    echo -e "${GREEN}✅ All endpoint structure tests passed!${NC}"
    echo ""
    echo "Note: 401/400 responses are EXPECTED for auth-protected and"
    echo "parameter-required endpoints. This confirms they exist and"
    echo "are properly configured."
    exit 0
else
    echo -e "${RED}❌ Some endpoints failed. Check the output above.${NC}"
    exit 1
fi