VIBN Frontend for Coolify deployment

This commit is contained in:
2026-02-15 19:25:52 -08:00
commit 40bf8428cd
398 changed files with 76513 additions and 0 deletions

View File

@@ -0,0 +1,159 @@
/**
* Import ChatGPT conversations from exported conversations.json file
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
 * Shape of a single message node inside a ChatGPT `conversations.json` export.
 * Only the fields this route reads are modeled; the real export contains more.
 */
interface ChatGPTMessage {
id: string;
author: {
// e.g. 'user' | 'assistant' | 'system' — 'system' messages are skipped on import
role: string;
name?: string;
};
content: {
content_type: string;
// NOTE(review): in real exports, multimodal nodes may omit `parts` or include
// non-string entries — verify before trusting this type at runtime
parts: string[];
};
// Unix epoch seconds (ChatGPT export convention) — TODO confirm for update_time
create_time?: number;
update_time?: number;
}
/**
 * Shape of one conversation in a ChatGPT `conversations.json` export.
 * `mapping` is a node graph keyed by node id; each node optionally carries a
 * message plus parent/children links forming the conversation tree.
 */
interface ChatGPTConversation {
id: string;
title: string;
// Unix epoch seconds; converted to ISO-8601 on import
create_time: number;
update_time?: number;
mapping: Record<string, {
id: string;
message?: ChatGPTMessage;
// parent node id within `mapping`
parent?: string;
// child node ids within `mapping`
children: string[];
}>;
}
/**
 * Imports ChatGPT conversations (from an exported `conversations.json`) into
 * the `chatgptImports` Firestore collection for the authenticated user.
 *
 * Request: Bearer Firebase ID token in `Authorization`; JSON body of
 * `{ conversations: ChatGPTConversation[], projectId?: string }`.
 * Responses: 200 with import summary, 401 on auth failure, 400 on bad payload,
 * 500 on unexpected errors. Individual malformed conversations are logged and
 * skipped rather than failing the whole import.
 */
export async function POST(request: Request) {
  try {
    // Authenticate via Firebase ID token in the Authorization header.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const body = await request.json();
    const { conversations, projectId } = body;
    if (!conversations || !Array.isArray(conversations)) {
      return NextResponse.json({ error: 'Invalid conversations data' }, { status: 400 });
    }

    console.log(`[ChatGPT Import] Processing ${conversations.length} conversations for user ${userId}`);

    const importedConversations: Array<{ id: string; title: string; messageCount: number }> = [];
    // `let`, not `const`: a committed WriteBatch cannot be reused, so we must
    // allocate a fresh batch each time we flush at the 500-operation limit.
    let batch = adminDb.batch();
    let batchCount = 0;

    for (const conv of conversations) {
      try {
        const conversation = conv as ChatGPTConversation;

        // Flatten the conversation tree into a message list, skipping system
        // messages and nodes without usable text content.
        const messages: Array<{
          role: string;
          content: string;
          timestamp?: number;
        }> = [];
        for (const node of Object.values(conversation.mapping)) {
          if (node.message && node.message.author.role !== 'system') {
            // Guard: multimodal export nodes may omit `parts` or include
            // non-string entries; only join actual strings.
            const parts = node.message.content?.parts ?? [];
            const content = parts
              .filter((p): p is string => typeof p === 'string')
              .join('\n');
            if (content.trim()) {
              messages.push({
                role: node.message.author.role,
                content: content,
                timestamp: node.message.create_time,
              });
            }
          }
        }

        // Sort by timestamp when both sides have one; otherwise keep the
        // mapping traversal order (sort is stable since ES2019).
        messages.sort((a, b) => {
          if (a.timestamp && b.timestamp) {
            return a.timestamp - b.timestamp;
          }
          return 0;
        });

        // Queue the Firestore write.
        const importRef = adminDb.collection('chatgptImports').doc();
        const importData = {
          userId,
          projectId: projectId || null,
          conversationId: conversation.id,
          title: conversation.title || 'Untitled Conversation',
          messageCount: messages.length,
          messages,
          // create_time is epoch seconds; fall back to "now" if absent.
          createdAt: conversation.create_time
            ? new Date(conversation.create_time * 1000).toISOString()
            : new Date().toISOString(),
          importedAt: new Date().toISOString(),
        };
        batch.set(importRef, importData);
        batchCount++;
        importedConversations.push({
          id: conversation.id,
          title: conversation.title,
          messageCount: messages.length,
        });

        // Firestore batch limit is 500 operations. After committing we must
        // start a NEW batch — reusing a committed batch throws.
        if (batchCount >= 500) {
          await batch.commit();
          batch = adminDb.batch();
          batchCount = 0;
        }
      } catch (error) {
        console.error(`[ChatGPT Import] Error processing conversation ${conv.id}:`, error);
        // Continue with other conversations
      }
    }

    // Commit any remaining queued writes.
    if (batchCount > 0) {
      await batch.commit();
    }

    console.log(`[ChatGPT Import] Successfully imported ${importedConversations.length} conversations`);
    return NextResponse.json({
      success: true,
      imported: importedConversations.length,
      conversations: importedConversations,
    });
  } catch (error) {
    console.error('[ChatGPT Import] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to import conversations',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}