Successful LLM call
@@ -12,7 +12,7 @@ export class LLMService {
   constructor(apiKey: string) {
     this.genAI = new GoogleGenerativeAI(apiKey);
-    this.model = this.genAI.getGenerativeModel({ model: "gemini-pro" });
+    this.model = this.genAI.getGenerativeModel({ model: "gemini-2.5-flash-lite" });
   }

   async analyzeDesires(desireSets: DesireSet[]): Promise<Record<string, string>> {
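A minimal sketch, not part of the commit, of how the switched-in model id could be exercised with the same @google/generative-ai calls the diff uses; the env var name and the prompt are placeholders, and LLMService's other fields are not shown here.

// Sketch only: standalone smoke test for the new model id.
import { GoogleGenerativeAI } from "@google/generative-ai";

const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY ?? "");
const model = genAI.getGenerativeModel({ model: "gemini-2.5-flash-lite" });

async function smokeTest(): Promise<void> {
  // generateContent returns a result whose response exposes text().
  const result = await model.generateContent("Reply with the single word: ok");
  console.log(result.response.text());
}

smokeTest().catch((err) => console.error(err));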
@@ -222,8 +222,8 @@ export const createWebSocketServer = (server: any) => {
       broadcastToSession(sessionId, { type: 'STATE_UPDATE', payload: {} });
       console.log(`Analysis complete for session ${sessionId}. Result:`, decision);

-    } catch (error) {
-      console.error(`Error during analysis for session ${sessionId}:`, error);
+    } catch (error: any) {
+      console.error(`Error during analysis for session ${sessionId}:`, error.message);
       sessionData.state = SessionState.ERROR;
       broadcastToSession(sessionId, { type: 'STATE_UPDATE', payload: {} });
     }
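For context, a hedged sketch of the catch-clause change in isolation: with TypeScript's useUnknownInCatchVariables (on under strict since 4.4) the catch variable is typed unknown, so it is either annotated as any, as this commit does, or narrowed before reading .message. The helper name and sample session id below are illustrative, not from the repo.

// Sketch only: the same "log the message, not the whole Error" pattern.
function logAnalysisError(sessionId: string, error: unknown): void {
  const message = error instanceof Error ? error.message : String(error);
  console.error(`Error during analysis for session ${sessionId}:`, message);
}

try {
  throw new Error("model call failed");
} catch (error: any) {
  logAnalysisError("demo-session", error);
}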