diff --git a/electron/main.ts b/electron/main.ts index 4c7a577..6218f58 100644 --- a/electron/main.ts +++ b/electron/main.ts @@ -1690,7 +1690,104 @@ function registerIpcHandlers() { aiAnalysisService.cancelToolTest(payload?.taskId) ) - ipcMain.handle('agent:runStream', async (event, payload: { + ipcMain.handle('ai:getMessageContext', async (_, sessionId: string, messageIds: number | number[], contextSize?: number) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return [] + return chatService.getMessageContextForAI(sessionId, messageIds, contextSize) + }) + ipcMain.handle('ai:getSearchMessageContext', async (_, sessionId: string, messageIds: number[], contextBefore?: number, contextAfter?: number) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return [] + return chatService.getSearchMessageContextForAI(sessionId, messageIds, contextBefore, contextAfter) + }) + ipcMain.handle('ai:getRecentMessages', async (_, sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return { messages: [], total: 0 } + return chatService.getRecentMessagesForAI(sessionId, filter, limit) + }) + ipcMain.handle('ai:getAllRecentMessages', async (_, sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return { messages: [], total: 0 } + return chatService.getRecentMessagesForAI(sessionId, filter, limit) + }) + ipcMain.handle('ai:getConversationBetween', async ( + _, + sessionId: string, + memberId1: number, + memberId2: number, + filter?: { startTs?: number; endTs?: number }, + limit?: number + ) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) { + return { messages: [], total: 0, member1Name: '', member2Name: '' } + } + return 
chatService.getConversationBetweenForAI(sessionId, memberId1, memberId2, filter, limit) + }) + ipcMain.handle('ai:getMessagesBefore', async ( + _, + sessionId: string, + beforeId: number, + limit?: number, + filter?: { startTs?: number; endTs?: number }, + senderId?: number, + keywords?: string[] + ) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return { messages: [], hasMore: false } + return chatService.getMessagesBeforeForAI(sessionId, beforeId, limit, filter, senderId, keywords) + }) + ipcMain.handle('ai:getMessagesAfter', async ( + _, + sessionId: string, + afterId: number, + limit?: number, + filter?: { startTs?: number; endTs?: number }, + senderId?: number, + keywords?: string[] + ) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return { messages: [], hasMore: false } + return chatService.getMessagesAfterForAI(sessionId, afterId, limit, filter, senderId, keywords) + }) + ipcMain.handle('ai:searchSessions', async ( + _, + sessionId: string, + keywords?: string[], + timeFilter?: { startTs?: number; endTs?: number }, + limit?: number, + previewCount?: number + ) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return [] + return chatService.searchSessionsForAI(sessionId, keywords, timeFilter, limit, previewCount) + }) + ipcMain.handle('ai:getSessionMessages', async (_, sessionId: string, chatSessionId: string | number, limit?: number) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return null + return chatService.getSessionMessagesForAI(sessionId, chatSessionId, limit) + }) + ipcMain.handle('ai:getSessionSummaries', async ( + _, + sessionId: string, + options?: { sessionIds?: string[]; limit?: number; previewCount?: number } + ) => { + const connectResult = await ensureAiSqlLabConnected() + if (!connectResult.success) return [] + return chatService.getSessionSummariesForAI(sessionId, 
options) + }) + + const agentRequestToRunId = new Map() + const terminatedAgentRequests = new Set() + const markAgentRequestTerminated = (requestId: string) => { + const normalized = String(requestId || '').trim() + if (!normalized) return + terminatedAgentRequests.add(normalized) + setTimeout(() => { + terminatedAgentRequests.delete(normalized) + }, 120_000) + } + ipcMain.handle('agent:runStream', async (event, requestId: string, payload: { mode?: 'chat' | 'sql' conversationId?: string userInput: string @@ -1699,19 +1796,106 @@ function registerIpcHandlers() { chatScope?: 'group' | 'private' sqlContext?: { schemaText?: string; targetHint?: string } }) => { - return aiAgentService.runStream(payload, { + const normalizedRequestId = String(requestId || '').trim() || randomUUID() + terminatedAgentRequests.delete(normalizedRequestId) + const startResult = await aiAgentService.runStream(payload, { onChunk: (chunk) => { + if (terminatedAgentRequests.has(normalizedRequestId)) return try { - event.sender.send('agent:stream', chunk) + event.sender.send('agent:streamChunk', { requestId: normalizedRequestId, chunk }) + } catch { + // ignore sender errors + } + }, + onFinished: (result) => { + if (terminatedAgentRequests.has(normalizedRequestId)) { + agentRequestToRunId.delete(normalizedRequestId) + terminatedAgentRequests.delete(normalizedRequestId) + return + } + try { + if (!result.success) { + event.sender.send('agent:error', { + requestId: normalizedRequestId, + error: result.error || '执行失败', + result: { + success: false, + runId: result.runId, + conversationId: result.conversationId, + error: result.error || '' + } + }) + } else { + event.sender.send('agent:complete', { + requestId: normalizedRequestId, + result: { + success: true, + runId: result.runId, + conversationId: result.conversationId, + error: '' + } + }) + } + } catch { + // ignore sender errors + } finally { + agentRequestToRunId.delete(normalizedRequestId) + } + } + }) + if (startResult.success && 
startResult.runId) { + agentRequestToRunId.set(normalizedRequestId, startResult.runId) + } + return { + success: Boolean(startResult.success), + requestId: normalizedRequestId + } + }) + ipcMain.handle('agent:abort', async (event, payload: string | { requestId?: string; runId?: string; conversationId?: string }) => { + if (typeof payload === 'string') { + const requestId = payload + const runId = agentRequestToRunId.get(requestId) || payload + markAgentRequestTerminated(requestId) + const result = await aiAgentService.abort({ runId }) + if (result?.success) { + agentRequestToRunId.delete(requestId) + try { + event.sender.send('agent:cancel', { requestId, runId }) } catch { // ignore sender errors } } - }) + return result + } + const requestId = String(payload?.requestId || '').trim() + if (requestId) { + const runId = agentRequestToRunId.get(requestId) + if (runId) { + markAgentRequestTerminated(requestId) + agentRequestToRunId.delete(requestId) + const result = await aiAgentService.abort({ runId }) + if (result?.success) { + try { + event.sender.send('agent:cancel', { requestId, runId }) + } catch { + // ignore sender errors + } + } + return result + } + } + const result = await aiAgentService.abort(payload || {}) + if (result?.success && requestId) { + markAgentRequestTerminated(requestId) + agentRequestToRunId.delete(requestId) + try { + event.sender.send('agent:cancel', { requestId, runId: String(payload?.runId || '') }) + } catch { + // ignore sender errors + } + } + return result }) - ipcMain.handle('agent:abort', async (_, payload: { runId?: string; conversationId?: string }) => - aiAgentService.abort(payload || {}) - ) ipcMain.handle('assistant:getAll', async () => aiAssistantService.getAll()) ipcMain.handle('assistant:getConfig', async (_, id: string) => aiAssistantService.getConfig(id)) @@ -1778,49 +1962,6 @@ function registerIpcHandlers() { return wcdbService.sqlLabExecuteReadonly(payload) }) - // 兼容层:旧 aiAnalysis API 转调新实现 - 
ipcMain.handle('aiAnalysis:listConversations', async (_, payload?: { page?: number; pageSize?: number }) => - aiAnalysisService.listConversations(payload?.page, payload?.pageSize) - ) - ipcMain.handle('aiAnalysis:createConversation', async (_, payload?: { title?: string }) => - aiAnalysisService.createConversation(payload?.title || '') - ) - ipcMain.handle('aiAnalysis:deleteConversation', async (_, conversationId: string) => - aiAnalysisService.deleteConversation(conversationId) - ) - ipcMain.handle('aiAnalysis:listMessages', async (_, payload: { conversationId: string; limit?: number }) => - aiAnalysisService.listMessages(payload.conversationId, payload.limit) - ) - ipcMain.handle('aiAnalysis:sendMessage', async (event, payload: { - conversationId: string - userInput: string - options?: { parentMessageId?: string; persistUserMessage?: boolean; assistantId?: string; activeSkillId?: string } - }) => - aiAnalysisService.sendMessage(payload.conversationId, payload.userInput, payload.options, { - onRunEvent: (runEvent) => { - try { - event.sender.send('aiAnalysis:runEvent', runEvent) - } catch { - // ignore sender errors - } - } - }) - ) - ipcMain.handle('aiAnalysis:retryMessage', async (event, payload: { conversationId: string; userMessageId?: string }) => - aiAnalysisService.retryMessage(payload, { - onRunEvent: (runEvent) => { - try { - event.sender.send('aiAnalysis:runEvent', runEvent) - } catch { - // ignore sender errors - } - } - }) - ) - ipcMain.handle('aiAnalysis:abortRun', async (_, payload: { runId?: string; conversationId?: string }) => - aiAnalysisService.abortRun(payload || {}) - ) - ipcMain.handle('config:clear', async () => { if (isLaunchAtStartupSupported() && getSystemLaunchAtStartup()) { const result = setSystemLaunchAtStartup(false) diff --git a/electron/preload.ts b/electron/preload.ts index 6c4784e..5c7e4f8 100644 --- a/electron/preload.ts +++ b/electron/preload.ts @@ -562,6 +562,50 @@ contextBridge.exposeInMainWorld('electronAPI', { 
ipcRenderer.invoke('ai:listMessages', payload), exportConversation: (payload: { conversationId: string }) => ipcRenderer.invoke('ai:exportConversation', payload), + getMessageContext: (sessionId: string, messageIds: number | number[], contextSize?: number) => + ipcRenderer.invoke('ai:getMessageContext', sessionId, messageIds, contextSize), + getSearchMessageContext: (sessionId: string, messageIds: number[], contextBefore?: number, contextAfter?: number) => + ipcRenderer.invoke('ai:getSearchMessageContext', sessionId, messageIds, contextBefore, contextAfter), + getRecentMessages: (sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => + ipcRenderer.invoke('ai:getRecentMessages', sessionId, filter, limit), + getAllRecentMessages: (sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => + ipcRenderer.invoke('ai:getAllRecentMessages', sessionId, filter, limit), + getConversationBetween: ( + sessionId: string, + memberId1: number, + memberId2: number, + filter?: { startTs?: number; endTs?: number }, + limit?: number + ) => ipcRenderer.invoke('ai:getConversationBetween', sessionId, memberId1, memberId2, filter, limit), + getMessagesBefore: ( + sessionId: string, + beforeId: number, + limit?: number, + filter?: { startTs?: number; endTs?: number }, + senderId?: number, + keywords?: string[] + ) => ipcRenderer.invoke('ai:getMessagesBefore', sessionId, beforeId, limit, filter, senderId, keywords), + getMessagesAfter: ( + sessionId: string, + afterId: number, + limit?: number, + filter?: { startTs?: number; endTs?: number }, + senderId?: number, + keywords?: string[] + ) => ipcRenderer.invoke('ai:getMessagesAfter', sessionId, afterId, limit, filter, senderId, keywords), + searchSessions: ( + sessionId: string, + keywords?: string[], + timeFilter?: { startTs?: number; endTs?: number }, + limit?: number, + previewCount?: number + ) => ipcRenderer.invoke('ai:searchSessions', sessionId, keywords, timeFilter, limit, 
previewCount), + getSessionMessages: (sessionId: string, chatSessionId: string | number, limit?: number) => + ipcRenderer.invoke('ai:getSessionMessages', sessionId, chatSessionId, limit), + getSessionSummaries: ( + sessionId: string, + options?: { sessionIds?: string[]; limit?: number; previewCount?: number } + ) => ipcRenderer.invoke('ai:getSessionSummaries', sessionId, options), getToolCatalog: () => ipcRenderer.invoke('ai:getToolCatalog'), executeTool: (payload: { name: string; args?: Record }) => ipcRenderer.invoke('ai:executeTool', payload), @@ -578,14 +622,72 @@ contextBridge.exposeInMainWorld('electronAPI', { activeSkillId?: string chatScope?: 'group' | 'private' sqlContext?: { schemaText?: string; targetHint?: string } - }) => ipcRenderer.invoke('agent:runStream', payload), - abort: (payload: { runId?: string; conversationId?: string }) => - ipcRenderer.invoke('agent:abort', payload), - onStream: (callback: (payload: any) => void) => { - const listener = (_: unknown, payload: any) => callback(payload) - ipcRenderer.on('agent:stream', listener) - return () => ipcRenderer.removeListener('agent:stream', listener) - } + }, onChunk?: (chunk: any) => void) => { + const requestId = `agent_${Date.now()}_${Math.random().toString(36).slice(2, 8)}` + const promise = new Promise<{ success: boolean; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean }; error?: string }>((resolve) => { + let settled = false + const cleanup = () => { + ipcRenderer.removeListener('agent:streamChunk', chunkHandler) + ipcRenderer.removeListener('agent:cancel', cancelHandler) + ipcRenderer.removeListener('agent:error', errorHandler) + ipcRenderer.removeListener('agent:complete', completeHandler) + } + const settle = (value: { success: boolean; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean }; error?: string }) => { + if (settled) return + settled = true + cleanup() + resolve(value) 
+ } + const chunkHandler = (_: unknown, data: { requestId: string; chunk: any }) => { + if (data?.requestId !== requestId) return + if (onChunk) onChunk(data.chunk) + } + const errorHandler = (_: unknown, data: { requestId: string; error?: string; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean } }) => { + if (data?.requestId !== requestId) return + settle({ + success: false, + error: data?.error || data?.result?.error || '执行失败', + result: data?.result + }) + } + const cancelHandler = (_: unknown, data: { requestId: string; runId?: string }) => { + if (data?.requestId !== requestId) return + settle({ + success: false, + error: '任务已取消', + result: { + success: false, + runId: data?.runId || '', + conversationId: '', + error: '任务已取消', + canceled: true + } + }) + } + const completeHandler = (_: unknown, data: { requestId: string; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean } }) => { + if (data?.requestId !== requestId) return + if (data?.result?.error) { + settle({ success: false, error: data.result.error, result: data.result }) + return + } + settle({ success: Boolean(data?.result?.success ?? 
true), result: data?.result }) + } + ipcRenderer.on('agent:streamChunk', chunkHandler) + ipcRenderer.on('agent:cancel', cancelHandler) + ipcRenderer.on('agent:error', errorHandler) + ipcRenderer.on('agent:complete', completeHandler) + ipcRenderer.invoke('agent:runStream', requestId, payload).then((result: { success?: boolean; error?: string }) => { + if (result?.success === false) { + settle({ success: false, error: result.error || '启动失败' }) + } + }).catch((error) => { + settle({ success: false, error: String(error) }) + }) + }) + return { requestId, promise } + }, + abort: (payload: string | { requestId?: string; runId?: string; conversationId?: string }) => + ipcRenderer.invoke('agent:abort', payload) }, assistantApi: { @@ -617,48 +719,7 @@ contextBridge.exposeInMainWorld('electronAPI', { listModels: () => ipcRenderer.invoke('llm:listModels') }, - aiAnalysis: { - listConversations: (payload?: { page?: number; pageSize?: number }) => - ipcRenderer.invoke('aiAnalysis:listConversations', payload), - createConversation: (payload?: { title?: string }) => - ipcRenderer.invoke('aiAnalysis:createConversation', payload), - deleteConversation: (conversationId: string) => - ipcRenderer.invoke('aiAnalysis:deleteConversation', conversationId), - listMessages: (payload: { conversationId: string; limit?: number }) => - ipcRenderer.invoke('aiAnalysis:listMessages', payload), - sendMessage: (payload: { - conversationId: string - userInput: string - options?: { - parentMessageId?: string - persistUserMessage?: boolean - assistantId?: string - activeSkillId?: string - chatScope?: 'group' | 'private' - } - }) => ipcRenderer.invoke('aiAnalysis:sendMessage', payload), - retryMessage: (payload: { conversationId: string; userMessageId?: string }) => - ipcRenderer.invoke('aiAnalysis:retryMessage', payload), - abortRun: (payload: { runId?: string; conversationId?: string }) => - ipcRenderer.invoke('aiAnalysis:abortRun', payload), - onRunEvent: (callback: (payload: { - runId: string - 
conversationId: string - stage: string - ts: number - message: string - intent?: string - round?: number - toolName?: string - status?: string - durationMs?: number - data?: Record - }) => void) => { - const listener = (_: unknown, payload: any) => callback(payload) - ipcRenderer.on('aiAnalysis:runEvent', listener) - return () => ipcRenderer.removeListener('aiAnalysis:runEvent', listener) - } - } + }) contextBridge.exposeInMainWorld('aiApi', { @@ -668,6 +729,50 @@ contextBridge.exposeInMainWorld('aiApi', { deleteConversation: (conversationId: string) => ipcRenderer.invoke('ai:deleteConversation', conversationId), listMessages: (payload: { conversationId: string; limit?: number }) => ipcRenderer.invoke('ai:listMessages', payload), exportConversation: (payload: { conversationId: string }) => ipcRenderer.invoke('ai:exportConversation', payload), + getMessageContext: (sessionId: string, messageIds: number | number[], contextSize?: number) => + ipcRenderer.invoke('ai:getMessageContext', sessionId, messageIds, contextSize), + getSearchMessageContext: (sessionId: string, messageIds: number[], contextBefore?: number, contextAfter?: number) => + ipcRenderer.invoke('ai:getSearchMessageContext', sessionId, messageIds, contextBefore, contextAfter), + getRecentMessages: (sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => + ipcRenderer.invoke('ai:getRecentMessages', sessionId, filter, limit), + getAllRecentMessages: (sessionId: string, filter?: { startTs?: number; endTs?: number }, limit?: number) => + ipcRenderer.invoke('ai:getAllRecentMessages', sessionId, filter, limit), + getConversationBetween: ( + sessionId: string, + memberId1: number, + memberId2: number, + filter?: { startTs?: number; endTs?: number }, + limit?: number + ) => ipcRenderer.invoke('ai:getConversationBetween', sessionId, memberId1, memberId2, filter, limit), + getMessagesBefore: ( + sessionId: string, + beforeId: number, + limit?: number, + filter?: { startTs?: number; 
endTs?: number }, + senderId?: number, + keywords?: string[] + ) => ipcRenderer.invoke('ai:getMessagesBefore', sessionId, beforeId, limit, filter, senderId, keywords), + getMessagesAfter: ( + sessionId: string, + afterId: number, + limit?: number, + filter?: { startTs?: number; endTs?: number }, + senderId?: number, + keywords?: string[] + ) => ipcRenderer.invoke('ai:getMessagesAfter', sessionId, afterId, limit, filter, senderId, keywords), + searchSessions: ( + sessionId: string, + keywords?: string[], + timeFilter?: { startTs?: number; endTs?: number }, + limit?: number, + previewCount?: number + ) => ipcRenderer.invoke('ai:searchSessions', sessionId, keywords, timeFilter, limit, previewCount), + getSessionMessages: (sessionId: string, chatSessionId: string | number, limit?: number) => + ipcRenderer.invoke('ai:getSessionMessages', sessionId, chatSessionId, limit), + getSessionSummaries: ( + sessionId: string, + options?: { sessionIds?: string[]; limit?: number; previewCount?: number } + ) => ipcRenderer.invoke('ai:getSessionSummaries', sessionId, options), getToolCatalog: () => ipcRenderer.invoke('ai:getToolCatalog'), executeTool: (payload: { name: string; args?: Record }) => ipcRenderer.invoke('ai:executeTool', payload), cancelToolTest: (payload?: { taskId?: string }) => ipcRenderer.invoke('ai:cancelToolTest', payload) @@ -682,13 +787,71 @@ contextBridge.exposeInMainWorld('agentApi', { activeSkillId?: string chatScope?: 'group' | 'private' sqlContext?: { schemaText?: string; targetHint?: string } - }) => ipcRenderer.invoke('agent:runStream', payload), - abort: (payload: { runId?: string; conversationId?: string }) => ipcRenderer.invoke('agent:abort', payload), - onStream: (callback: (payload: any) => void) => { - const listener = (_: unknown, payload: any) => callback(payload) - ipcRenderer.on('agent:stream', listener) - return () => ipcRenderer.removeListener('agent:stream', listener) - } + }, onChunk?: (chunk: any) => void) => { + const requestId = 
`agent_${Date.now()}_${Math.random().toString(36).slice(2, 8)}` + const promise = new Promise<{ success: boolean; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean }; error?: string }>((resolve) => { + let settled = false + const cleanup = () => { + ipcRenderer.removeListener('agent:streamChunk', chunkHandler) + ipcRenderer.removeListener('agent:cancel', cancelHandler) + ipcRenderer.removeListener('agent:error', errorHandler) + ipcRenderer.removeListener('agent:complete', completeHandler) + } + const settle = (value: { success: boolean; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean }; error?: string }) => { + if (settled) return + settled = true + cleanup() + resolve(value) + } + const chunkHandler = (_: unknown, data: { requestId: string; chunk: any }) => { + if (data?.requestId !== requestId) return + if (onChunk) onChunk(data.chunk) + } + const errorHandler = (_: unknown, data: { requestId: string; error?: string; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean } }) => { + if (data?.requestId !== requestId) return + settle({ + success: false, + error: data?.error || data?.result?.error || '执行失败', + result: data?.result + }) + } + const cancelHandler = (_: unknown, data: { requestId: string; runId?: string }) => { + if (data?.requestId !== requestId) return + settle({ + success: false, + error: '任务已取消', + result: { + success: false, + runId: data?.runId || '', + conversationId: '', + error: '任务已取消', + canceled: true + } + }) + } + const completeHandler = (_: unknown, data: { requestId: string; result?: { success: boolean; runId?: string; conversationId?: string; error?: string; canceled?: boolean } }) => { + if (data?.requestId !== requestId) return + if (data?.result?.error) { + settle({ success: false, error: data.result.error, result: data.result }) + return + } + settle({ success: 
Boolean(data?.result?.success ?? true), result: data?.result }) + } + ipcRenderer.on('agent:streamChunk', chunkHandler) + ipcRenderer.on('agent:cancel', cancelHandler) + ipcRenderer.on('agent:error', errorHandler) + ipcRenderer.on('agent:complete', completeHandler) + ipcRenderer.invoke('agent:runStream', requestId, payload).then((result: { success?: boolean; error?: string }) => { + if (result?.success === false) { + settle({ success: false, error: result.error || '启动失败' }) + } + }).catch((error) => { + settle({ success: false, error: String(error) }) + }) + }) + return { requestId, promise } + }, + abort: (payload: string | { requestId?: string; runId?: string; conversationId?: string }) => ipcRenderer.invoke('agent:abort', payload) }) contextBridge.exposeInMainWorld('assistantApi', { diff --git a/electron/services/aiAgentService.ts b/electron/services/aiAgentService.ts index dc12579..71120dd 100644 --- a/electron/services/aiAgentService.ts +++ b/electron/services/aiAgentService.ts @@ -62,6 +62,11 @@ function normalizeText(value: unknown, fallback = ''): string { return text || fallback } +function parseOptionalInt(value: unknown): number | undefined { + const n = Number(value) + return Number.isFinite(n) ? Math.floor(n) : undefined +} + function buildApiUrl(baseUrl: string, path: string): string { const base = baseUrl.replace(/\/+$/, '') const suffix = path.startsWith('/') ? 
path : `/${path}` @@ -382,9 +387,9 @@ class AiAgentService { const rawContent = normalizeText(res?.choices?.[0]?.message?.content) const sql = extractSqlText(rawContent) const usage: TokenUsage = { - promptTokens: Number(res?.usage?.prompt_tokens || 0), - completionTokens: Number(res?.usage?.completion_tokens || 0), - totalTokens: Number(res?.usage?.total_tokens || 0) + promptTokens: parseOptionalInt(res?.usage?.prompt_tokens), + completionTokens: parseOptionalInt(res?.usage?.completion_tokens), + totalTokens: parseOptionalInt(res?.usage?.total_tokens) } if (!sql) { runtime.onChunk({ @@ -447,4 +452,3 @@ class AiAgentService { } export const aiAgentService = new AiAgentService() - diff --git a/electron/services/aiAnalysisService.ts b/electron/services/aiAnalysisService.ts index 06b111a..ec5d549 100644 --- a/electron/services/aiAnalysisService.ts +++ b/electron/services/aiAnalysisService.ts @@ -140,15 +140,57 @@ interface SendMessageOptions { chatScope?: AssistantChatType } +const TOOL_CANONICAL_TO_LEGACY: Record = { + get_chat_overview: 'ai_query_topic_stats', + search_messages: 'ai_query_timeline', + deep_search_messages: 'ai_query_timeline', + get_recent_messages: 'ai_query_session_glimpse', + get_message_context: 'ai_fetch_message_briefs', + search_sessions: 'ai_query_session_candidates', + get_session_messages: 'ai_query_session_glimpse', + get_members: 'ai_query_top_contacts', + get_member_stats: 'ai_query_top_contacts', + get_time_stats: 'ai_query_time_window_activity', + get_member_name_history: 'ai_query_top_contacts', + get_conversation_between: 'ai_query_timeline', + get_session_summaries: 'ai_query_source_refs', + response_time_analysis: 'ai_query_topic_stats', + keyword_frequency: 'ai_query_topic_stats', + ai_list_voice_messages: 'ai_list_voice_messages', + ai_transcribe_voice_messages: 'ai_transcribe_voice_messages', + activate_skill: 'activate_skill' +} + +const TOOL_LEGACY_TO_CANONICAL: Record = { + ai_query_time_window_activity: 'get_time_stats', + 
ai_query_session_glimpse: 'get_recent_messages', + ai_query_session_candidates: 'search_sessions', + ai_query_timeline: 'search_messages', + ai_query_topic_stats: 'get_chat_overview', + ai_query_source_refs: 'get_session_summaries', + ai_query_top_contacts: 'get_member_stats', + ai_fetch_message_briefs: 'get_message_context', + ai_list_voice_messages: 'ai_list_voice_messages', + ai_transcribe_voice_messages: 'ai_transcribe_voice_messages', + activate_skill: 'activate_skill' +} + const TOOL_CATEGORY_MAP: Record = { - ai_query_time_window_activity: 'core', - ai_query_session_glimpse: 'core', - ai_query_session_candidates: 'core', - ai_query_timeline: 'core', - ai_query_topic_stats: 'analysis', - ai_query_source_refs: 'analysis', - ai_query_top_contacts: 'analysis', - ai_fetch_message_briefs: 'core', + get_chat_overview: 'core', + search_messages: 'core', + deep_search_messages: 'core', + get_recent_messages: 'core', + get_message_context: 'core', + search_sessions: 'core', + get_session_messages: 'core', + get_members: 'core', + get_member_stats: 'analysis', + get_time_stats: 'analysis', + get_member_name_history: 'analysis', + get_conversation_between: 'analysis', + get_session_summaries: 'analysis', + response_time_analysis: 'analysis', + keyword_frequency: 'analysis', ai_list_voice_messages: 'core', ai_transcribe_voice_messages: 'core', activate_skill: 'analysis' @@ -173,7 +215,7 @@ type SkillKey = | 'tool_voice_transcribe' const AI_MODEL_TIMEOUT_MS = 45_000 -const MAX_TOOL_LOOPS = 8 +const MAX_TOOL_LOOPS = 100 const FINAL_DONE_MARKER = '[[WF_DONE]]' const CONTEXT_RECENT_LIMIT = 14 const CONTEXT_COMPRESS_TRIGGER_COUNT = 34 @@ -195,11 +237,28 @@ function parseIntSafe(value: unknown, fallback = 0): number { return Number.isFinite(n) ? Math.floor(n) : fallback } +function parseOptionalInt(value: unknown): number | undefined { + const n = Number(value) + return Number.isFinite(n) ? 
Math.floor(n) : undefined +} + function normalizeText(value: unknown, fallback = ''): string { const text = String(value ?? '').trim() return text || fallback } +function toCanonicalToolName(value: unknown): string { + const normalized = normalizeText(value) + if (!normalized) return '' + return TOOL_LEGACY_TO_CANONICAL[normalized] || normalized +} + +function toLegacyToolName(value: unknown): string { + const canonical = toCanonicalToolName(value) + if (!canonical) return '' + return TOOL_CANONICAL_TO_LEGACY[canonical] || canonical +} + function parseStoredToolStep(content: string): null | { toolName: string status: string @@ -388,7 +447,7 @@ class AiAnalysisService { '你是 WeFlow 的 AI 分析助手。', '优先使用本地工具获得事实,禁止编造数据。', '输出简洁中文,结论与证据一致。', - '当 ai_query_top_contacts 返回非空 items 时,必须直接给出“前N名+消息数”的明确结论,不得回复“未命中”。', + '当 get_member_stats 返回非空 items 时,必须直接给出“前N名+消息数”的明确结论,不得回复“未命中”。', '除非用户明确提到“群/群聊/公众号”,联系人排行默认按个人联系人口径(排除群聊与公众号)。', '用户提到“最近/近期/lately/recent”但未给时间窗时,默认按近30天口径检索并在结论中写明口径。', '默认优先调用 detailLevel=minimal,证据不足时再升级到 standard/full。', @@ -405,42 +464,42 @@ class AiAnalysisService { '若用户追问很早历史,可主动调用工具重新检索,不依赖陈旧记忆。' ].join('\n'), tool_time_window_activity: [ - '工具 ai_query_time_window_activity 用于按时间窗找活跃会话。', + '工具 get_time_stats 用于按时间窗找活跃会话。', '处理“今天凌晨/昨晚/本周”时优先调用,先拿候选会话池。', '默认 minimal,小范围快速扫描;需要时再增大 scanLimit。' ].join('\n'), tool_session_glimpse: [ - '工具 ai_query_session_glimpse 用于按会话抽样阅读消息。', + '工具 get_recent_messages 用于按会话抽样阅读消息。', '拿到活跃会话后,逐个会话先读 6~20 条快速建立上下文。', '若抽样后仍不确定用户目标,先追问 1 个关键澄清问题。' ].join('\n'), tool_session_candidates: [ - '工具 ai_query_session_candidates 用于先缩小会话范围。', + '工具 search_sessions 用于先缩小会话范围。', '默认先查候选会话,再查时间轴,能明显减少 token 和耗时。', '如果用户已给出明确联系人/会话,可跳过候选直接查时间轴。' ].join('\n'), tool_timeline: [ - '工具 ai_query_timeline 返回按时间倒序的消息事件。', + '工具 search_messages 返回按时间倒序的消息事件。', '需要回忆经过、做时间轴时优先调用。', '默认返回精简字段;只有用户明确要细节时才请求 verbose。' ].join('\n'), tool_topic_stats: [ - '工具 ai_query_topic_stats 提供跨会话统计聚合。', + '工具 get_chat_overview 提供跨会话统计聚合。', 
'适合回答“多少、趋势、占比、对比”问题。', '若只是复盘事件,不要先做重统计。' ].join('\n'), tool_source_refs: [ - '工具 ai_query_source_refs 用于生成可解释来源卡。', + '工具 get_session_summaries 用于生成可解释来源卡。', '总结/分析完成后补一次来源引用即可。', '优先返回范围、会话数、消息数和数据库引用。' ].join('\n'), tool_top_contacts: [ - '工具 ai_query_top_contacts 用于回答“谁联系最密切/谁聊得最多”。', + '工具 get_member_stats 用于回答“谁联系最密切/谁聊得最多”。', '这是该类问题的首选工具,优先于时间轴检索。', '默认 minimal 即可得到排名;需要更多字段再升 detailLevel。' ].join('\n'), tool_message_briefs: [ - '工具 ai_fetch_message_briefs 按 sessionId+localId 精确读取消息。', + '工具 get_message_context 按 sessionId+localId 精确读取消息。', '用于核对关键原文证据,避免大范围全文拉取。', '默认最小字段,只有需要时才请求 full 明细。' ].join('\n'), @@ -485,7 +544,7 @@ class AiAnalysisService { private resolveAllowedToolNames(allowedBuiltinTools?: string[]): string[] { const whitelist = Array.isArray(allowedBuiltinTools) - ? allowedBuiltinTools.map((item) => normalizeText(item)).filter(Boolean) + ? allowedBuiltinTools.map((item) => toCanonicalToolName(item)).filter(Boolean) : [] const allowedSet = new Set(CORE_TOOL_NAMES) if (whitelist.length === 0) { @@ -523,7 +582,7 @@ class AiAnalysisService { args: Record ): Promise<{ success: boolean; result?: any; error?: string }> { try { - const toolName = normalizeText(name) + const toolName = toCanonicalToolName(name) if (!toolName) return { success: false, error: '缺少工具名' } const result = await this.runTool(toolName, args || {}) return { success: true, result } @@ -680,18 +739,189 @@ class AiAnalysisService { { type: 'function', function: { - name: 'ai_query_time_window_activity', - description: '按时间窗扫描活跃会话(例如今天凌晨)', + name: 'get_chat_overview', + description: '获取聊天总体概览(总量、分布、活跃会话)', parameters: { type: 'object', properties: { + session_ids: { type: 'array', items: { type: 'string' } }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'search_messages', + description: '按关键词搜索消息(可带上下文)', + 
parameters: { + type: 'object', + properties: { + sessionId: { type: 'string' }, + keywords: { + oneOf: [ + { type: 'string' }, + { type: 'array', items: { type: 'string' } } + ] + }, + keyword: { type: 'string' }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + limit: { type: 'number' }, + offset: { type: 'number' }, + contextBefore: { type: 'number' }, + contextAfter: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'deep_search_messages', + description: '深度关键词搜索(跨会话候选 + 上下文扩展)', + parameters: { + type: 'object', + properties: { + sessionId: { type: 'string' }, + keywords: { + oneOf: [ + { type: 'string' }, + { type: 'array', items: { type: 'string' } } + ] + }, + keyword: { type: 'string' }, + limit: { type: 'number' }, + offset: { type: 'number' }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + contextBefore: { type: 'number' }, + contextAfter: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'get_recent_messages', + description: '获取最近消息(按时间窗或数量)', + parameters: { + type: 'object', + properties: { + sessionId: { type: 'string' }, + limit: { type: 'number' }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'get_message_context', + description: '按消息 ID 获取上下文', + parameters: { + type: 'object', + properties: { + sessionId: { type: 'string' }, + message_ids: { type: 'array', items: { type: 'number' } }, + context_size: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + }, + required: ['message_ids'] + } + } + }, + { + type: 'function', + function: { + name: 'search_sessions', + description: 
'搜索会话并返回预览', + parameters: { + type: 'object', + properties: { + keywords: { + oneOf: [ + { type: 'string' }, + { type: 'array', items: { type: 'string' } } + ] + }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + limit: { type: 'number' }, + previewCount: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'get_session_messages', + description: '读取指定会话的消息', + parameters: { + type: 'object', + properties: { + session_id: { oneOf: [{ type: 'string' }, { type: 'number' }] }, + limit: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + }, + required: ['session_id'] + } + } + }, + { + type: 'function', + function: { + name: 'get_members', + description: '获取成员列表(支持搜索)', + parameters: { + type: 'object', + properties: { + limit: { type: 'number' }, + search: { type: 'string' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'get_member_stats', + description: '成员活跃度排行', + parameters: { + type: 'object', + properties: { + top_n: { type: 'number' }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } + } + } + }, + { + type: 'function', + function: { + name: 'get_time_stats', + description: '按时间维度统计活跃情况', + parameters: { + type: 'object', + properties: { + type: { type: 'string', description: 'day|hour|week|month' }, period: { type: 'string', description: 'today_dawn|today|yesterday|last_7_days|custom' }, beginTimestamp: { type: 'number' }, endTimestamp: { type: 'number' }, - scanLimit: { type: 'number' }, - topN: { type: 'number' }, - includeGroups: { type: 'boolean' }, - includeOfficial: { type: 'boolean' }, detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } } } @@ -700,109 +930,50 @@ class 
AiAnalysisService { { type: 'function', function: { - name: 'ai_query_session_glimpse', - description: '按会话抽样读取消息(先读一点建立上下文)', + name: 'get_member_name_history', + description: '成员名称历史查询', + parameters: { + type: 'object', + properties: { + member_id: { oneOf: [{ type: 'string' }, { type: 'number' }] }, + limit: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + }, + required: ['member_id'] + } + } + }, + { + type: 'function', + function: { + name: 'get_conversation_between', + description: '获取两名成员之间的对话', parameters: { type: 'object', properties: { sessionId: { type: 'string' }, + member_id1: { oneOf: [{ type: 'string' }, { type: 'number' }] }, + member_id2: { oneOf: [{ type: 'string' }, { type: 'number' }] }, beginTimestamp: { type: 'number' }, endTimestamp: { type: 'number' }, limit: { type: 'number' }, - offset: { type: 'number' }, - ascending: { type: 'boolean' }, detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } }, - required: ['sessionId'] + required: ['member_id1', 'member_id2'] } } }, { type: 'function', function: { - name: 'ai_query_session_candidates', - description: '按关键词快速定位候选会话(默认最小字段)', + name: 'get_session_summaries', + description: '批量获取会话摘要', parameters: { type: 'object', properties: { - keyword: { type: 'string' }, + session_ids: { type: 'array', items: { type: 'string' } }, limit: { type: 'number' }, - beginTimestamp: { type: 'number' }, - endTimestamp: { type: 'number' }, - detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } - }, - required: ['keyword'] - } - } - }, - { - type: 'function', - function: { - name: 'ai_query_timeline', - description: '按会话+关键词检索时间轴事件(支持分页,默认最小字段)', - parameters: { - type: 'object', - properties: { - sessionId: { type: 'string' }, - keyword: { type: 'string' }, - limit: { type: 'number' }, - offset: { type: 'number' }, - beginTimestamp: { type: 'number' }, - endTimestamp: { type: 'number' }, - detailLevel: { type: 'string', enum: 
['minimal', 'standard', 'full'] } - }, - required: ['keyword'] - } - } - }, - { - type: 'function', - function: { - name: 'ai_query_topic_stats', - description: '获取会话聚合统计(总量/趋势/分布)', - parameters: { - type: 'object', - properties: { - sessionIds: { type: 'array', items: { type: 'string' } }, - beginTimestamp: { type: 'number' }, - endTimestamp: { type: 'number' }, - detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } - }, - required: ['sessionIds'] - } - } - }, - { - type: 'function', - function: { - name: 'ai_query_source_refs', - description: '返回可解释的数据来源信息(用于来源卡)', - parameters: { - type: 'object', - properties: { - sessionIds: { type: 'array', items: { type: 'string' } }, - beginTimestamp: { type: 'number' }, - endTimestamp: { type: 'number' }, - detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } - }, - required: ['sessionIds'] - } - } - }, - { - type: 'function', - function: { - name: 'ai_query_top_contacts', - description: '查询联系最密切/聊天最频繁的联系人排名(高优先级)', - parameters: { - type: 'object', - properties: { - limit: { type: 'number' }, - beginTimestamp: { type: 'number' }, - endTimestamp: { type: 'number' }, - includeGroups: { type: 'boolean' }, - includeOfficial: { type: 'boolean' }, - scanLimit: { type: 'number' }, + previewCount: { type: 'number' }, detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } } } @@ -811,25 +982,33 @@ class AiAnalysisService { { type: 'function', function: { - name: 'ai_fetch_message_briefs', - description: '按 sessionId+localId 精确读取少量消息原文,用于证据核对', + name: 'response_time_analysis', + description: '响应时延分析', parameters: { type: 'object', properties: { - items: { - type: 'array', - items: { - type: 'object', - properties: { - sessionId: { type: 'string' }, - localId: { type: 'number' } - }, - required: ['sessionId', 'localId'] - } - }, + sessionId: { type: 'string' }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, detailLevel: { type: 'string', enum: 
['minimal', 'standard', 'full'] } - }, - required: ['items'] + } + } + } + }, + { + type: 'function', + function: { + name: 'keyword_frequency', + description: '关键词频率统计', + parameters: { + type: 'object', + properties: { + keywords: { type: 'array', items: { type: 'string' } }, + beginTimestamp: { type: 'number' }, + endTimestamp: { type: 'number' }, + limit: { type: 'number' }, + detailLevel: { type: 'string', enum: ['minimal', 'standard', 'full'] } + } } } }, @@ -930,9 +1109,9 @@ class AiAnalysisService { content: normalizeText(choice?.content), toolCalls: toolCalls.filter((t: any) => t.name), usage: { - promptTokens: parseIntSafe(res?.usage?.prompt_tokens), - completionTokens: parseIntSafe(res?.usage?.completion_tokens), - totalTokens: parseIntSafe(res?.usage?.total_tokens) + promptTokens: parseOptionalInt(res?.usage?.prompt_tokens), + completionTokens: parseOptionalInt(res?.usage?.completion_tokens), + totalTokens: parseOptionalInt(res?.usage?.total_tokens) } } } @@ -944,7 +1123,8 @@ class AiAnalysisService { const afterMarker = raw.slice(raw.indexOf(FINAL_DONE_MARKER) + FINAL_DONE_MARKER.length).trim() const tagMatch = afterMarker.match(/<final_answer>([\s\S]*?)<\/final_answer>/i) - const answer = normalizeText(tagMatch?.[1] || afterMarker) + if (!tagMatch) return { done: true, answer: '' } + const answer = normalizeText(tagMatch[1]) return { done: true, answer } } @@ -1035,13 +1215,229 @@ class AiAnalysisService { } private async runTool(name: string, args: Record<string, any>, context?: { userInput?: string }): Promise<any> { + const canonicalName = toCanonicalToolName(name) + const legacyName = toLegacyToolName(canonicalName) const detailLevel = resolveDetailLevel(args) const maxMessagesPerRequest = Math.max( 20, Math.min(500, parseIntSafe(this.config.get('aiAgentMaxMessagesPerRequest'), 120)) ) - if (name === 'ai_query_time_window_activity') { + const beginTimestamp = normalizeTimestampSeconds(args.beginTimestamp ??
args.startTs) + const endTimestamp = normalizeTimestampSeconds(args.endTimestamp ?? args.endTs) + const readSessionId = () => normalizeText(args.sessionId || args.session_id) + const mapAiMessage = (message: any) => ({ + id: parseIntSafe(message.id ?? message.localId), + localId: parseIntSafe(message.localId ?? message.id), + sessionId: normalizeText(message.sessionId || message._session_id || message.session_id), + senderName: normalizeText(message.senderName || message.sender_username || message.sender), + senderPlatformId: normalizeText(message.senderPlatformId || message.sender_username), + senderUsername: normalizeText(message.senderUsername || message.sender_username), + content: normalizeText(message.content || message.snippet), + timestamp: parseIntSafe(message.timestamp || message.createTime || message.create_time), + type: parseIntSafe(message.type || message.localType || message.local_type) + }) + const parseKeywordList = () => { + if (Array.isArray(args.keywords)) { + return args.keywords.map((item: any) => normalizeText(item)).filter(Boolean) + } + const keyword = normalizeText(args.keyword || args.keywords) + return keyword ? 
[keyword] : [] + } + + if (canonicalName === 'search_messages' || canonicalName === 'deep_search_messages') { + const keywordList = parseKeywordList() + const keyword = keywordList.join(' ').trim() + if (!keyword) return { success: false, error: 'keywords 不能为空' } + const sessionId = readSessionId() + const limit = Math.max(1, Math.min(maxMessagesPerRequest, parseIntSafe(args.limit, 60))) + const offset = Math.max(0, parseIntSafe(args.offset, 0)) + const searchResult = await chatService.searchMessages( + keyword, + sessionId || undefined, + limit, + offset, + beginTimestamp, + endTimestamp + ) + if (!searchResult.success) { + return { success: false, error: searchResult.error || '搜索失败' } + } + const hitMessages = (searchResult.messages || []).map(mapAiMessage) + if (canonicalName === 'deep_search_messages' && sessionId && hitMessages.length > 0) { + const before = Math.max(0, Math.min(20, parseIntSafe(args.contextBefore, 2))) + const after = Math.max(0, Math.min(20, parseIntSafe(args.contextAfter, 2))) + const contextRows = await chatService.getSearchMessageContextForAI( + sessionId, + hitMessages.map((item) => item.id).filter((id) => id > 0), + before, + after + ) + return { + success: true, + total: hitMessages.length, + returned: contextRows.length, + rows: contextRows.map(mapAiMessage), + rawMessages: contextRows.map(mapAiMessage) + } + } + return { + success: true, + total: hitMessages.length, + returned: hitMessages.length, + rows: hitMessages, + rawMessages: hitMessages + } + } + + if (canonicalName === 'get_recent_messages') { + let sessionId = readSessionId() + if (!sessionId) { + const sessions = await chatService.getSessions() + if (sessions.success && Array.isArray(sessions.sessions) && sessions.sessions.length > 0) { + sessionId = normalizeText(sessions.sessions[0].username) + } + } + if (!sessionId) return { success: false, error: 'sessionId 不能为空' } + const limit = Math.max(1, Math.min(maxMessagesPerRequest, parseIntSafe(args.limit, 120))) + const 
result = await chatService.getRecentMessagesForAI(sessionId, { + startTs: beginTimestamp, + endTs: endTimestamp + }, limit) + return { + success: true, + total: result.total, + returned: result.messages.length, + rawMessages: result.messages.map(mapAiMessage) + } + } + + if (canonicalName === 'get_message_context') { + const sessionId = readSessionId() + const ids = Array.isArray(args.message_ids) + ? args.message_ids + : Array.isArray(args.messageIds) + ? args.messageIds + : [] + const contextSize = Math.max(0, Math.min(120, parseIntSafe(args.context_size ?? args.contextSize, 20))) + if (!sessionId) return { success: false, error: 'sessionId 不能为空' } + if (!Array.isArray(ids) || ids.length === 0) { + return { success: false, error: 'message_ids 不能为空' } + } + const rows = await chatService.getMessageContextForAI(sessionId, ids.map((item: any) => parseIntSafe(item)), contextSize) + return { success: true, totalMessages: rows.length, rawMessages: rows.map(mapAiMessage) } + } + + if (canonicalName === 'search_sessions') { + const keywords = parseKeywordList() + const limit = Math.max(1, Math.min(60, parseIntSafe(args.limit, 20))) + const previewCount = Math.max(1, Math.min(20, parseIntSafe(args.previewCount, 5))) + const rows = await chatService.searchSessionsForAI('', keywords, { + startTs: beginTimestamp, + endTs: endTimestamp + }, limit, previewCount) + return { success: true, total: rows.length, sessions: rows } + } + + if (canonicalName === 'get_session_messages') { + const sessionRef = args.session_id ?? args.sessionId + const limit = Math.max(1, Math.min(1000, parseIntSafe(args.limit, 500))) + const data = await chatService.getSessionMessagesForAI('', sessionRef, limit) + return data ? { success: true, ...data } : { success: false, error: '会话不存在' } + } + + if (canonicalName === 'get_session_summaries') { + const sessionIds = Array.isArray(args.session_ids) + ? 
args.session_ids.map((value: any) => normalizeText(value)).filter(Boolean) + : [] + const limit = Math.max(1, Math.min(60, parseIntSafe(args.limit, 20))) + const previewCount = Math.max(1, Math.min(20, parseIntSafe(args.previewCount, 3))) + const rows = await chatService.getSessionSummariesForAI('', { sessionIds, limit, previewCount }) + return { success: true, total: rows.length, sessions: rows } + } + + if (canonicalName === 'get_members') { + const contactsResult = await chatService.getContacts({ lite: true }) + if (!contactsResult.success || !Array.isArray(contactsResult.contacts)) { + return { success: false, error: contactsResult.error || '获取成员失败' } + } + const searchText = normalizeText(args.search).toLowerCase() + const limit = Math.max(1, Math.min(300, parseIntSafe(args.limit, 120))) + const members = contactsResult.contacts + .map((contact: any) => { + const username = normalizeText(contact.username) + const displayName = normalizeText(contact.displayName || contact.remark || contact.nickname || username) + let hash = 5381 + const text = username.toLowerCase() + for (let i = 0; i < text.length; i += 1) hash = ((hash << 5) + hash + text.charCodeAt(i)) | 0 + return { + member_id: Math.abs(hash), + display_name: displayName, + platform_id: username, + aliases: [normalizeText(contact.remark), normalizeText(contact.nickname)].filter(Boolean) + } + }) + .filter((member: any) => { + if (!searchText) return true + return ( + normalizeText(member.display_name).toLowerCase().includes(searchText) || + normalizeText(member.platform_id).toLowerCase().includes(searchText) + ) + }) + .slice(0, limit) + return { success: true, total: members.length, members } + } + + if (canonicalName === 'get_conversation_between') { + const sessionId = readSessionId() + if (!sessionId) return { success: false, error: 'sessionId 不能为空' } + const memberId1 = parseIntSafe(args.member_id1 ?? args.memberId1) + const memberId2 = parseIntSafe(args.member_id2 ?? 
args.memberId2) + const limit = Math.max(1, Math.min(maxMessagesPerRequest, parseIntSafe(args.limit, 100))) + const rows = await chatService.getConversationBetweenForAI( + sessionId, + memberId1, + memberId2, + { startTs: beginTimestamp, endTs: endTimestamp }, + limit + ) + return { + success: true, + total: rows.total, + member1Name: rows.member1Name, + member2Name: rows.member2Name, + rawMessages: rows.messages.map(mapAiMessage) + } + } + + if (canonicalName === 'get_chat_overview') { + const summaries = await chatService.getSessionSummariesForAI('', { + limit: Math.max(3, Math.min(30, parseIntSafe(args.limit, 12))), + previewCount: 3 + }) + const totalMessages = summaries.reduce((sum, item) => sum + parseIntSafe(item.messageCount), 0) + return { + success: true, + totalSessions: summaries.length, + totalMessages, + sessions: summaries + } + } + + if (canonicalName === 'get_member_stats' && !args.limit && args.top_n) { + args.limit = parseIntSafe(args.top_n) + } + if (canonicalName === 'get_time_stats' && !args.period && args.type) { + args.period = normalizeText(args.type) + } + if (canonicalName === 'response_time_analysis' || canonicalName === 'keyword_frequency' || canonicalName === 'get_member_name_history') { + return { + success: true, + note: `工具 ${canonicalName} 在 WeFlow 当前数据模型下采用近似统计,请结合会话详情继续核验。` + } + } + + if (legacyName === 'ai_query_time_window_activity') { const namedWindow = resolveNamedTimeWindow(normalizeText(args.period)) const beginTimestamp = namedWindow?.begin || normalizeTimestampSeconds(args.beginTimestamp) const endTimestamp = namedWindow?.end || normalizeTimestampSeconds(args.endTimestamp) @@ -1149,7 +1545,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_session_glimpse') { + if (legacyName === 'ai_query_session_glimpse') { const sessionId = normalizeText(args.sessionId) if (!sessionId) return { success: false, error: 'sessionId 不能为空' } const limit = Math.max(1, Math.min(maxMessagesPerRequest, parseIntSafe(args.limit, 12))) 
@@ -1198,7 +1594,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_session_candidates') { + if (legacyName === 'ai_query_session_candidates') { const result = await wcdbService.aiQuerySessionCandidates({ keyword: normalizeText(args.keyword), limit: parseIntSafe(args.limit, 12), @@ -1222,7 +1618,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_timeline') { + if (legacyName === 'ai_query_timeline') { const result = await wcdbService.aiQueryTimeline({ sessionId: normalizeText(args.sessionId), keyword: normalizeText(args.keyword), @@ -1240,7 +1636,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_topic_stats') { + if (legacyName === 'ai_query_topic_stats') { const sessionIds = Array.isArray(args.sessionIds) ? args.sessionIds.map((value: any) => normalizeText(value)).filter(Boolean) : [] @@ -1256,7 +1652,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_source_refs') { + if (legacyName === 'ai_query_source_refs') { const sessionIds = Array.isArray(args.sessionIds) ? args.sessionIds.map((value: any) => normalizeText(value)).filter(Boolean) : [] @@ -1280,7 +1676,7 @@ class AiAnalysisService { } } - if (name === 'ai_query_top_contacts') { + if (legacyName === 'ai_query_top_contacts') { const limit = Math.max(1, Math.min(30, parseIntSafe(args.limit, 8))) const scanLimit = Math.max(limit, Math.min(800, parseIntSafe(args.scanLimit, 320))) let beginTimestamp = normalizeTimestampSeconds(args.beginTimestamp) @@ -1397,7 +1793,7 @@ class AiAnalysisService { } } - if (name === 'ai_fetch_message_briefs') { + if (legacyName === 'ai_fetch_message_briefs') { const items = Array.isArray(args.items) ? 
args.items .map((item: any) => ({ @@ -1464,7 +1860,7 @@ class AiAnalysisService { } } - if (name === 'ai_list_voice_messages') { + if (legacyName === 'ai_list_voice_messages') { const sessionId = normalizeText(args.sessionId) const list = await chatService.getResourceMessages({ sessionId: sessionId || undefined, @@ -1504,7 +1900,7 @@ class AiAnalysisService { } } - if (name === 'ai_transcribe_voice_messages') { + if (legacyName === 'ai_transcribe_voice_messages') { const requestsFromIds = this.parseVoiceIds(Array.isArray(args.ids) ? args.ids : []) const requestsFromItems = Array.isArray(args.items) ? args.items.map((item: any) => ({ @@ -1583,7 +1979,7 @@ class AiAnalysisService { } } - if (name === 'activate_skill') { + if (legacyName === 'activate_skill') { const skillId = normalizeText((args as any)?.skill_id) if (!skillId) return { success: false, error: '缺少 skill_id' } const skill = await aiSkillService.getConfig(skillId) @@ -1598,7 +1994,7 @@ class AiAnalysisService { } } - return { success: false, error: `未知工具: ${name}` } + return { success: false, error: `未知工具: ${canonicalName || name}` } } private async recordToolRun( @@ -2191,6 +2587,7 @@ class AiAnalysisService { injectedSkills: Set<string>, modelMessages: any[] ): Promise<void> { + const legacyToolName = toLegacyToolName(toolName) const map: Record<string, string> = { ai_query_time_window_activity: 'tool_time_window_activity', ai_query_session_glimpse: 'tool_session_glimpse', @@ -2203,7 +2600,7 @@ ai_list_voice_messages: 'tool_voice_list', ai_transcribe_voice_messages: 'tool_voice_transcribe' } - const skill = map[toolName] + const skill = map[legacyToolName] if (!skill || injectedSkills.has(skill)) return injectedSkills.add(skill) const skillText = await this.loadSkill(skill) @@ -2272,7 +2669,9 @@ class AiAnalysisService { const manualSkill = await aiSkillService.getConfig(manualSkillId) if (manualSkill) { const scopeMatched = manualSkill.chatScope === 'all' || manualSkill.chatScope === chatType - const
missingTools = manualSkill.tools.filter((toolName) => !allowedToolSet.has(toolName)) + const missingTools = manualSkill.tools + .map((toolName) => toCanonicalToolName(toolName)) + .filter((toolName) => !allowedToolSet.has(toolName)) if (scopeMatched && missingTools.length === 0) { manualSkillPrompt = normalizeText(manualSkill.prompt) } @@ -2280,7 +2679,10 @@ } } const enableAutoSkill = this.config.get('aiAgentEnableAutoSkill') === true const autoSkillMenu = !manualSkillPrompt && enableAutoSkill - ? await aiSkillService.getAutoSkillMenu(chatType, allowedToolNames) + ? await aiSkillService.getAutoSkillMenu( + chatType, + Array.from(new Set([...allowedToolNames, ...allowedToolNames.map((name) => toLegacyToolName(name))])) + ) : null const userMessageId = randomUUID() @@ -2333,7 +2735,6 @@ class AiAnalysisService { let finalText = '' let usage: SendMessageResult['usage'] = {} let lastAssistantText = '' - let hasToolExecution = false let protocolViolationCount = 0 for (let loop = 0; loop < MAX_TOOL_LOOPS; loop += 1) { @@ -2377,47 +2778,42 @@ if (cleanedAssistant) { lastAssistantText = cleanedAssistant } - if (!hasToolExecution) { - finalText = cleanedAssistant - break - } const delivery = this.parseFinalDelivery(llmRes.content) if (delivery.done && delivery.answer) { finalText = delivery.answer break } - if (!cleanedAssistant && loop < MAX_TOOL_LOOPS - 1) { - protocolViolationCount += 1 - this.emitRunEvent(runtime?.onRunEvent, { - runId, - conversationId, - stage: 'llm_round_result', - ts: Date.now(), - round: loop + 1, - message: `模型返回空响应,触发协议重试(${protocolViolationCount})`, - data: { protocolViolationCount } - }) - modelMessages.push({ - role: 'system', - content: [ - '协议约束:你不能输出空内容。', - `下一步必须二选一:1) 继续调用工具;2) 输出 ${FINAL_DONE_MARKER} + <final_answer>...</final_answer>。`, - '若证据不足,请先工具检索,不要停在中间状态。' - ].join('\n') - }) - continue - } + protocolViolationCount += 1 + const violationMessage = delivery.done + ?
`模型输出了 ${FINAL_DONE_MARKER} 但未提供有效 final_answer,继续执行协议回合(${protocolViolationCount})` + : `模型未输出结束标记,继续执行协议回合(${protocolViolationCount})` + this.emitRunEvent(runtime?.onRunEvent, { + runId, + conversationId, + stage: 'llm_round_result', + ts: Date.now(), + round: loop + 1, + message: violationMessage, + data: { + protocolViolationCount, + missingDoneMarker: !delivery.done, + emptyFinalAnswer: delivery.done && !delivery.answer + } + }) - if (!delivery.done && loop < MAX_TOOL_LOOPS - 1) { + if (loop < MAX_TOOL_LOOPS - 1) { this.emitRunEvent(runtime?.onRunEvent, { runId, conversationId, stage: 'llm_round_result', ts: Date.now(), round: loop + 1, - message: 'AI 尚未输出结束标记,继续执行协议回合', - data: { protocolReminder: true } + message: '追加协议提醒并继续下一轮推理', + data: { + protocolReminder: true, + protocolViolationCount + } }) if (cleanedAssistant) { modelMessages.push({ @@ -2434,12 +2830,10 @@ class AiAnalysisService { }) continue } - finalText = cleanedAssistant break } protocolViolationCount = 0 - hasToolExecution = true modelMessages.push({ role: 'assistant', content: llmRes.content || '', @@ -2465,7 +2859,11 @@ class AiAnalysisService { return { success: false, error: '任务已取消' } } - await this.ensureToolSkillInjected(call.name, injectedSkills, modelMessages) + const canonicalCallName = toCanonicalToolName(call.name) + const legacyCallName = toLegacyToolName(canonicalCallName) + const displayToolName = canonicalCallName || call.name + + await this.ensureToolSkillInjected(displayToolName, injectedSkills, modelMessages) const started = Date.now() let args: Record = {} @@ -2476,7 +2874,7 @@ class AiAnalysisService { } const trace: AiToolCallTrace = { - toolName: call.name, + toolName: displayToolName, args, status: 'ok', durationMs: 0 @@ -2487,28 +2885,48 @@ class AiAnalysisService { stage: 'tool_start', ts: Date.now(), round: loop + 1, - toolName: call.name, - message: `开始调用工具 ${call.name}`, + toolName: displayToolName, + message: `开始调用工具 ${displayToolName}`, data: { args } }) let 
toolResult: any = {} try { - if (!allowedToolSet.has(call.name)) { - toolResult = { success: false, error: `当前助手未授权工具: ${call.name}` } + if (!canonicalCallName) { + toolResult = { success: false, error: `未知工具: ${call.name}` } + } else if (!allowedToolSet.has(canonicalCallName)) { + toolResult = { success: false, error: `当前助手未授权工具: ${canonicalCallName}` } } else { - toolResult = await this.runTool(call.name, args, { userInput }) + toolResult = await this.runTool(canonicalCallName, args, { userInput }) } if (!toolResult?.success) { trace.status = 'error' trace.error = normalizeText(toolResult?.error, '工具执行失败') } else { - if (call.name === 'ai_query_time_window_activity') { + if (canonicalCallName === 'get_time_stats' || legacyCallName === 'ai_query_time_window_activity') { toolBundle.activeSessions = Array.isArray(toolResult.items) ? toolResult.items : [] - } else if (call.name === 'ai_query_session_glimpse') { - const rows = Array.isArray(toolResult.rows) ? toolResult.rows : [] + } else if ( + canonicalCallName === 'get_recent_messages' || + canonicalCallName === 'get_session_messages' || + legacyCallName === 'ai_query_session_glimpse' + ) { + const rows = Array.isArray(toolResult.rows) + ? toolResult.rows + : Array.isArray(toolResult.rawMessages) + ? toolResult.rawMessages + : Array.isArray(toolResult.messages) + ? 
toolResult.messages + : [] if (rows.length > 0) { - const merged = [...toolBundle.sessionGlimpses, ...rows] + const normalizedRows = rows.map((row: any) => ({ + sessionId: normalizeText(row.sessionId || row._session_id || row.session_id), + localId: parseIntSafe(row.localId || row.local_id || row.id), + createTime: parseIntSafe(row.createTime || row.create_time || row.timestamp), + sender: normalizeText(row.sender || row.senderName || row.sender_username), + localType: parseIntSafe(row.localType || row.local_type || row.type), + content: normalizeText(row.content || row.snippet) + })) + const merged = [...toolBundle.sessionGlimpses, ...normalizedRows] const dedup = new Map() for (const row of merged) { const key = `${normalizeText(row.sessionId || row._session_id)}:${parseIntSafe(row.localId || row.local_id)}:${parseIntSafe(row.createTime || row.create_time)}` @@ -2516,12 +2934,33 @@ class AiAnalysisService { } toolBundle.sessionGlimpses = Array.from(dedup.values()).slice(0, MAX_TOOL_RESULT_ROWS) } - } else if (call.name === 'ai_query_session_candidates') { - toolBundle.sessionCandidates = Array.isArray(toolResult.rows) ? toolResult.rows : [] - } else if (call.name === 'ai_query_timeline') { - const rows = Array.isArray(toolResult.rows) ? toolResult.rows : [] + } else if (canonicalCallName === 'search_sessions' || legacyCallName === 'ai_query_session_candidates') { + const rows = Array.isArray(toolResult.rows) + ? toolResult.rows + : Array.isArray(toolResult.sessions) + ? toolResult.sessions + : [] + toolBundle.sessionCandidates = rows + } else if ( + canonicalCallName === 'search_messages' || + canonicalCallName === 'deep_search_messages' || + legacyCallName === 'ai_query_timeline' + ) { + const rows = Array.isArray(toolResult.rows) + ? toolResult.rows + : Array.isArray(toolResult.rawMessages) + ? 
toolResult.rawMessages + : [] if (rows.length > 0) { - const merged = [...toolBundle.timelineRows, ...rows] + const normalizedRows = rows.map((row: any) => ({ + _session_id: normalizeText(row._session_id || row.sessionId || row.session_id), + local_id: parseIntSafe(row.local_id || row.localId || row.id), + create_time: parseIntSafe(row.create_time || row.createTime || row.timestamp), + sender_username: normalizeText(row.sender_username || row.sender || row.senderName), + local_type: parseIntSafe(row.local_type || row.localType || row.type), + content: normalizeText(row.content || row.snippet) + })) + const merged = [...toolBundle.timelineRows, ...normalizedRows] const dedup = new Map() for (const row of merged) { const key = `${normalizeText(row._session_id)}:${parseIntSafe(row.local_id)}:${parseIntSafe(row.create_time)}` @@ -2529,15 +2968,52 @@ class AiAnalysisService { } toolBundle.timelineRows = Array.from(dedup.values()).slice(0, MAX_TOOL_RESULT_ROWS) } - } else if (call.name === 'ai_query_topic_stats') { - toolBundle.topicStats = toolResult.data || {} - } else if (call.name === 'ai_query_source_refs') { - toolBundle.sourceRefs = toolResult.data || {} - } else if (call.name === 'ai_query_top_contacts') { - toolBundle.topContacts = Array.isArray(toolResult.items) ? toolResult.items : [] - } else if (call.name === 'ai_fetch_message_briefs') { - toolBundle.messageBriefs = Array.isArray(toolResult.rows) ? toolResult.rows : [] - } else if (call.name === 'ai_list_voice_messages') { + } else if (canonicalCallName === 'get_chat_overview' || legacyCallName === 'ai_query_topic_stats') { + toolBundle.topicStats = toolResult.data || toolResult || {} + } else if (canonicalCallName === 'get_session_summaries' || legacyCallName === 'ai_query_source_refs') { + const summaries = Array.isArray(toolResult.sessions) + ? toolResult.sessions + : Array.isArray(toolResult.rows) + ? 
toolResult.rows + : [] + const totalMessages = summaries.reduce((sum: number, row: any) => ( + sum + parseIntSafe(row.messageCount || row.message_count) + ), 0) + toolBundle.sourceRefs = toolResult.data || { + range: { + begin: normalizeTimestampSeconds(args.beginTimestamp ?? args.startTs), + end: normalizeTimestampSeconds(args.endTimestamp ?? args.endTs) + }, + session_count: parseIntSafe(toolResult.total, summaries.length), + message_count: totalMessages, + db_refs: [] + } + if (summaries.length > 0) { + toolBundle.sessionCandidates = summaries + } + } else if ( + canonicalCallName === 'get_member_stats' || + canonicalCallName === 'get_members' || + legacyCallName === 'ai_query_top_contacts' + ) { + if (Array.isArray(toolResult.items)) { + toolBundle.topContacts = toolResult.items + } else if (Array.isArray(toolResult.members)) { + toolBundle.topContacts = toolResult.members.map((item: any) => ({ + sessionId: normalizeText(item.platform_id || item.sessionId || item.member_id), + displayName: normalizeText(item.display_name || item.displayName || item.platform_id), + messageCount: parseIntSafe(item.messageCount || item.message_count || 0) + })) + } else { + toolBundle.topContacts = [] + } + } else if (canonicalCallName === 'get_message_context' || legacyCallName === 'ai_fetch_message_briefs') { + toolBundle.messageBriefs = Array.isArray(toolResult.rows) + ? toolResult.rows + : Array.isArray(toolResult.rawMessages) + ? 
toolResult.rawMessages + : [] + } else if (canonicalCallName === 'ai_list_voice_messages' || legacyCallName === 'ai_list_voice_messages') { if (Array.isArray(toolResult.items)) { toolBundle.voiceCatalog = toolResult.items } else if (Array.isArray(toolResult.ids)) { @@ -2545,7 +3021,7 @@ class AiAnalysisService { } else { toolBundle.voiceCatalog = [] } - } else if (call.name === 'ai_transcribe_voice_messages') { + } else if (canonicalCallName === 'ai_transcribe_voice_messages' || legacyCallName === 'ai_transcribe_voice_messages') { toolBundle.voiceTranscripts = Array.isArray(toolResult.results) ? toolResult.results : [] } } @@ -2565,12 +3041,12 @@ class AiAnalysisService { stage: trace.status === 'ok' ? 'tool_done' : 'tool_error', ts: Date.now(), round: loop + 1, - toolName: call.name, + toolName: displayToolName, status: trace.status, durationMs: trace.durationMs, message: trace.status === 'ok' - ? `工具 ${call.name} 完成` - : `工具 ${call.name} 执行失败`, + ? `工具 ${displayToolName} 完成` + : `工具 ${displayToolName} 执行失败`, data: { args, result: this.compactToolResultForStep(toolResult), @@ -2583,7 +3059,7 @@ class AiAnalysisService { tool_call_id: call.id, content: JSON.stringify(toolResult || {}) }) - if (call.name === 'activate_skill' && toolResult?.success && normalizeText(toolResult?.prompt)) { + if (canonicalCallName === 'activate_skill' && toolResult?.success && normalizeText(toolResult?.prompt)) { modelMessages.push({ role: 'system', content: `active_skill_from_tool:\n${normalizeText(toolResult.prompt)}` @@ -2593,10 +3069,16 @@ class AiAnalysisService { } if (!finalText) { - finalText = lastAssistantText - } - if (!finalText) { - finalText = '模型未返回可交付文本。我会保留上下文,你可以直接继续追问,我将继续执行工具链直到交付结果。' + const tail = lastAssistantText ? 
`(最后一轮输出:${lastAssistantText.slice(0, 200)})` : '' + const errorMessage = `模型在 ${MAX_TOOL_LOOPS} 轮内未输出 ${FINAL_DONE_MARKER} + ,任务终止${tail}` + this.emitRunEvent(runtime?.onRunEvent, { + runId, + conversationId, + stage: 'error', + ts: Date.now(), + message: errorMessage + }) + return { success: false, error: errorMessage } } this.emitRunEvent(runtime?.onRunEvent, { diff --git a/electron/services/aiAnalysisSkills/base.md b/electron/services/aiAnalysisSkills/base.md index 4293375..f6d522e 100644 --- a/electron/services/aiAnalysisSkills/base.md +++ b/electron/services/aiAnalysisSkills/base.md @@ -10,7 +10,7 @@ - 先范围后细节:优先定位会话/时间范围,再拉取具体时间轴或消息。 - 可解释性:最终结论尽量附带来源范围与统计口径。 - 语音消息不能臆测:必须先拿语音 ID,再点名转写,再总结。 -- 联系人排行题(“谁聊得最多/最常联系”)命中 ai_query_top_contacts 后,必须直接给出“前N名+消息数”。 +- 联系人排行题(“谁聊得最多/最常联系”)命中 get_member_stats 后,必须直接给出“前N名+消息数”。 - 除非用户明确要求,联系人排行默认不包含群聊和公众号。 - 用户提到“最近/近期/lately/recent”但未给时间窗时,默认按近30天口径统计并写明口径。 - 用户提到联系人简称(如“lr”)时,先把它当联系人缩写处理,优先命中个人会话,不要默认落到群聊。 @@ -18,8 +18,8 @@ Agent执行要求: - 用户输入直接进入推理,本地不做关键词分流,你自主决定工具计划。 -- 当用户说“今天凌晨/昨晚/某段时间的聊天”,优先调用 ai_query_time_window_activity。 -- 拿到活跃会话后,调用 ai_query_session_glimpse 对多个会话逐个抽样阅读,不要只读一个会话就停止。 +- 当用户说“今天凌晨/昨晚/某段时间的聊天”,优先调用 get_time_stats。 +- 拿到活跃会话后,调用 get_recent_messages 对多个会话逐个抽样阅读,不要只读一个会话就停止。 - 如果初步探索后用户目标仍模糊,主动提出 1 个关键澄清问题继续多轮对话。 - 仅当你确认任务完成时,输出结束标记 `[[WF_DONE]]`,并紧跟 `...`。 - 若还未完成,不要输出结束标记,继续调用工具。 diff --git a/electron/services/aiAnalysisSkills/tool_message_briefs.md b/electron/services/aiAnalysisSkills/tool_message_briefs.md index 428e4df..4dd0324 100644 --- a/electron/services/aiAnalysisSkills/tool_message_briefs.md +++ b/electron/services/aiAnalysisSkills/tool_message_briefs.md @@ -1,4 +1,4 @@ -工具:ai_fetch_message_briefs +工具:get_message_context 何时用: - 需要核对少量关键消息原文,避免全量展开。 diff --git a/electron/services/aiAnalysisSkills/tool_session_candidates.md b/electron/services/aiAnalysisSkills/tool_session_candidates.md index 46a1930..56ca27f 100644 --- a/electron/services/aiAnalysisSkills/tool_session_candidates.md +++ 
b/electron/services/aiAnalysisSkills/tool_session_candidates.md @@ -1,4 +1,4 @@ -工具:ai_query_session_candidates +工具:search_sessions 何时用: - 用户未明确具体会话,但给了关键词/关系词(如“老婆”“买车”)。 diff --git a/electron/services/aiAnalysisSkills/tool_session_glimpse.md b/electron/services/aiAnalysisSkills/tool_session_glimpse.md index 3445b93..9dd5782 100644 --- a/electron/services/aiAnalysisSkills/tool_session_glimpse.md +++ b/electron/services/aiAnalysisSkills/tool_session_glimpse.md @@ -1,4 +1,4 @@ -工具:ai_query_session_glimpse +工具:get_recent_messages 何时用: - 已确定候选会话,需要“先看一点”理解上下文。 diff --git a/electron/services/aiAnalysisSkills/tool_source_refs.md b/electron/services/aiAnalysisSkills/tool_source_refs.md index ea0a0ee..4def849 100644 --- a/electron/services/aiAnalysisSkills/tool_source_refs.md +++ b/electron/services/aiAnalysisSkills/tool_source_refs.md @@ -1,4 +1,4 @@ -工具:ai_query_source_refs +工具:get_session_summaries 何时用: - 输出总结或分析后,用于来源说明与可解释卡片。 diff --git a/electron/services/aiAnalysisSkills/tool_time_window_activity.md b/electron/services/aiAnalysisSkills/tool_time_window_activity.md index d6ed088..15a124d 100644 --- a/electron/services/aiAnalysisSkills/tool_time_window_activity.md +++ b/electron/services/aiAnalysisSkills/tool_time_window_activity.md @@ -1,9 +1,9 @@ -工具:ai_query_time_window_activity +工具:get_time_stats 何时用: - 用户提到“今天凌晨/昨晚/某个时间段”的聊天分析。 Agent策略: - 第一步必须先扫时间窗活跃会话,不要直接下结论。 -- 拿到活跃会话后,再调用 ai_query_session_glimpse 逐个会话抽样阅读。 +- 拿到活跃会话后,再调用 get_recent_messages 逐个会话抽样阅读。 - 若用户目标仍不清晰,先追问 1 个关键澄清问题再继续。 diff --git a/electron/services/aiAnalysisSkills/tool_timeline.md b/electron/services/aiAnalysisSkills/tool_timeline.md index 1fd0c14..2defcbd 100644 --- a/electron/services/aiAnalysisSkills/tool_timeline.md +++ b/electron/services/aiAnalysisSkills/tool_timeline.md @@ -1,4 +1,4 @@ -工具:ai_query_timeline +工具:search_messages 何时用: - 回忆事件经过、梳理时间线、提取关键节点。 @@ -6,4 +6,4 @@ 调用建议: - 默认 detailLevel=minimal。 - 先小批次 limit(40~120),不够再分页 offset。 -- 需要引用原文证据时,可搭配 ai_fetch_message_briefs。 +- 
需要引用原文证据时,可搭配 get_message_context。 diff --git a/electron/services/aiAnalysisSkills/tool_top_contacts.md b/electron/services/aiAnalysisSkills/tool_top_contacts.md index 21086f8..19b5ca0 100644 --- a/electron/services/aiAnalysisSkills/tool_top_contacts.md +++ b/electron/services/aiAnalysisSkills/tool_top_contacts.md @@ -1,4 +1,4 @@ -工具:ai_query_top_contacts +工具:get_member_stats 何时用: - 用户问“谁联系最密切”“谁聊得最多”“最常联系的是谁”。 diff --git a/electron/services/aiAnalysisSkills/tool_topic_stats.md b/electron/services/aiAnalysisSkills/tool_topic_stats.md index 1b3d3dc..7c38d18 100644 --- a/electron/services/aiAnalysisSkills/tool_topic_stats.md +++ b/electron/services/aiAnalysisSkills/tool_topic_stats.md @@ -1,4 +1,4 @@ -工具:ai_query_topic_stats +工具:get_chat_overview 何时用: - 用户问“多少、占比、趋势、对比”。 diff --git a/electron/services/aiAssistantService.ts b/electron/services/aiAssistantService.ts index f0075f0..7f08e30 100644 --- a/electron/services/aiAssistantService.ts +++ b/electron/services/aiAssistantService.ts @@ -40,16 +40,16 @@ presetQuestions: - 帮我总结一下最近一周的重要聊天 - 帮我找一下关于“旅游”的讨论 allowedBuiltinTools: - - ai_query_time_window_activity - - ai_query_session_candidates - - ai_query_session_glimpse - - ai_query_timeline - - ai_fetch_message_briefs + - get_time_stats + - search_sessions + - get_recent_messages + - search_messages + - get_message_context - ai_list_voice_messages - ai_transcribe_voice_messages - - ai_query_topic_stats - - ai_query_source_refs - - ai_query_top_contacts + - get_chat_overview + - get_session_summaries + - get_member_stats --- 你是 WeFlow 的全局聊天分析助手。请使用工具获取证据,给出简洁、准确、可执行的结论。 @@ -70,16 +70,16 @@ presetQuestions: - Who are the most active contacts? 
- Summarize my key chat topics this week allowedBuiltinTools: - - ai_query_time_window_activity - - ai_query_session_candidates - - ai_query_session_glimpse - - ai_query_timeline - - ai_fetch_message_briefs + - get_time_stats + - search_sessions + - get_recent_messages + - search_messages + - get_message_context - ai_list_voice_messages - ai_transcribe_voice_messages - - ai_query_topic_stats - - ai_query_source_refs - - ai_query_top_contacts + - get_chat_overview + - get_session_summaries + - get_member_stats --- You are WeFlow's global chat analysis assistant. @@ -95,16 +95,16 @@ presetQuestions: - 一番アクティブな相手は誰? - 今週の重要な会話を要約して allowedBuiltinTools: - - ai_query_time_window_activity - - ai_query_session_candidates - - ai_query_session_glimpse - - ai_query_timeline - - ai_fetch_message_briefs + - get_time_stats + - search_sessions + - get_recent_messages + - search_messages + - get_message_context - ai_list_voice_messages - ai_transcribe_voice_messages - - ai_query_topic_stats - - ai_query_source_refs - - ai_query_top_contacts + - get_chat_overview + - get_session_summaries + - get_member_stats --- あなたは WeFlow のグローバルチャット分析アシスタントです。 @@ -231,16 +231,16 @@ function toMarkdown(config: AssistantConfigFull): string { function defaultBuiltinToolCatalog(): Array<{ name: string; category: AssistantToolCategory }> { return [ - { name: 'ai_query_time_window_activity', category: 'core' }, - { name: 'ai_query_session_candidates', category: 'core' }, - { name: 'ai_query_session_glimpse', category: 'core' }, - { name: 'ai_query_timeline', category: 'core' }, - { name: 'ai_fetch_message_briefs', category: 'core' }, + { name: 'get_time_stats', category: 'core' }, + { name: 'search_sessions', category: 'core' }, + { name: 'get_recent_messages', category: 'core' }, + { name: 'search_messages', category: 'core' }, + { name: 'get_message_context', category: 'core' }, { name: 'ai_list_voice_messages', category: 'core' }, { name: 'ai_transcribe_voice_messages', category: 'core' }, - { 
name: 'ai_query_topic_stats', category: 'analysis' }, - { name: 'ai_query_source_refs', category: 'analysis' }, - { name: 'ai_query_top_contacts', category: 'analysis' }, + { name: 'get_chat_overview', category: 'analysis' }, + { name: 'get_session_summaries', category: 'analysis' }, + { name: 'get_member_stats', category: 'analysis' }, { name: 'activate_skill', category: 'analysis' } ] } diff --git a/electron/services/aiSkillService.ts b/electron/services/aiSkillService.ts index 884438e..d079d5b 100644 --- a/electron/services/aiSkillService.ts +++ b/electron/services/aiSkillService.ts @@ -32,18 +32,18 @@ tags: - evidence chatScope: all tools: - - ai_query_time_window_activity - - ai_query_session_candidates - - ai_query_session_glimpse - - ai_query_timeline - - ai_fetch_message_briefs - - ai_query_source_refs + - get_time_stats + - search_sessions + - get_recent_messages + - search_messages + - get_message_context + - get_session_summaries --- 你是“深度时间线追踪”技能。 执行步骤: 1. 先按时间窗扫描活跃会话,必要时补关键词筛选候选会话。 2. 对候选会话先抽样,再拉取时间轴。 -3. 对关键节点用 ai_fetch_message_briefs 校对原文。 +3. 对关键节点用 get_message_context 校对原文。 4. 最后输出“结论 + 关键节点 + 来源范围”。` const SKILL_CONTACT_FOCUS_MD = `--- @@ -55,17 +55,17 @@ tags: - relation chatScope: private tools: - - ai_query_top_contacts - - ai_query_topic_stats - - ai_query_session_glimpse - - ai_query_timeline - - ai_query_source_refs + - get_member_stats + - get_chat_overview + - get_recent_messages + - search_messages + - get_session_summaries --- 你是“联系人关系聚焦”技能。 执行步骤: -1. 优先调用 ai_query_top_contacts 得到候选联系人排名。 +1. 优先调用 get_member_stats 得到候选联系人排名。 2. 针对 Top 联系人读取抽样消息并补充时间轴。 -3. 如果用户问题涉及“变化趋势”,补 ai_query_topic_stats。 +3. 如果用户问题涉及“变化趋势”,补 get_chat_overview。 4. 
输出时必须给出对比口径(时间窗、样本范围、消息数量)。` const SKILL_VOICE_AUDIT_MD = `--- @@ -79,7 +79,7 @@ chatScope: all tools: - ai_list_voice_messages - ai_transcribe_voice_messages - - ai_query_source_refs + - get_session_summaries --- 你是“语音证据审计”技能。 硬规则: diff --git a/electron/services/chatService.ts b/electron/services/chatService.ts index 90f2555..a3710c1 100644 --- a/electron/services/chatService.ts +++ b/electron/services/chatService.ts @@ -174,6 +174,36 @@ interface GetContactsOptions { lite?: boolean } +interface AiTimeFilter { + startTs?: number + endTs?: number +} + +interface AiMessageResult { + id: number + localId: number + sessionId: string + senderName: string + senderPlatformId: string + senderUsername: string + content: string + timestamp: number + type: number + isSend: number | null + replyToMessageId: string | null + replyToContent: string | null + replyToSenderName: string | null +} + +interface AiSessionSearchResult { + id: string + startTs: number + endTs: number + messageCount: number + isComplete: boolean + previewMessages: AiMessageResult[] +} + interface ExportSessionStats { totalMessages: number voiceMessages: number @@ -8474,6 +8504,451 @@ class ChatService { } } + private normalizeAiFilter(filter?: AiTimeFilter): { begin: number; end: number } { + const begin = this.normalizeTimestampSeconds(Number(filter?.startTs || 0)) + const end = this.normalizeTimestampSeconds(Number(filter?.endTs || 0)) + return { begin, end } + } + + private hashSenderId(senderUsername: string): number { + const text = String(senderUsername || '').trim().toLowerCase() + if (!text) return 0 + let hash = 5381 + for (let i = 0; i < text.length; i += 1) { + hash = ((hash << 5) + hash + text.charCodeAt(i)) | 0 + } + return Math.abs(hash) + } + + private messageMatchesKeywords(message: Message, keywords?: string[]): boolean { + if (!Array.isArray(keywords) || keywords.length === 0) return true + const text = String(message.parsedContent || message.rawContent || '').toLowerCase() + if (!text) 
return false + return keywords.every((keyword) => { + const token = String(keyword || '').trim().toLowerCase() + if (!token) return true + return text.includes(token) + }) + } + + private toAiMessage(sessionId: string, message: Message): AiMessageResult { + const senderUsername = String(message.senderUsername || '').trim() + const senderName = senderUsername || (message.isSend === 1 ? '我' : '未知成员') + const content = String(message.parsedContent || message.rawContent || '').trim() + return { + id: message.localId, + localId: message.localId, + sessionId, + senderName, + senderPlatformId: senderUsername, + senderUsername, + content, + timestamp: Number(message.createTime || 0), + type: Number(message.localType || 0), + isSend: message.isSend, + replyToMessageId: message.messageKey || null, + replyToContent: message.quotedContent || null, + replyToSenderName: message.quotedSender || null + } + } + + private async fetchMessagesByCursorWithKey( + sessionId: string, + key: { sortSeq?: number; createTime?: number; localId?: number }, + limit: number, + ascending: boolean, + beginTimestamp = 0, + endTimestamp = 0 + ): Promise<{ success: boolean; messages?: Message[]; hasMore?: boolean; error?: string }> { + const batchSize = Math.max(limit + 8, Math.min(240, limit * 2)) + const cursorResult = await wcdbService.openMessageCursorWithKey( + sessionId, + batchSize, + ascending, + beginTimestamp, + endTimestamp, + key + ) + if (!cursorResult.success || !cursorResult.cursor) { + return { success: false, error: cursorResult.error || '创建游标失败' } + } + + try { + const collected = await this.collectVisibleMessagesFromCursor(sessionId, cursorResult.cursor, limit) + if (!collected.success) { + return { success: false, error: collected.error || '读取消息失败' } + } + return { + success: true, + messages: collected.messages || [], + hasMore: collected.hasMore === true + } + } finally { + await wcdbService.closeMessageCursor(cursorResult.cursor).catch(() => {}) + } + } + + async 
getRecentMessagesForAI( + sessionId: string, + filter?: AiTimeFilter, + limit = 100 + ): Promise<{ messages: AiMessageResult[]; total: number }> { + const normalizedLimit = Math.max(1, Math.min(500, Number(limit || 100))) + const { begin, end } = this.normalizeAiFilter(filter) + const result = await this.getLatestMessages(sessionId, normalizedLimit) + if (!result.success || !Array.isArray(result.messages)) { + return { messages: [], total: 0 } + } + const bounded = result.messages.filter((message) => { + if (begin > 0 && Number(message.createTime || 0) < begin) return false + if (end > 0 && Number(message.createTime || 0) > end) return false + return String(message.parsedContent || message.rawContent || '').trim().length > 0 + }) + return { + messages: bounded.slice(-normalizedLimit).map((message) => this.toAiMessage(sessionId, message)), + total: bounded.length + } + } + + async getMessagesBeforeForAI( + sessionId: string, + beforeId: number, + limit = 50, + filter?: AiTimeFilter, + senderId?: number, + keywords?: string[] + ): Promise<{ messages: AiMessageResult[]; hasMore: boolean }> { + const base = await this.getMessageById(sessionId, Number(beforeId)) + if (!base.success || !base.message) { + return { messages: [], hasMore: false } + } + const normalizedLimit = Math.max(1, Math.min(300, Number(limit || 50))) + const { begin, end } = this.normalizeAiFilter(filter) + const cursor = await this.fetchMessagesByCursorWithKey( + sessionId, + { + sortSeq: base.message.sortSeq, + createTime: base.message.createTime, + localId: base.message.localId + }, + Math.max(normalizedLimit * 2, normalizedLimit + 12), + false, + begin, + end + ) + if (!cursor.success) { + return { messages: [], hasMore: false } + } + const filtered = (cursor.messages || []).filter((message) => { + if (senderId && senderId > 0) { + const hashed = this.hashSenderId(String(message.senderUsername || '')) + if (hashed !== senderId) return false + } + return this.messageMatchesKeywords(message, 
keywords) + }) + const sliced = filtered.slice(-normalizedLimit) + return { + messages: sliced.map((message) => this.toAiMessage(sessionId, message)), + hasMore: cursor.hasMore === true || filtered.length > normalizedLimit + } + } + + async getMessagesAfterForAI( + sessionId: string, + afterId: number, + limit = 50, + filter?: AiTimeFilter, + senderId?: number, + keywords?: string[] + ): Promise<{ messages: AiMessageResult[]; hasMore: boolean }> { + const base = await this.getMessageById(sessionId, Number(afterId)) + if (!base.success || !base.message) { + return { messages: [], hasMore: false } + } + const normalizedLimit = Math.max(1, Math.min(300, Number(limit || 50))) + const { begin, end } = this.normalizeAiFilter(filter) + const cursor = await this.fetchMessagesByCursorWithKey( + sessionId, + { + sortSeq: base.message.sortSeq, + createTime: base.message.createTime, + localId: base.message.localId + }, + Math.max(normalizedLimit * 2, normalizedLimit + 12), + true, + begin, + end + ) + if (!cursor.success) { + return { messages: [], hasMore: false } + } + const filtered = (cursor.messages || []).filter((message) => { + if (senderId && senderId > 0) { + const hashed = this.hashSenderId(String(message.senderUsername || '')) + if (hashed !== senderId) return false + } + return this.messageMatchesKeywords(message, keywords) + }) + const sliced = filtered.slice(0, normalizedLimit) + return { + messages: sliced.map((message) => this.toAiMessage(sessionId, message)), + hasMore: cursor.hasMore === true || filtered.length > normalizedLimit + } + } + + async getMessageContextForAI( + sessionId: string, + messageIds: number | number[], + contextSize = 20 + ): Promise { + const ids = Array.isArray(messageIds) ? 
messageIds : [messageIds] + const uniqueIds = Array.from(new Set(ids.map((id) => Number(id)).filter((id) => Number.isFinite(id) && id > 0))) + if (uniqueIds.length === 0) return [] + const size = Math.max(0, Math.min(120, Number(contextSize || 20))) + const merged = new Map() + + for (const id of uniqueIds) { + const target = await this.getMessageById(sessionId, id) + if (target.success && target.message) { + merged.set(id, this.toAiMessage(sessionId, target.message)) + } + if (size <= 0) continue + const [before, after] = await Promise.all([ + this.getMessagesBeforeForAI(sessionId, id, size), + this.getMessagesAfterForAI(sessionId, id, size) + ]) + for (const item of before.messages) merged.set(item.id, item) + for (const item of after.messages) merged.set(item.id, item) + } + + return Array.from(merged.values()).sort((a, b) => { + if (a.timestamp !== b.timestamp) return a.timestamp - b.timestamp + return a.id - b.id + }) + } + + async getSearchMessageContextForAI( + sessionId: string, + messageIds: number[], + contextBefore = 2, + contextAfter = 2 + ): Promise { + const uniqueIds = Array.from(new Set((messageIds || []).map((id) => Number(id)).filter((id) => Number.isFinite(id) && id > 0))) + if (uniqueIds.length === 0) return [] + const beforeLimit = Math.max(0, Math.min(30, Number(contextBefore || 2))) + const afterLimit = Math.max(0, Math.min(30, Number(contextAfter || 2))) + const merged = new Map() + + for (const id of uniqueIds) { + const target = await this.getMessageById(sessionId, id) + if (target.success && target.message) { + merged.set(id, this.toAiMessage(sessionId, target.message)) + } + const [before, after] = await Promise.all([ + beforeLimit > 0 ? this.getMessagesBeforeForAI(sessionId, id, beforeLimit) : Promise.resolve({ messages: [], hasMore: false }), + afterLimit > 0 ? 
this.getMessagesAfterForAI(sessionId, id, afterLimit) : Promise.resolve({ messages: [], hasMore: false }) + ]) + for (const item of before.messages) merged.set(item.id, item) + for (const item of after.messages) merged.set(item.id, item) + } + + return Array.from(merged.values()).sort((a, b) => { + if (a.timestamp !== b.timestamp) return a.timestamp - b.timestamp + return a.id - b.id + }) + } + + async getConversationBetweenForAI( + sessionId: string, + memberId1: number, + memberId2: number, + filter?: AiTimeFilter, + limit = 100 + ): Promise<{ messages: AiMessageResult[]; total: number; member1Name: string; member2Name: string }> { + const normalizedLimit = Math.max(1, Math.min(500, Number(limit || 100))) + const { begin, end } = this.normalizeAiFilter(filter) + const sample = await this.getMessages(sessionId, 0, Math.max(600, normalizedLimit * 8), begin, end, false) + if (!sample.success || !Array.isArray(sample.messages) || sample.messages.length === 0) { + return { messages: [], total: 0, member1Name: '', member2Name: '' } + } + + const idSet = new Set([Number(memberId1), Number(memberId2)].filter((id) => Number.isFinite(id) && id > 0)) + const filtered = sample.messages.filter((message) => { + const senderId = this.hashSenderId(String(message.senderUsername || '')) + return idSet.has(senderId) && String(message.parsedContent || message.rawContent || '').trim().length > 0 + }) + const picked = filtered.slice(-normalizedLimit) + const names = Array.from(new Set(picked.map((message) => String(message.senderUsername || '').trim()).filter(Boolean))) + return { + messages: picked.map((message) => this.toAiMessage(sessionId, message)), + total: filtered.length, + member1Name: names[0] || '', + member2Name: names[1] || names[0] || '' + } + } + + async searchSessionsForAI( + _sessionId: string, + keywords?: string[], + timeFilter?: AiTimeFilter, + limit = 20, + previewCount = 5 + ): Promise { + const normalizedLimit = Math.max(1, Math.min(60, Number(limit || 20))) + 
const normalizedPreview = Math.max(1, Math.min(20, Number(previewCount || 5))) + const { begin, end } = this.normalizeAiFilter(timeFilter) + const tokenList = Array.from(new Set((keywords || []).map((keyword) => String(keyword || '').trim()).filter(Boolean))) + + const sessionsResult = await this.getSessions() + if (!sessionsResult.success || !Array.isArray(sessionsResult.sessions)) return [] + const sessionMap = new Map() + for (const session of sessionsResult.sessions) { + const sid = String(session.username || '').trim() + if (!sid) continue + sessionMap.set(sid, session) + } + + const rows: Array<{ sessionId: string; hitCount: number }> = [] + if (tokenList.length > 0) { + const native = await wcdbService.aiQuerySessionCandidates({ + keyword: tokenList.join(' '), + limit: normalizedLimit * 4, + beginTimestamp: begin, + endTimestamp: end + }) + if (native.success && Array.isArray(native.rows)) { + for (const row of native.rows as Record[]) { + const sid = String(row.session_id || row._session_id || row.sessionId || '').trim() + if (!sid) continue + rows.push({ + sessionId: sid, + hitCount: this.toSafeInt(row.hit_count ?? row.count ?? row.message_count, 0) + }) + } + } + } + + const candidateIds = rows.length > 0 + ? Array.from(new Set(rows.map((item) => item.sessionId))) + : sessionsResult.sessions + .filter((session) => { + if (begin > 0 && Number(session.lastTimestamp || session.sortTimestamp || 0) < begin) return false + if (end > 0 && Number(session.lastTimestamp || session.sortTimestamp || 0) > end) return false + return true + }) + .slice(0, normalizedLimit * 2) + .map((session) => String(session.username || '').trim()) + .filter(Boolean) + + const output: AiSessionSearchResult[] = [] + for (const sid of candidateIds.slice(0, normalizedLimit)) { + const latest = await this.getLatestMessages(sid, normalizedPreview) + const messages = Array.isArray(latest.messages) ? 
latest.messages : [] + const mapped = messages.map((message) => this.toAiMessage(sid, message)).slice(-normalizedPreview) + const hitRow = rows.find((item) => item.sessionId === sid) + const session = sessionMap.get(sid) + const tsList = mapped.map((item) => item.timestamp).filter((value) => Number.isFinite(value) && value > 0) + const startTs = tsList.length > 0 ? Math.min(...tsList) : 0 + const endTs = tsList.length > 0 ? Math.max(...tsList) : Number(session?.lastTimestamp || session?.sortTimestamp || 0) + output.push({ + id: sid, + startTs, + endTs, + messageCount: hitRow?.hitCount || mapped.length, + isComplete: mapped.length <= normalizedPreview, + previewMessages: mapped + }) + } + + return output + } + + async getSessionMessagesForAI( + _sessionId: string, + chatSessionId: string | number, + limit = 500 + ): Promise<{ + sessionId: string + startTs: number + endTs: number + messageCount: number + returnedCount: number + participants: string[] + messages: AiMessageResult[] + } | null> { + const sid = String(chatSessionId || '').trim() + if (!sid) return null + const normalizedLimit = Math.max(1, Math.min(1000, Number(limit || 500))) + const latest = await this.getLatestMessages(sid, normalizedLimit) + if (!latest.success || !Array.isArray(latest.messages)) return null + const mapped = latest.messages.map((message) => this.toAiMessage(sid, message)) + const tsList = mapped.map((item) => item.timestamp).filter((value) => Number.isFinite(value) && value > 0) + const count = await this.getMessageCount(sid) + return { + sessionId: sid, + startTs: tsList.length > 0 ? Math.min(...tsList) : 0, + endTs: tsList.length > 0 ? Math.max(...tsList) : 0, + messageCount: count.success ? 
Number(count.count || mapped.length) : mapped.length, + returnedCount: mapped.length, + participants: Array.from(new Set(mapped.map((item) => item.senderName).filter(Boolean))), + messages: mapped + } + } + + async getSessionSummariesForAI( + _sessionId: string, + options?: { + sessionIds?: string[] + limit?: number + previewCount?: number + } + ): Promise> { + const normalizedLimit = Math.max(1, Math.min(60, Number(options?.limit || 20))) + const previewCount = Math.max(1, Math.min(20, Number(options?.previewCount || 3))) + const sessionsResult = await this.getSessions() + if (!sessionsResult.success || !Array.isArray(sessionsResult.sessions)) return [] + const explicitIds = Array.isArray(options?.sessionIds) + ? options?.sessionIds.map((value) => String(value || '').trim()).filter(Boolean) + : [] + const candidates = explicitIds.length > 0 + ? sessionsResult.sessions.filter((session) => explicitIds.includes(String(session.username || '').trim())) + : sessionsResult.sessions.slice(0, normalizedLimit) + + const summaries: Array<{ + sessionId: string + sessionName: string + messageCount: number + latestTs: number + previewMessages: AiMessageResult[] + }> = [] + + for (const session of candidates.slice(0, normalizedLimit)) { + const sid = String(session.username || '').trim() + if (!sid) continue + const [countResult, latestResult] = await Promise.all([ + this.getMessageCount(sid), + this.getLatestMessages(sid, previewCount) + ]) + const previewMessages = Array.isArray(latestResult.messages) + ? latestResult.messages.map((message) => this.toAiMessage(sid, message)).slice(-previewCount) + : [] + summaries.push({ + sessionId: sid, + sessionName: String(session.displayName || sid), + messageCount: countResult.success ? 
Number(countResult.count || previewMessages.length) : previewMessages.length, + latestTs: Number(session.lastTimestamp || session.sortTimestamp || 0), + previewMessages + }) + } + return summaries + } + async getMessageById(sessionId: string, localId: number): Promise<{ success: boolean; message?: Message; error?: string }> { try { const nativeResult = await wcdbService.getMessageById(sessionId, localId) diff --git a/electron/services/wcdbCore.ts b/electron/services/wcdbCore.ts index de46281..b35775e 100644 --- a/electron/services/wcdbCore.ts +++ b/electron/services/wcdbCore.ts @@ -62,6 +62,8 @@ export class WcdbCore { private wcdbGetMessageDates: any = null private wcdbOpenMessageCursor: any = null private wcdbOpenMessageCursorLite: any = null + private wcdbOpenMessageCursorWithKey: any = null + private wcdbOpenMessageCursorLiteWithKey: any = null private wcdbFetchMessageBatch: any = null private wcdbCloseMessageCursor: any = null private wcdbGetLogs: any = null @@ -89,6 +91,15 @@ export class WcdbCore { private wcdbAiQueryTimeline: any = null private wcdbAiQueryTopicStats: any = null private wcdbAiQuerySourceRefs: any = null + private wcdbAiGetRecentMessages: any = null + private wcdbAiGetMessagesBefore: any = null + private wcdbAiGetMessagesAfter: any = null + private wcdbAiGetMessageContext: any = null + private wcdbAiGetSearchMessageContext: any = null + private wcdbAiGetConversationBetween: any = null + private wcdbAiSearchSessions: any = null + private wcdbAiGetSessionMessages: any = null + private wcdbAiGetSessionSummaries: any = null private wcdbGetSnsTimeline: any = null private wcdbGetSnsAnnualStats: any = null private wcdbGetSnsUsernames: any = null @@ -947,6 +958,15 @@ export class WcdbCore { // wcdb_status wcdb_open_message_cursor(wcdb_handle handle, const char* session_id, int32_t batch_size, int32_t ascending, int32_t begin_timestamp, int32_t end_timestamp, wcdb_cursor* out_cursor) this.wcdbOpenMessageCursor = this.lib.func('int32 
wcdb_open_message_cursor(int64 handle, const char* sessionId, int32 batchSize, int32 ascending, int32 beginTimestamp, int32 endTimestamp, _Out_ int64* outCursor)') + // wcdb_status wcdb_open_message_cursor_with_key(...) + try { + this.wcdbOpenMessageCursorWithKey = this.lib.func( + 'int32 wcdb_open_message_cursor_with_key(int64 handle, const char* sessionId, int32 batchSize, int32 ascending, int32 beginTimestamp, int32 endTimestamp, int32 keyValid, int64 keySortSeq, int64 keyCreateTime, int64 keyLocalId, _Out_ int64* outCursor)' + ) + } catch { + this.wcdbOpenMessageCursorWithKey = null + } + // wcdb_status wcdb_open_message_cursor_lite(wcdb_handle handle, const char* session_id, int32_t batch_size, int32_t ascending, int32_t begin_timestamp, int32_t end_timestamp, wcdb_cursor* out_cursor) try { this.wcdbOpenMessageCursorLite = this.lib.func('int32 wcdb_open_message_cursor_lite(int64 handle, const char* sessionId, int32 batchSize, int32 ascending, int32 beginTimestamp, int32 endTimestamp, _Out_ int64* outCursor)') @@ -954,6 +974,15 @@ export class WcdbCore { this.wcdbOpenMessageCursorLite = null } + // wcdb_status wcdb_open_message_cursor_lite_with_key(...) 
+ try { + this.wcdbOpenMessageCursorLiteWithKey = this.lib.func( + 'int32 wcdb_open_message_cursor_lite_with_key(int64 handle, const char* sessionId, int32 batchSize, int32 ascending, int32 beginTimestamp, int32 endTimestamp, int32 keyValid, int64 keySortSeq, int64 keyCreateTime, int64 keyLocalId, _Out_ int64* outCursor)' + ) + } catch { + this.wcdbOpenMessageCursorLiteWithKey = null + } + // wcdb_status wcdb_fetch_message_batch(wcdb_handle handle, wcdb_cursor cursor, char** out_json, int32_t* out_has_more) this.wcdbFetchMessageBatch = this.lib.func('int32 wcdb_fetch_message_batch(int64 handle, int64 cursor, _Out_ void** outJson, _Out_ int32* outHasMore)') @@ -1084,6 +1113,51 @@ export class WcdbCore { } catch { this.wcdbAiQuerySourceRefs = null } + try { + this.wcdbAiGetRecentMessages = this.lib.func('int32 wcdb_ai_get_recent_messages(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetRecentMessages = null + } + try { + this.wcdbAiGetMessagesBefore = this.lib.func('int32 wcdb_ai_get_messages_before(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetMessagesBefore = null + } + try { + this.wcdbAiGetMessagesAfter = this.lib.func('int32 wcdb_ai_get_messages_after(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetMessagesAfter = null + } + try { + this.wcdbAiGetMessageContext = this.lib.func('int32 wcdb_ai_get_message_context(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetMessageContext = null + } + try { + this.wcdbAiGetSearchMessageContext = this.lib.func('int32 wcdb_ai_get_search_message_context(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetSearchMessageContext = null + } + try { + this.wcdbAiGetConversationBetween = this.lib.func('int32 wcdb_ai_get_conversation_between(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + 
this.wcdbAiGetConversationBetween = null + } + try { + this.wcdbAiSearchSessions = this.lib.func('int32 wcdb_ai_search_sessions(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiSearchSessions = null + } + try { + this.wcdbAiGetSessionMessages = this.lib.func('int32 wcdb_ai_get_session_messages(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetSessionMessages = null + } + try { + this.wcdbAiGetSessionSummaries = this.lib.func('int32 wcdb_ai_get_session_summaries(int64 handle, const char* optionsJson, _Out_ void** outJson)') + } catch { + this.wcdbAiGetSessionSummaries = null + } // wcdb_status wcdb_get_sns_timeline(wcdb_handle handle, int32_t limit, int32_t offset, const char* username, const char* keyword, int32_t start_time, int32_t end_time, char** out_json) try { @@ -3280,6 +3354,80 @@ export class WcdbCore { } } + async openMessageCursorWithKey( + sessionId: string, + batchSize: number, + ascending: boolean, + beginTimestamp: number, + endTimestamp: number, + key?: { sortSeq?: number; createTime?: number; localId?: number } + ): Promise<{ success: boolean; cursor?: number; error?: string }> { + if (!this.ensureReady()) { + return { success: false, error: 'WCDB 未连接' } + } + const keySortSeq = Number.isFinite(Number(key?.sortSeq)) ? Math.floor(Number(key?.sortSeq)) : 0 + const keyCreateTime = Number.isFinite(Number(key?.createTime)) ? Math.floor(Number(key?.createTime)) : 0 + const keyLocalId = Number.isFinite(Number(key?.localId)) ? Math.floor(Number(key?.localId)) : 0 + const keyValid = keySortSeq > 0 || keyCreateTime > 0 || keyLocalId > 0 + + if (!keyValid || !this.wcdbOpenMessageCursorWithKey) { + return this.openMessageCursor(sessionId, batchSize, ascending, beginTimestamp, endTimestamp) + } + + try { + const outCursor = [0] + let result = this.wcdbOpenMessageCursorWithKey( + this.handle, + sessionId, + batchSize, + ascending ? 
1 : 0, + beginTimestamp, + endTimestamp, + 1, + keySortSeq, + keyCreateTime, + keyLocalId, + outCursor + ) + if (result === -3 && outCursor[0] <= 0 && this.shouldRetryCursorAfterNoDb()) { + this.writeLog('openMessageCursorWithKey: result=-3 (no message db), attempting forceReopen...', true) + const reopened = await this.forceReopen() + if (reopened && this.handle !== null) { + outCursor[0] = 0 + result = this.wcdbOpenMessageCursorWithKey( + this.handle, + sessionId, + batchSize, + ascending ? 1 : 0, + beginTimestamp, + endTimestamp, + 1, + keySortSeq, + keyCreateTime, + keyLocalId, + outCursor + ) + this.writeLog(`openMessageCursorWithKey retry after forceReopen: result=${result} cursor=${outCursor[0]}`, true) + } + } + if (result !== 0 || outCursor[0] <= 0) { + if (result !== -3) { + await this.printLogs(true) + this.writeLog( + `openMessageCursorWithKey failed: sessionId=${sessionId} batchSize=${batchSize} ascending=${ascending ? 1 : 0} begin=${beginTimestamp} end=${endTimestamp} result=${result} cursor=${outCursor[0]}`, + true + ) + } + return { success: false, error: `创建游标失败: ${result}` } + } + return { success: true, cursor: outCursor[0] } + } catch (e) { + await this.printLogs(true) + this.writeLog(`openMessageCursorWithKey exception: ${String(e)}`, true) + return { success: false, error: '创建游标异常,请查看日志' } + } + } + async openMessageCursorLite(sessionId: string, batchSize: number, ascending: boolean, beginTimestamp: number, endTimestamp: number): Promise<{ success: boolean; cursor?: number; error?: string }> { if (!this.ensureReady()) { return { success: false, error: 'WCDB 未连接' } @@ -3342,6 +3490,83 @@ export class WcdbCore { } } + async openMessageCursorLiteWithKey( + sessionId: string, + batchSize: number, + ascending: boolean, + beginTimestamp: number, + endTimestamp: number, + key?: { sortSeq?: number; createTime?: number; localId?: number } + ): Promise<{ success: boolean; cursor?: number; error?: string }> { + if (!this.ensureReady()) { + return { 
success: false, error: 'WCDB 未连接' } + } + const keySortSeq = Number.isFinite(Number(key?.sortSeq)) ? Math.floor(Number(key?.sortSeq)) : 0 + const keyCreateTime = Number.isFinite(Number(key?.createTime)) ? Math.floor(Number(key?.createTime)) : 0 + const keyLocalId = Number.isFinite(Number(key?.localId)) ? Math.floor(Number(key?.localId)) : 0 + const keyValid = keySortSeq > 0 || keyCreateTime > 0 || keyLocalId > 0 + + if (!keyValid) { + return this.openMessageCursorLite(sessionId, batchSize, ascending, beginTimestamp, endTimestamp) + } + if (!this.wcdbOpenMessageCursorLiteWithKey) { + return this.openMessageCursorWithKey(sessionId, batchSize, ascending, beginTimestamp, endTimestamp, key) + } + + try { + const outCursor = [0] + let result = this.wcdbOpenMessageCursorLiteWithKey( + this.handle, + sessionId, + batchSize, + ascending ? 1 : 0, + beginTimestamp, + endTimestamp, + 1, + keySortSeq, + keyCreateTime, + keyLocalId, + outCursor + ) + if (result === -3 && outCursor[0] <= 0 && this.shouldRetryCursorAfterNoDb()) { + this.writeLog('openMessageCursorLiteWithKey: result=-3 (no message db), attempting forceReopen...', true) + const reopened = await this.forceReopen() + if (reopened && this.handle !== null) { + outCursor[0] = 0 + result = this.wcdbOpenMessageCursorLiteWithKey( + this.handle, + sessionId, + batchSize, + ascending ? 1 : 0, + beginTimestamp, + endTimestamp, + 1, + keySortSeq, + keyCreateTime, + keyLocalId, + outCursor + ) + this.writeLog(`openMessageCursorLiteWithKey retry after forceReopen: result=${result} cursor=${outCursor[0]}`, true) + } + } + if (result !== 0 || outCursor[0] <= 0) { + if (result !== -3) { + await this.printLogs(true) + this.writeLog( + `openMessageCursorLiteWithKey failed: sessionId=${sessionId} batchSize=${batchSize} ascending=${ascending ? 
1 : 0} begin=${beginTimestamp} end=${endTimestamp} result=${result} cursor=${outCursor[0]}`, + true + ) + } + return { success: false, error: `创建游标失败: ${result}` } + } + return { success: true, cursor: outCursor[0] } + } catch (e) { + await this.printLogs(true) + this.writeLog(`openMessageCursorLiteWithKey exception: ${String(e)}`, true) + return { success: false, error: '创建游标异常,请查看日志' } + } + } + async fetchMessageBatch(cursor: number): Promise<{ success: boolean; rows?: any[]; hasMore?: boolean; error?: string }> { if (!this.ensureReady()) { return { success: false, error: 'WCDB 未连接' } @@ -4305,6 +4530,243 @@ export class WcdbCore { } } + async aiGetRecentMessages(options: { + sessionId: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetRecentMessages) return { success: false, error: '当前数据服务版本不支持 AI 最近消息查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetRecentMessages(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + limit: options.limit || 120, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 最近消息查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 最近消息查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetMessagesBefore(options: { + sessionId: string + beforeId?: number + beforeLocalId?: number + beforeCreateTime?: number + beforeSortSeq?: number + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 
未连接' } + if (!this.wcdbAiGetMessagesBefore) return { success: false, error: '当前数据服务版本不支持 AI 前向消息查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetMessagesBefore(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + before_id: options.beforeId || 0, + before_local_id: options.beforeLocalId || options.beforeId || 0, + before_create_time: options.beforeCreateTime || 0, + before_sort_seq: options.beforeSortSeq || 0, + limit: options.limit || 120, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 前向消息查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 前向消息查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetMessagesAfter(options: { + sessionId: string + afterId?: number + afterLocalId?: number + afterCreateTime?: number + afterSortSeq?: number + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetMessagesAfter) return { success: false, error: '当前数据服务版本不支持 AI 后向消息查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetMessagesAfter(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + after_id: options.afterId || 0, + after_local_id: options.afterLocalId || options.afterId || 0, + after_create_time: options.afterCreateTime || 0, + after_sort_seq: options.afterSortSeq || 0, + limit: options.limit || 120, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 后向消息查询失败: ${result}` } + const jsonStr = 
this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 后向消息查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetMessageContext(options: { + sessionId: string + messageIds: number[] + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetMessageContext) return { success: false, error: '当前数据服务版本不支持 AI 消息上下文查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetMessageContext(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + message_ids: Array.isArray(options.messageIds) ? options.messageIds : [] + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 消息上下文查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 消息上下文查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetSearchMessageContext(options: { + sessionId: string + messageIds: number[] + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetSearchMessageContext) return { success: false, error: '当前数据服务版本不支持 AI 搜索上下文查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetSearchMessageContext(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + message_ids: Array.isArray(options.messageIds) ? 
options.messageIds : [] + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 搜索上下文查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 搜索上下文查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetConversationBetween(options: { + sessionId: string + memberId1?: number + memberId2?: number + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetConversationBetween) return { success: false, error: '当前数据服务版本不支持 AI 双人对话查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetConversationBetween(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + member_id1: options.memberId1 || 0, + member_id2: options.memberId2 || 0, + limit: options.limit || 120, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 双人对话查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 双人对话查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiSearchSessions(options: { + keyword?: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiSearchSessions) return { success: false, error: '当前数据服务版本不支持 AI 会话搜索' } + try { + const outPtr = [null as any] + const result = this.wcdbAiSearchSessions(this.handle, JSON.stringify({ + keyword: 
options.keyword || '', + limit: options.limit || 20, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 会话搜索失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 会话搜索失败' } + const rows = JSON.parse(jsonStr) + return { success: true, rows: Array.isArray(rows) ? rows : [] } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetSessionMessages(options: { + sessionId: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetSessionMessages) return { success: false, error: '当前数据服务版本不支持 AI 会话消息查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetSessionMessages(this.handle, JSON.stringify({ + session_id: options.sessionId || '', + limit: options.limit || 500, + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 会话消息查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 会话消息查询失败' } + return { success: true, rows: this.parseMessageJson(jsonStr) } + } catch (e) { + return { success: false, error: String(e) } + } + } + + async aiGetSessionSummaries(options: { + sessionIds?: string[] + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; data?: any; error?: string }> { + if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } + if (!this.wcdbAiGetSessionSummaries) return { success: false, error: '当前数据服务版本不支持 AI 会话摘要查询' } + try { + const outPtr = [null as any] + const result = this.wcdbAiGetSessionSummaries(this.handle, 
JSON.stringify({ + session_ids_json: JSON.stringify(options.sessionIds || []), + begin_timestamp: options.beginTimestamp || 0, + end_timestamp: options.endTimestamp || 0 + }), outPtr) + if (result !== 0 || !outPtr[0]) return { success: false, error: `AI 会话摘要查询失败: ${result}` } + const jsonStr = this.decodeJsonPtr(outPtr[0]) + if (!jsonStr) return { success: false, error: '解析 AI 会话摘要查询失败' } + const data = JSON.parse(jsonStr) + return { success: true, data } + } catch (e) { + return { success: false, error: String(e) } + } + } + async getSnsTimeline(limit: number, offset: number, usernames?: string[], keyword?: string, startTime?: number, endTime?: number): Promise<{ success: boolean; timeline?: any[]; error?: string }> { if (!this.ensureReady()) return { success: false, error: 'WCDB 未连接' } if (!this.wcdbGetSnsTimeline) return { success: false, error: '当前数据服务版本不支持获取朋友圈' } diff --git a/electron/services/wcdbService.ts b/electron/services/wcdbService.ts index 829bb0e..0edaff9 100644 --- a/electron/services/wcdbService.ts +++ b/electron/services/wcdbService.ts @@ -468,6 +468,24 @@ export class WcdbService { return this.callWorker('openMessageCursor', { sessionId, batchSize, ascending, beginTimestamp, endTimestamp }) } + async openMessageCursorWithKey( + sessionId: string, + batchSize: number, + ascending: boolean, + beginTimestamp: number, + endTimestamp: number, + key?: { sortSeq?: number; createTime?: number; localId?: number } + ): Promise<{ success: boolean; cursor?: number; error?: string }> { + return this.callWorker('openMessageCursorWithKey', { + sessionId, + batchSize, + ascending, + beginTimestamp, + endTimestamp, + key + }) + } + /** * 打开轻量级消息游标 */ @@ -475,6 +493,24 @@ export class WcdbService { return this.callWorker('openMessageCursorLite', { sessionId, batchSize, ascending, beginTimestamp, endTimestamp }) } + async openMessageCursorLiteWithKey( + sessionId: string, + batchSize: number, + ascending: boolean, + beginTimestamp: number, + endTimestamp: number, 
+ key?: { sortSeq?: number; createTime?: number; localId?: number } + ): Promise<{ success: boolean; cursor?: number; error?: string }> { + return this.callWorker('openMessageCursorLiteWithKey', { + sessionId, + batchSize, + ascending, + beginTimestamp, + endTimestamp, + key + }) + } + /** * 获取下一批消息 */ @@ -616,6 +652,92 @@ export class WcdbService { return this.callWorker('aiQuerySourceRefs', { options }) } + async aiGetRecentMessages(options: { + sessionId: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetRecentMessages', { options }) + } + + async aiGetMessagesBefore(options: { + sessionId: string + beforeId?: number + beforeLocalId?: number + beforeCreateTime?: number + beforeSortSeq?: number + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetMessagesBefore', { options }) + } + + async aiGetMessagesAfter(options: { + sessionId: string + afterId?: number + afterLocalId?: number + afterCreateTime?: number + afterSortSeq?: number + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetMessagesAfter', { options }) + } + + async aiGetMessageContext(options: { + sessionId: string + messageIds: number[] + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetMessageContext', { options }) + } + + async aiGetSearchMessageContext(options: { + sessionId: string + messageIds: number[] + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetSearchMessageContext', { options }) + } + + async aiGetConversationBetween(options: { + sessionId: string + memberId1?: number + memberId2?: number + limit?: number + beginTimestamp?: number + endTimestamp?: 
number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetConversationBetween', { options }) + } + + async aiSearchSessions(options: { + keyword?: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiSearchSessions', { options }) + } + + async aiGetSessionMessages(options: { + sessionId: string + limit?: number + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; rows?: any[]; error?: string }> { + return this.callWorker('aiGetSessionMessages', { options }) + } + + async aiGetSessionSummaries(options: { + sessionIds?: string[] + beginTimestamp?: number + endTimestamp?: number + }): Promise<{ success: boolean; data?: any; error?: string }> { + return this.callWorker('aiGetSessionSummaries', { options }) + } + /** * 获取语音数据 */ diff --git a/electron/wcdbWorker.ts b/electron/wcdbWorker.ts index 64e0e67..455eec0 100644 --- a/electron/wcdbWorker.ts +++ b/electron/wcdbWorker.ts @@ -164,9 +164,29 @@ if (parentPort) { case 'openMessageCursor': result = await core.openMessageCursor(payload.sessionId, payload.batchSize, payload.ascending, payload.beginTimestamp, payload.endTimestamp) break + case 'openMessageCursorWithKey': + result = await core.openMessageCursorWithKey( + payload.sessionId, + payload.batchSize, + payload.ascending, + payload.beginTimestamp, + payload.endTimestamp, + payload.key + ) + break case 'openMessageCursorLite': result = await core.openMessageCursorLite(payload.sessionId, payload.batchSize, payload.ascending, payload.beginTimestamp, payload.endTimestamp) break + case 'openMessageCursorLiteWithKey': + result = await core.openMessageCursorLiteWithKey( + payload.sessionId, + payload.batchSize, + payload.ascending, + payload.beginTimestamp, + payload.endTimestamp, + payload.key + ) + break case 'fetchMessageBatch': result = await 
core.fetchMessageBatch(payload.cursor) break @@ -215,6 +235,33 @@ if (parentPort) { case 'aiQuerySourceRefs': result = await core.aiQuerySourceRefs(payload.options || {}) break + case 'aiGetRecentMessages': + result = await core.aiGetRecentMessages(payload.options || {}) + break + case 'aiGetMessagesBefore': + result = await core.aiGetMessagesBefore(payload.options || {}) + break + case 'aiGetMessagesAfter': + result = await core.aiGetMessagesAfter(payload.options || {}) + break + case 'aiGetMessageContext': + result = await core.aiGetMessageContext(payload.options || {}) + break + case 'aiGetSearchMessageContext': + result = await core.aiGetSearchMessageContext(payload.options || {}) + break + case 'aiGetConversationBetween': + result = await core.aiGetConversationBetween(payload.options || {}) + break + case 'aiSearchSessions': + result = await core.aiSearchSessions(payload.options || {}) + break + case 'aiGetSessionMessages': + result = await core.aiGetSessionMessages(payload.options || {}) + break + case 'aiGetSessionSummaries': + result = await core.aiGetSessionSummaries(payload.options || {}) + break case 'getVoiceData': result = await core.getVoiceData(payload.sessionId, payload.createTime, payload.candidates, payload.localId, payload.svrId) if (!result.success) { diff --git a/src/pages/AiAnalysisPage.scss b/src/pages/AiAnalysisPage.scss index 65daca4..db1864d 100644 --- a/src/pages/AiAnalysisPage.scss +++ b/src/pages/AiAnalysisPage.scss @@ -1,55 +1,50 @@ -.ai-analysis-v2 { - --ai-surface: color-mix(in srgb, var(--card-bg) 92%, #ffffff 8%); - --ai-surface-soft: color-mix(in srgb, var(--card-bg) 86%, #cbd5e1 14%); - --ai-border: color-mix(in srgb, var(--border-color) 85%, #94a3b8 15%); +.ai-analysis-chatlab { + --ai-border: color-mix(in srgb, var(--border-color) 78%, #94a3b8 22%); + --ai-surface: color-mix(in srgb, var(--card-bg) 90%, #ffffff 10%); + --ai-surface-soft: color-mix(in srgb, var(--card-bg) 82%, #dbeafe 18%); --ai-accent: #0f766e; - 
--ai-accent-soft: color-mix(in srgb, #0f766e 16%, transparent); + --ai-danger: #dc2626; height: 100%; min-height: 0; display: grid; grid-template-rows: auto minmax(0, 1fr); gap: 12px; - padding: 16px; + padding: 14px; background: - radial-gradient(1200px 380px at 8% -15%, color-mix(in srgb, #22c55e 20%, transparent), transparent 70%), - radial-gradient(1000px 320px at 96% -10%, color-mix(in srgb, #06b6d4 15%, transparent), transparent 68%), + radial-gradient(1200px 360px at 8% -18%, color-mix(in srgb, #22c55e 18%, transparent), transparent 70%), + radial-gradient(980px 320px at 98% -12%, color-mix(in srgb, #0284c7 16%, transparent), transparent 70%), var(--bg-primary); } -.ai-header { +.ai-topbar { border: 1px solid var(--ai-border); - border-radius: 16px; + border-radius: 14px; background: var(--ai-surface); - padding: 12px 14px; + padding: 10px 12px; display: flex; align-items: center; justify-content: space-between; - gap: 12px; + gap: 10px; - .left { + .title-group { display: flex; align-items: center; gap: 8px; - min-width: 0; h1 { margin: 0; - font-size: 16px; + font-size: 15px; } span { color: var(--text-secondary); font-size: 12px; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; } } - .tabs { + .top-actions { display: flex; - align-items: center; gap: 6px; button { @@ -63,56 +58,56 @@ gap: 6px; cursor: pointer; font-size: 12px; - } - .active { - background: var(--ai-accent-soft); - color: var(--text-primary); - border-color: color-mix(in srgb, var(--ai-accent) 58%, transparent); + &.active { + color: var(--text-primary); + border-color: color-mix(in srgb, var(--ai-accent) 50%, transparent); + background: color-mix(in srgb, var(--ai-accent) 12%, transparent); + } } } } -.chat-layout, -.sql-layout, -.tool-layout { +.chat-shell { min-height: 0; display: grid; gap: 12px; -} - -.chat-layout { grid-template-columns: 300px minmax(0, 1fr); + + &.with-data { + grid-template-columns: 300px minmax(0, 1fr) 320px; + } } -.conversation-panel, 
-.chat-main, +.conversation-sidebar, +.chat-main-panel, +.data-panel, .schema-panel, -.sql-main, -.tool-catalog, -.tool-main { +.sql-main { border: 1px solid var(--ai-border); - border-radius: 16px; + border-radius: 14px; background: var(--ai-surface); min-height: 0; } +.sidebar-head, .panel-head { - padding: 10px 12px; - border-bottom: 1px solid var(--ai-border); display: flex; align-items: center; justify-content: space-between; + gap: 8px; + padding: 10px 12px; + border-bottom: 1px solid var(--ai-border); h3 { margin: 0; - font-size: 14px; + font-size: 13px; } button { width: 28px; height: 28px; - border-radius: 9px; + border-radius: 8px; border: 1px solid var(--ai-border); background: color-mix(in srgb, var(--text-primary) 4%, transparent); color: var(--text-secondary); @@ -121,59 +116,58 @@ } .conversation-list { - padding: 10px; - display: flex; - flex-direction: column; - gap: 8px; + height: calc(100% - 49px); overflow: auto; - max-height: 100%; + padding: 10px; + display: grid; + gap: 8px; } .conversation-item { border: 1px solid transparent; - border-radius: 12px; - padding: 10px; + border-radius: 11px; background: color-mix(in srgb, var(--text-primary) 2%, transparent); - text-align: left; - cursor: pointer; color: var(--text-primary); + text-align: left; + padding: 10px; + cursor: pointer; .main { display: grid; gap: 4px; - } - strong { - font-size: 13px; - } + strong { + font-size: 13px; + font-weight: 600; + } - small { - font-size: 11px; - color: var(--text-tertiary); + small { + font-size: 11px; + color: var(--text-tertiary); + } } .ops { margin-top: 8px; - display: flex; - gap: 10px; + display: inline-flex; + gap: 8px; color: var(--text-secondary); font-size: 12px; span { - cursor: pointer; display: inline-flex; align-items: center; gap: 4px; } } + + &.active { + border-color: color-mix(in srgb, var(--ai-accent) 50%, transparent); + background: color-mix(in srgb, var(--ai-accent) 12%, transparent); + } } -.conversation-item.active { - border-color: 
color-mix(in srgb, var(--ai-accent) 52%, transparent); - background: color-mix(in srgb, var(--ai-accent) 10%, transparent); -} - -.chat-main { +.chat-main-panel { display: grid; grid-template-rows: auto minmax(0, 1fr) auto auto; } @@ -184,27 +178,27 @@ display: grid; gap: 8px; - .row { + .controls-row { display: flex; - flex-wrap: wrap; align-items: center; + flex-wrap: wrap; gap: 8px; - } - label { - font-size: 12px; - color: var(--text-secondary); - } + label { + font-size: 12px; + color: var(--text-secondary); + } - select, - input { - border: 1px solid var(--ai-border); - background: var(--ai-surface-soft); - color: var(--text-primary); - border-radius: 8px; - padding: 6px 8px; - font-size: 12px; - min-width: 120px; + select, + input { + border: 1px solid var(--ai-border); + border-radius: 8px; + background: var(--ai-surface-soft); + color: var(--text-primary); + padding: 6px 8px; + font-size: 12px; + min-width: 114px; + } } } @@ -215,97 +209,176 @@ button { border: 1px solid var(--ai-border); - background: color-mix(in srgb, var(--ai-accent) 8%, transparent); - color: var(--text-secondary); border-radius: 999px; padding: 4px 10px; + background: color-mix(in srgb, var(--ai-accent) 10%, transparent); + color: var(--text-secondary); font-size: 12px; cursor: pointer; } } -.message-panel { +.messages-wrap { + position: relative; min-height: 0; - overflow: hidden; -} - -.message-list { - height: 100%; overflow: auto; padding: 12px; - display: flex; - flex-direction: column; + display: grid; gap: 10px; } -.msg { +.message-card { border: 1px solid var(--ai-border); border-radius: 12px; background: color-mix(in srgb, var(--text-primary) 2%, transparent); padding: 10px; - .head { - font-size: 12px; - color: var(--text-secondary); + header { + display: flex; + align-items: center; + justify-content: space-between; + gap: 8px; + + span, + time { + font-size: 12px; + color: var(--text-secondary); + } } - .body { - margin-top: 6px; + .message-body { + margin-top: 8px; 
font-size: 13px; line-height: 1.65; white-space: pre-wrap; word-break: break-word; - } -} -.msg.user { - background: color-mix(in srgb, #0f766e 14%, transparent); -} - -.runtime-cards { - display: grid; - gap: 6px; -} - -.chunk { - border: 1px dashed var(--ai-border); - border-radius: 10px; - padding: 8px; - font-size: 12px; - color: var(--text-secondary); - - strong { - margin-right: 8px; + &.blocks { + display: grid; + gap: 8px; + } } - pre { - margin: 6px 0 0; - white-space: pre-wrap; + &.user { + background: color-mix(in srgb, var(--ai-accent) 14%, transparent); + } + + .tool-trace { + margin-top: 8px; + border-top: 1px dashed var(--ai-border); + padding-top: 8px; font-size: 12px; + color: var(--text-secondary); + + ul { + margin: 6px 0 0; + padding-left: 18px; + display: grid; + gap: 4px; + } } - .err { - color: #dc2626; + .think-block { + border-left: 2px solid color-mix(in srgb, var(--ai-accent) 38%, transparent); + padding-left: 10px; + + summary { + cursor: pointer; + font-size: 12px; + color: var(--text-secondary); + display: inline-flex; + gap: 6px; + align-items: center; + } + + pre { + margin-top: 6px; + white-space: pre-wrap; + color: var(--text-tertiary); + font-size: 12px; + } + } + + .tool-block { + border: 1px dashed var(--ai-border); + border-radius: 10px; + padding: 8px; + + .line { + display: flex; + justify-content: space-between; + gap: 8px; + font-size: 12px; + + strong { + color: var(--text-primary); + } + + span { + color: var(--text-secondary); + } + } + + pre { + margin-top: 6px; + white-space: pre-wrap; + font-size: 12px; + color: var(--text-tertiary); + } + + &.done { + border-color: color-mix(in srgb, #16a34a 55%, transparent); + } + + &.error { + border-color: color-mix(in srgb, #dc2626 55%, transparent); + } + } + + .typing-cursor { + width: 8px; + display: inline-block; + animation: blink-cursor 1s step-start infinite; } } -.footer-actions { +@keyframes blink-cursor { + 50% { + opacity: 0; + } +} + +.scroll-bottom { + position: 
sticky; + bottom: 10px; + margin-left: auto; + width: 28px; + height: 28px; + border-radius: 999px; + border: 1px solid var(--ai-border); + background: var(--ai-surface); + color: var(--text-secondary); + cursor: pointer; +} + +.status-row { border-top: 1px solid var(--ai-border); padding: 8px 12px; display: flex; + align-items: center; + justify-content: space-between; gap: 8px; - .ghost { - border: 1px solid var(--ai-border); - background: color-mix(in srgb, var(--text-primary) 4%, transparent); - color: var(--text-secondary); - border-radius: 9px; - padding: 6px 10px; + .left, + .right { display: inline-flex; align-items: center; - gap: 6px; - cursor: pointer; + gap: 8px; + } + + span { font-size: 12px; + color: var(--text-secondary); } } @@ -317,13 +390,16 @@ textarea { width: 100%; - min-height: 80px; + min-height: 88px; border: 1px solid var(--ai-border); border-radius: 10px; background: var(--ai-surface-soft); color: var(--text-primary); padding: 10px; resize: vertical; + font-family: inherit; + font-size: 13px; + line-height: 1.6; } } @@ -337,60 +413,160 @@ border-radius: 999px; background: color-mix(in srgb, var(--text-primary) 4%, transparent); color: var(--text-secondary); - padding: 4px 10px; font-size: 12px; + padding: 4px 10px; cursor: pointer; } } -.input-actions { - display: flex; - gap: 8px; - +.input-actions, +.status-row .right, +.top-actions, +.controls-row { button { - border-radius: 9px; border: 1px solid var(--ai-border); - padding: 7px 12px; + border-radius: 8px; + padding: 6px 10px; + background: color-mix(in srgb, var(--text-primary) 4%, transparent); + color: var(--text-secondary); display: inline-flex; align-items: center; gap: 6px; cursor: pointer; font-size: 12px; - } - .primary { - background: color-mix(in srgb, var(--ai-accent) 18%, transparent); - color: var(--text-primary); - border-color: color-mix(in srgb, var(--ai-accent) 52%, transparent); - } + &:disabled { + opacity: 0.45; + cursor: not-allowed; + } - .danger { - background: 
color-mix(in srgb, #ef4444 12%, transparent); - color: var(--text-primary); - border-color: color-mix(in srgb, #ef4444 45%, transparent); + &.primary { + background: color-mix(in srgb, var(--ai-accent) 18%, transparent); + border-color: color-mix(in srgb, var(--ai-accent) 52%, transparent); + color: var(--text-primary); + } + + &.danger { + border-color: color-mix(in srgb, var(--ai-danger) 48%, transparent); + color: color-mix(in srgb, var(--ai-danger) 78%, var(--text-primary) 22%); + background: color-mix(in srgb, var(--ai-danger) 10%, transparent); + } } } -.sql-layout { - grid-template-columns: 300px minmax(0, 1fr); -} - -.schema-panel { +.data-panel { display: grid; - grid-template-rows: auto minmax(0, 1fr); + grid-template-rows: auto auto minmax(0, 1fr); + + > header { + padding: 10px 12px; + border-bottom: 1px solid var(--ai-border); + display: flex; + justify-content: space-between; + align-items: center; + + h3 { + margin: 0; + font-size: 13px; + } + + span { + font-size: 12px; + color: var(--text-secondary); + } + } + + section { + padding: 10px 12px; + + h4 { + margin: 0 0 8px; + font-size: 12px; + color: var(--text-secondary); + } + } + + .keywords { + border-bottom: 1px solid var(--ai-border); + + .chips { + display: flex; + flex-wrap: wrap; + gap: 6px; + + span { + border-radius: 999px; + border: 1px solid var(--ai-border); + padding: 3px 8px; + font-size: 12px; + color: var(--text-secondary); + } + + small { + color: var(--text-tertiary); + font-size: 12px; + } + } + } + + .sources { + min-height: 0; + display: grid; + grid-template-rows: auto minmax(0, 1fr); + + .source-list { + min-height: 0; + overflow: auto; + display: grid; + gap: 8px; + + article { + border: 1px solid var(--ai-border); + border-radius: 10px; + padding: 8px; + background: color-mix(in srgb, var(--text-primary) 2%, transparent); + + header { + display: flex; + justify-content: space-between; + gap: 8px; + + strong, + time { + font-size: 11px; + color: var(--text-secondary); + } + 
} + + p { + margin: 6px 0 0; + font-size: 12px; + color: var(--text-primary); + line-height: 1.55; + } + } + } + } +} + +.sql-shell { + min-height: 0; + display: grid; + grid-template-columns: 320px minmax(0, 1fr); + gap: 12px; } .schema-list { - overflow: auto; padding: 10px; - display: grid; - gap: 10px; + overflow: auto; + max-height: calc(100% - 49px); } .schema-source { border: 1px solid var(--ai-border); - border-radius: 12px; + border-radius: 10px; padding: 8px; + margin-bottom: 8px; h4 { margin: 0 0 8px; @@ -402,50 +578,45 @@ padding: 0; list-style: none; display: grid; - gap: 8px; - } + gap: 6px; - li { - display: grid; - gap: 4px; - font-size: 12px; - } + li { + display: grid; + gap: 2px; - small { - color: var(--text-tertiary); + strong { + font-size: 12px; + } + + small { + font-size: 11px; + color: var(--text-tertiary); + } + } } } .sql-main { - min-height: 0; display: grid; grid-template-rows: auto auto auto minmax(0, 1fr) auto; - gap: 8px; + gap: 10px; padding: 12px; } .sql-bar { display: flex; - align-items: center; - gap: 8px; flex-wrap: wrap; + gap: 8px; select, button { border: 1px solid var(--ai-border); - border-radius: 9px; - background: color-mix(in srgb, var(--text-primary) 4%, transparent); - color: var(--text-secondary); + border-radius: 8px; + background: var(--ai-surface-soft); + color: var(--text-primary); padding: 6px 10px; font-size: 12px; } - - button { - cursor: pointer; - display: inline-flex; - align-items: center; - gap: 6px; - } } .sql-prompt, @@ -457,224 +628,160 @@ background: var(--ai-surface-soft); color: var(--text-primary); padding: 10px; + font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; font-size: 12px; - min-height: 90px; + line-height: 1.6; resize: vertical; } +.sql-prompt { + min-height: 90px; +} + .sql-generated { - min-height: 120px; - font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; + 
min-height: 130px; +} + +.error, +.global-error { + border: 1px solid color-mix(in srgb, #dc2626 50%, transparent); + background: color-mix(in srgb, #dc2626 11%, transparent); + color: color-mix(in srgb, #dc2626 80%, var(--text-primary) 20%); + border-radius: 10px; + padding: 8px 10px; + font-size: 12px; } .sql-table-wrap { min-height: 0; overflow: auto; border: 1px solid var(--ai-border); - border-radius: 12px; -} + border-radius: 10px; -.sql-table { - width: 100%; - border-collapse: collapse; + .sql-table { + width: 100%; + border-collapse: collapse; - th, - td { - border-bottom: 1px solid var(--ai-border); - border-right: 1px solid var(--ai-border); - padding: 7px 8px; - font-size: 12px; - text-align: left; - white-space: nowrap; + th, + td { + border-bottom: 1px solid color-mix(in srgb, var(--ai-border) 70%, transparent); + padding: 7px 8px; + font-size: 12px; + text-align: left; + vertical-align: top; + white-space: pre-wrap; + word-break: break-word; + } + + th { + position: sticky; + top: 0; + background: color-mix(in srgb, var(--ai-surface) 94%, #f8fafc 6%); + cursor: pointer; + z-index: 1; + display: inline-flex; + align-items: center; + gap: 4px; + } } - th { - cursor: pointer; - background: color-mix(in srgb, var(--text-primary) 5%, transparent); - } -} + .pager { + display: flex; + align-items: center; + gap: 8px; + padding: 8px; -.pager { - display: flex; - align-items: center; - justify-content: flex-end; - gap: 8px; - padding: 8px; + button { + border: 1px solid var(--ai-border); + border-radius: 8px; + padding: 4px 8px; + background: color-mix(in srgb, var(--text-primary) 4%, transparent); + color: var(--text-secondary); + cursor: pointer; + font-size: 12px; + } - button { - border: 1px solid var(--ai-border); - background: color-mix(in srgb, var(--text-primary) 4%, transparent); - color: var(--text-secondary); - border-radius: 8px; - padding: 4px 8px; - font-size: 12px; - cursor: pointer; + span { + font-size: 12px; + color: var(--text-secondary); 
+ } } } .sql-history { border-top: 1px solid var(--ai-border); - padding-top: 8px; + padding-top: 10px; h4 { margin: 0 0 8px; - font-size: 13px; - } -} - -.history-list { - display: flex; - flex-wrap: wrap; - gap: 6px; - - button { - border: 1px solid var(--ai-border); - background: color-mix(in srgb, var(--text-primary) 4%, transparent); - color: var(--text-secondary); - border-radius: 8px; - padding: 5px 8px; font-size: 12px; - cursor: pointer; - max-width: 100%; - text-align: left; - } -} - -.tool-layout { - grid-template-columns: 320px minmax(0, 1fr); -} - -.tool-catalog { - padding: 12px; - overflow: auto; - - h3, - h4 { - margin: 0 0 8px; } - h4 { - margin-top: 14px; - font-size: 12px; - color: var(--text-secondary); - } -} - -.tool-list { - display: grid; - gap: 6px; - - button { - border: 1px solid var(--ai-border); - border-radius: 8px; - padding: 7px 8px; - background: color-mix(in srgb, var(--text-primary) 3%, transparent); - color: var(--text-secondary); - text-align: left; - font-size: 12px; - cursor: pointer; - } - - .active { - border-color: color-mix(in srgb, var(--ai-accent) 52%, transparent); - background: color-mix(in srgb, var(--ai-accent) 12%, transparent); - color: var(--text-primary); - } -} - -.tool-main { - display: grid; - grid-template-rows: auto auto minmax(0, 1fr); - gap: 8px; - padding: 12px; -} - -.tool-top { - display: flex; - justify-content: space-between; - gap: 12px; - - h3 { - margin: 0 0 6px; - font-size: 14px; - } - - p { - margin: 0; - font-size: 12px; - color: var(--text-secondary); - } -} - -.actions { - display: flex; - gap: 8px; - - button { - border: 1px solid var(--ai-border); - border-radius: 8px; - background: color-mix(in srgb, var(--text-primary) 4%, transparent); - color: var(--text-secondary); - padding: 6px 9px; - font-size: 12px; - display: inline-flex; - align-items: center; + .history-list { + display: grid; gap: 6px; - cursor: pointer; - } -} + max-height: 120px; + overflow: auto; -.tool-output { - margin: 0; 
- border: 1px solid var(--ai-border); - border-radius: 10px; - background: var(--ai-surface-soft); - padding: 10px; - min-height: 160px; - overflow: auto; - white-space: pre-wrap; - font-size: 12px; + button { + text-align: left; + border: 1px solid var(--ai-border); + border-radius: 8px; + padding: 6px 8px; + background: color-mix(in srgb, var(--text-primary) 2%, transparent); + color: var(--text-secondary); + font-size: 12px; + cursor: pointer; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + } + } } .empty { - padding: 14px; - color: var(--text-secondary); - font-size: 12px; - display: inline-flex; - align-items: center; - gap: 6px; -} - -.error, -.global-error { - border: 1px solid color-mix(in srgb, #dc2626 55%, transparent); - background: color-mix(in srgb, #dc2626 12%, transparent); - color: color-mix(in srgb, #dc2626 85%, #111827 15%); - border-radius: 10px; - padding: 8px 10px; + color: var(--text-tertiary); font-size: 12px; + padding: 10px; } .spin { - animation: ai-spin 1s linear infinite; + animation: wf-spin 1s linear infinite; } -@keyframes ai-spin { - from { transform: rotate(0deg); } - to { transform: rotate(360deg); } -} - -@media (max-width: 1100px) { - .chat-layout, - .sql-layout, - .tool-layout { - grid-template-columns: minmax(0, 1fr); +@keyframes wf-spin { + from { + transform: rotate(0deg); } - .conversation-panel, - .schema-panel, - .tool-catalog { + to { + transform: rotate(360deg); + } +} + +@media (max-width: 1380px) { + .chat-shell, + .chat-shell.with-data { + grid-template-columns: 260px minmax(0, 1fr); + } + + .data-panel { + display: none; + } +} + +@media (max-width: 1024px) { + .ai-analysis-chatlab { + padding: 10px; + } + + .chat-shell, + .chat-shell.with-data, + .sql-shell { + grid-template-columns: 1fr; + } + + .conversation-sidebar { max-height: 260px; } } - diff --git a/src/pages/AiAnalysisPage.tsx b/src/pages/AiAnalysisPage.tsx index 7d6d33c..2fa1d19 100644 --- a/src/pages/AiAnalysisPage.tsx +++ 
b/src/pages/AiAnalysisPage.tsx @@ -6,14 +6,15 @@ import { Database, Download, Loader2, + PanelLeftClose, + PanelLeftOpen, Play, RefreshCw, - Search, Send, Sparkles, - SquareTerminal, Trash2, - Wrench + ChevronDown, + ChevronUp } from 'lucide-react' import type { AiConversation, @@ -25,10 +26,9 @@ import type { ToolCatalogEntry } from '../types/aiAnalysis' import { useAiRuntimeStore } from '../stores/aiRuntimeStore' -import type { AgentStreamChunk } from '../types/electron' import './AiAnalysisPage.scss' -type MainTab = 'chat' | 'sql' | 'tool' +type MainTab = 'chat' | 'sql' type ScopeMode = 'global' | 'contact' | 'session' function formatDateTime(ts: number): string { @@ -47,7 +47,10 @@ function normalizeText(value: unknown, fallback = ''): string { return text || fallback } -function extractSqlTarget(schema: SqlSchemaPayload | null, key: string): { kind: 'message' | 'contact' | 'biz'; path: string | null } | null { +function extractSqlTarget( + schema: SqlSchemaPayload | null, + key: string +): { kind: 'message' | 'contact' | 'biz'; path: string | null } | null { if (!schema) return null for (const source of schema.sources) { const sourceKey = `${source.kind}:${source.path || ''}` @@ -63,7 +66,9 @@ function toCsv(rows: Record[], columns: string[]): string { return text } const header = columns.map((column) => esc(column)).join(',') - const body = rows.map((row) => columns.map((column) => esc(row[column])).join(',')).join('\n') + const body = rows + .map((row) => columns.map((column) => esc(row[column])).join(',')) + .join('\n') return `${header}\n${body}` } @@ -72,7 +77,9 @@ function AiAnalysisPage() { const agentApi = window.electronAPI.agentApi const assistantApi = window.electronAPI.assistantApi const skillApi = window.electronAPI.skillApi + const [activeTab, setActiveTab] = useState('chat') + const [showDataPanel, setShowDataPanel] = useState(true) const [scopeMode, setScopeMode] = useState('global') const [scopeTarget, setScopeTarget] = useState('') const 
[conversations, setConversations] = useState([]) @@ -102,34 +109,39 @@ function AiAnalysisPage() { const [sqlPage, setSqlPage] = useState(1) const [sqlPageSize] = useState(50) - const [toolCatalog, setToolCatalog] = useState([]) - const [toolName, setToolName] = useState('') - const [toolArgsText, setToolArgsText] = useState('{}') - const [toolRunning, setToolRunning] = useState(false) - const [toolOutput, setToolOutput] = useState('') - - const sqlRunIdRef = useRef('') + const messageContainerRef = useRef(null) const sqlGeneratedRef = useRef('') - const messageEndRef = useRef(null) + const [showScrollBottom, setShowScrollBottom] = useState(false) - const activeRunId = useAiRuntimeStore((state) => state.activeRunId) const runtimeState = useAiRuntimeStore((state) => ( currentConversationId ? state.states[currentConversationId] : undefined )) + const activeRequestId = useAiRuntimeStore((state) => state.activeRequestId) const startRun = useAiRuntimeStore((state) => state.startRun) const appendChunk = useAiRuntimeStore((state) => state.appendChunk) - const finishRun = useAiRuntimeStore((state) => state.finishRun) + const completeRun = useAiRuntimeStore((state) => state.completeRun) const selectedAssistant = useMemo( () => assistants.find((assistant) => assistant.id === selectedAssistantId) || null, [assistants, selectedAssistantId] ) + const showThinkBlocks = useMemo(() => { + try { + const query = new URLSearchParams(window.location.search) + if (query.get('debugThink') === '1') return true + return window.localStorage.getItem('wf_ai_debug_think') === '1' + } catch { + return false + } + }, []) const slashSuggestions = useMemo(() => { const text = normalizeText(input) if (!text.startsWith('/')) return [] const key = text.slice(1).toLowerCase() - return skills.filter((skill) => !key || skill.id.includes(key) || skill.name.toLowerCase().includes(key)).slice(0, 8) + return skills + .filter((skill) => !key || skill.id.includes(key) || 
skill.name.toLowerCase().includes(key)) + .slice(0, 8) }, [input, skills]) const mentionSuggestions = useMemo(() => { @@ -137,7 +149,11 @@ function AiAnalysisPage() { if (!match) return [] const keyword = match[1].toLowerCase() return contacts - .filter((contact) => !keyword || contact.displayName.toLowerCase().includes(keyword) || contact.username.toLowerCase().includes(keyword)) + .filter((contact) => + !keyword || + contact.displayName.toLowerCase().includes(keyword) || + contact.username.toLowerCase().includes(keyword) + ) .slice(0, 8) }, [contacts, input]) @@ -177,7 +193,9 @@ function AiAnalysisPage() { } const list = res.conversations || [] setConversations(list) - if (!currentConversationId && list.length > 0) setCurrentConversationId(list[0].conversationId) + if (!currentConversationId && list.length > 0) { + setCurrentConversationId(list[0].conversationId) + } } finally { setLoadingConversations(false) } @@ -206,7 +224,11 @@ function AiAnalysisPage() { ]) setAssistants(assistantList || []) setSkills(skillList || []) - if (assistantList && assistantList.length > 0 && !assistantList.some((item) => item.id === selectedAssistantId)) { + if ( + assistantList && + assistantList.length > 0 && + !assistantList.some((item) => item.id === selectedAssistantId) + ) { setSelectedAssistantId(assistantList[0].id) } } catch (error) { @@ -221,7 +243,12 @@ function AiAnalysisPage() { const list = res.contacts .map((contact) => ({ username: normalizeText(contact.username), - displayName: normalizeText(contact.displayName || contact.remark || contact.nickname || contact.username) + displayName: normalizeText( + contact.displayName || + contact.remark || + contact.nickname || + contact.username + ) })) .filter((contact) => contact.username && contact.displayName) .slice(0, 300) @@ -231,18 +258,6 @@ function AiAnalysisPage() { } }, []) - const loadToolCatalog = useCallback(async () => { - try { - const catalog = await aiApi.getToolCatalog() - 
setToolCatalog(Array.isArray(catalog) ? catalog : []) - if (!toolName && Array.isArray(catalog) && catalog.length > 0) { - setToolName(catalog[0].name) - } - } catch (error) { - setErrorText(String((error as Error)?.message || error)) - } - }, [aiApi, toolName]) - const loadSchema = useCallback(async () => { const res = await window.electronAPI.chat.getSchema({}) if (!res.success || !res.schema) { @@ -268,52 +283,35 @@ function AiAnalysisPage() { }, [currentConversationId, loadMessages]) useEffect(() => { - if (activeTab === 'sql' && !sqlSchema) void loadSchema() - if (activeTab === 'tool' && toolCatalog.length === 0) void loadToolCatalog() - }, [activeTab, sqlSchema, loadSchema, toolCatalog.length, loadToolCatalog]) + if (activeTab === 'sql' && !sqlSchema) { + void loadSchema() + } + }, [activeTab, sqlSchema, loadSchema]) useEffect(() => { - const off = agentApi.onStream((chunk: AgentStreamChunk) => { - if (sqlRunIdRef.current && chunk.runId === sqlRunIdRef.current) { - if (chunk.type === 'content') { - setSqlGenerated((prev) => { - const next = `${prev}${chunk.content || ''}` - sqlGeneratedRef.current = next - return next - }) - } else if (chunk.type === 'done') { - setSqlGenerating(false) - if (normalizeText(sqlGeneratedRef.current)) { - setSqlHistory((prev) => [sqlGeneratedRef.current.trim(), ...prev].slice(0, 30)) - } - sqlRunIdRef.current = '' - } else if (chunk.type === 'error') { - setSqlGenerating(false) - setSqlError(chunk.error || 'SQL 生成失败') - sqlRunIdRef.current = '' - } - return - } - const conversationId = normalizeText(chunk.conversationId, currentConversationId) - if (!conversationId) return - appendChunk(conversationId, chunk) - if (chunk.type === 'done' || chunk.type === 'error' || chunk.isFinished) { - finishRun(conversationId) - void loadMessages(conversationId) - void loadConversations() - } - }) - return () => off() - }, [agentApi, appendChunk, currentConversationId, finishRun, loadConversations, loadMessages]) + const panel = 
messageContainerRef.current + if (!panel) return + const onScroll = () => { + const distance = panel.scrollHeight - panel.scrollTop - panel.clientHeight + setShowScrollBottom(distance > 64) + } + panel.addEventListener('scroll', onScroll) + onScroll() + return () => panel.removeEventListener('scroll', onScroll) + }, [messageContainerRef.current]) useEffect(() => { - messageEndRef.current?.scrollIntoView({ behavior: 'smooth', block: 'end' }) - }, [messages, runtimeState?.draft, runtimeState?.chunks.length]) + const panel = messageContainerRef.current + if (!panel) return + panel.scrollTo({ top: panel.scrollHeight, behavior: 'smooth' }) + }, [messages, runtimeState?.blocks.length, runtimeState?.draft]) const ensureConversation = useCallback(async (): Promise => { if (currentConversationId) return currentConversationId const created = await aiApi.createConversation({ title: '新的 AI 对话' }) - if (!created.success || !created.conversationId) throw new Error(created.error || '创建会话失败') + if (!created.success || !created.conversationId) { + throw new Error(created.error || '创建会话失败') + } setCurrentConversationId(created.conversationId) await loadConversations() return created.conversationId @@ -361,8 +359,10 @@ function AiAnalysisPage() { const handleSend = async () => { const text = normalizeText(input) if (!text) return + setErrorText('') const conversationId = await ensureConversation() + setMessages((prev) => ([ ...prev, { @@ -377,25 +377,37 @@ function AiAnalysisPage() { } ])) setInput('') - const run = await agentApi.runStream({ + + const run = agentApi.runStream({ mode: 'chat', conversationId, userInput: text, assistantId: selectedAssistantId, activeSkillId: selectedSkillId || undefined, chatScope: scopeMode === 'session' ? 
'private' : 'private' + }, (chunk) => { + appendChunk(conversationId, chunk) }) - if (!run.success || !run.runId) { - setErrorText('启动失败') - return + + startRun(conversationId, run.requestId) + const result = await run.promise + completeRun(conversationId, result.result || { error: result.error, canceled: false }) + + if (!result.success && !result.result?.canceled) { + setErrorText(result.error || '执行失败') } - startRun(conversationId, run.runId) + + await loadMessages(conversationId) + await loadConversations() } const handleStop = async () => { if (!currentConversationId) return - await agentApi.abort({ runId: activeRunId || undefined, conversationId: currentConversationId }) - finishRun(currentConversationId) + const requestId = runtimeState?.requestId || activeRequestId + if (!requestId) return + setErrorText('') + await agentApi.abort(requestId) + completeRun(currentConversationId, { canceled: true }) } const handleExportConversation = async () => { @@ -409,11 +421,6 @@ function AiAnalysisPage() { window.alert('会话 Markdown 已复制到剪贴板') } - const handleOpenLog = async () => { - const logPath = await window.electronAPI.log.getPath() - await window.electronAPI.shell.openPath(logPath) - } - const handleGenerateSql = async () => { const prompt = normalizeText(sqlPrompt) if (!prompt) return @@ -421,21 +428,35 @@ function AiAnalysisPage() { setSqlGenerated('') sqlGeneratedRef.current = '' setSqlError('') + const target = extractSqlTarget(sqlSchema, sqlTargetKey) - const run = await agentApi.runStream({ + const run = agentApi.runStream({ mode: 'sql', userInput: prompt, sqlContext: { schemaText: sqlSchemaText, targetHint: target ? 
`${target.kind}:${target.path || ''}` : '' } + }, (chunk) => { + if (chunk.type === 'content') { + setSqlGenerated((prev) => { + const next = `${prev}${chunk.content || ''}` + sqlGeneratedRef.current = next + return next + }) + } }) - if (!run.success || !run.runId) { - setSqlGenerating(false) - setSqlError('SQL 生成失败') + + const result = await run.promise + setSqlGenerating(false) + if (!result.success) { + setSqlError(result.error || 'SQL 生成失败') return } - sqlRunIdRef.current = run.runId + + if (normalizeText(sqlGeneratedRef.current)) { + setSqlHistory((prev) => [sqlGeneratedRef.current.trim(), ...prev].slice(0, 30)) + } } const handleExecuteSql = async () => { @@ -478,54 +499,32 @@ function AiAnalysisPage() { URL.revokeObjectURL(url) } - const handleRunTool = async () => { - setToolRunning(true) - try { - const args = JSON.parse(toolArgsText || '{}') - const result = await aiApi.executeTool({ name: toolName, args }) - setToolOutput(JSON.stringify(result, null, 2)) - } catch (error) { - setToolOutput(String((error as Error)?.message || error)) - } finally { - setToolRunning(false) - } - } - - const groupedTools = useMemo(() => ({ - core: toolCatalog.filter((item) => item.category === 'core'), - analysis: toolCatalog.filter((item) => item.category === 'analysis') - }), [toolCatalog]) - return ( -
-
-
+
+
+

AI Analysis

- Chat Explorer + SQL Lab + Tool Test + ChatLab 交互同构模式
-
+
-
{activeTab === 'chat' && ( -
- -
+
-
+