超级无敌帅气到爆炸起飞的更新

This commit is contained in:
cc
2026-02-03 21:45:17 +08:00
parent 0b308803bf
commit 79648cd9d5
18 changed files with 5938 additions and 394 deletions

View File

@@ -22,6 +22,7 @@ import SnsPage from './pages/SnsPage'
import ContactsPage from './pages/ContactsPage'
import ChatHistoryPage from './pages/ChatHistoryPage'
import NotificationWindow from './pages/NotificationWindow'
import AIChatPage from './pages/AIChatPage'
import { useAppStore } from './stores/appStore'
import { themes, useThemeStore, type ThemeId } from './stores/themeStore'
@@ -429,6 +430,7 @@ function App() {
<Route path="/" element={<HomePage />} />
<Route path="/home" element={<HomePage />} />
<Route path="/chat" element={<ChatPage />} />
<Route path="/ai-chat" element={<AIChatPage />} />
<Route path="/analytics" element={<AnalyticsWelcomePage />} />
<Route path="/analytics/view" element={<AnalyticsPage />} />
<Route path="/group-analytics" element={<GroupAnalyticsPage />} />

View File

@@ -0,0 +1,36 @@
import React from 'react'
import { Bot, User } from 'lucide-react'

// Shape of a single chat message as rendered by the bubble.
// NOTE(review): this type is duplicated in pages/AIChatPage.tsx — consider
// moving it to a shared module.
interface ChatMessage {
  id: string;
  role: 'user' | 'ai';
  content: string;
  timestamp: number;
}

interface MessageBubbleProps {
  message: ChatMessage;
}

/**
 * Memoized chat-message bubble.
 *
 * Wrapped in React.memo with a custom comparator so a bubble only
 * re-renders when its message id or content changes — important while
 * streaming, where the parent list re-renders on every token batch.
 */
export const MessageBubble = React.memo<MessageBubbleProps>(({ message }) => {
  const isAi = message.role === 'ai'
  const AvatarIcon = isAi ? Bot : User
  return (
    <div className={`message-row ${message.role}`}>
      <div className="avatar">
        <AvatarIcon size={24} />
      </div>
      <div className="bubble">
        <div className="content">{message.content}</div>
      </div>
    </div>
  )
}, (prevProps, nextProps) => {
  // Skip the re-render only when both id and content are unchanged.
  // role/timestamp are presumably immutable for a given id — confirm with
  // the message producer before relying on that here.
  return (
    prevProps.message.id === nextProps.message.id &&
    prevProps.message.content === nextProps.message.content
  )
})

MessageBubble.displayName = 'MessageBubble'

View File

@@ -1,6 +1,6 @@
import { useState, useEffect } from 'react'
import { NavLink, useLocation } from 'react-router-dom'
import { Home, MessageSquare, BarChart3, Users, FileText, Database, Settings, ChevronLeft, ChevronRight, Download, Bot, Aperture, UserCircle, Lock } from 'lucide-react'
import { Home, MessageSquare, BarChart3, Users, FileText, Database, Settings, ChevronLeft, ChevronRight, Download, Aperture, UserCircle, Lock } from 'lucide-react'
import { useAppStore } from '../stores/appStore'
import * as configService from '../services/config'
import './Sidebar.scss'

552
src/pages/AIChatPage.scss Normal file
View File

@@ -0,0 +1,552 @@
// AI chat page — clean, minimal look
.ai-chat-page {
  display: flex;
  height: 100%;
  width: 100%;
  background: var(--bg-gradient);
  color: var(--text-primary);
  overflow: hidden;

  .chat-container {
    flex: 1;
    display: flex;
    flex-direction: column;
    max-width: 1200px;
    margin: 0 auto;
    width: 100%;
  }

  // ========== Top header — removed ==========
  // The model selector now lives inside the input bar
  // ========== Chat area ==========
  .chat-main {
    flex: 1;
    display: flex;
    flex-direction: column;
    background: var(--bg-secondary);
    position: relative;
    overflow: hidden;

    // Empty state (shown before the first message)
    .empty-state {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      padding: 40px;

      .icon {
        width: 80px;
        height: 80px;
        border-radius: 50%;
        background: var(--primary-light);
        display: flex;
        align-items: center;
        justify-content: center;
        margin-bottom: 24px;

        svg {
          width: 40px;
          height: 40px;
          color: var(--primary);
        }
      }

      h2 {
        font-size: 20px;
        font-weight: 600;
        color: var(--text-primary);
        margin: 0 0 8px;
      }

      p {
        font-size: 14px;
        color: var(--text-tertiary);
        margin: 0;
      }
    }

    // Message list (virtualized by react-virtuoso in the page component)
    .messages-list {
      flex: 1;
      overflow-y: auto;
      padding: 24px 32px;
      display: flex;
      flex-direction: column;
      gap: 20px;

      &::-webkit-scrollbar {
        width: 6px;
      }

      &::-webkit-scrollbar-track {
        background: transparent;
      }

      &::-webkit-scrollbar-thumb {
        background: var(--border-color);
        border-radius: 3px;
      }

      .message-row {
        display: flex;
        gap: 12px;
        max-width: 80%;
        animation: messageIn 0.3s ease-out;

        // User messages (right-aligned, primary gradient bubble)
        &.user {
          align-self: flex-end;
          flex-direction: row-reverse;

          .avatar {
            background: var(--primary-light);
            color: var(--primary);
          }

          .bubble {
            background: var(--primary-gradient);
            color: white;
            border-radius: 18px 18px 4px 18px;
            box-shadow: 0 2px 10px color-mix(in srgb, var(--primary) 20%, transparent);

            .content {
              color: white;
            }
          }
        }

        // AI messages (left-aligned, card bubble)
        &.ai {
          align-self: flex-start;

          .avatar {
            background: var(--bg-tertiary);
            color: var(--text-secondary);
          }

          .bubble {
            background: var(--card-bg);
            border: 1px solid var(--border-color);
            border-radius: 18px 18px 18px 4px;
            backdrop-filter: blur(10px);
            -webkit-backdrop-filter: blur(10px);
          }
        }

        .avatar {
          flex-shrink: 0;
          width: 32px;
          height: 32px;
          border-radius: 50%;
          display: flex;
          align-items: center;
          justify-content: center;
        }

        .bubble {
          padding: 12px 16px;
          flex: 1;
          min-width: 0;

          .content,
          .markdown-content {
            font-size: 14px;
            line-height: 1.6;
            color: var(--text-primary);
            word-wrap: break-word;
            overflow-wrap: break-word;
          }

          // Markdown styles
          .markdown-content {
            p {
              margin: 0 0 0.8em;

              &:last-child {
                margin-bottom: 0;
              }
            }

            h1,
            h2,
            h3,
            h4,
            h5,
            h6 {
              margin: 1em 0 0.5em;
              font-weight: 600;
              line-height: 1.3;
              color: var(--text-primary);

              &:first-child {
                margin-top: 0;
              }
            }

            h1 {
              font-size: 1.5em;
            }

            h2 {
              font-size: 1.3em;
            }

            h3 {
              font-size: 1.1em;
            }

            ul,
            ol {
              margin: 0.5em 0;
              padding-left: 1.5em;
            }

            li {
              margin: 0.3em 0;
            }

            code {
              background: var(--bg-tertiary);
              padding: 2px 6px;
              border-radius: 4px;
              font-family: 'Consolas', 'Monaco', monospace;
              font-size: 0.9em;
            }

            pre {
              background: var(--bg-tertiary);
              padding: 12px;
              border-radius: 8px;
              overflow-x: auto;
              margin: 0.8em 0;

              // Inline-code styling is reset inside fenced blocks
              code {
                background: none;
                padding: 0;
              }
            }

            blockquote {
              border-left: 3px solid var(--primary);
              padding-left: 12px;
              margin: 0.8em 0;
              color: var(--text-secondary);
            }

            a {
              color: var(--primary);
              text-decoration: none;

              &:hover {
                text-decoration: underline;
              }
            }

            strong {
              font-weight: 600;
              color: var(--text-primary);
            }

            hr {
              border: none;
              border-top: 1px solid var(--border-color);
              margin: 1em 0;
            }

            table {
              border-collapse: collapse;
              width: 100%;
              margin: 0.8em 0;

              th,
              td {
                border: 1px solid var(--border-color);
                padding: 8px 12px;
                text-align: left;
              }

              th {
                background: var(--bg-tertiary);
                font-weight: 600;
              }
            }
          }
        }
      }

      // Spacer so the last message scrolls clear of the floating input bar
      .list-spacer {
        height: 100px;
        flex-shrink: 0;
      }
    }

    // Input area (floats centered over the bottom of the chat)
    .input-area {
      position: absolute;
      bottom: 24px;
      left: 50%;
      transform: translateX(-50%);
      width: calc(100% - 64px);
      max-width: 800px;
      z-index: 10;

      .input-wrapper {
        display: flex;
        align-items: flex-end;
        gap: 10px;
        background: var(--card-bg);
        backdrop-filter: blur(20px);
        -webkit-backdrop-filter: blur(20px);
        border: 1px solid var(--border-color);
        border-radius: 20px;
        padding: 10px 14px;
        box-shadow: 0 8px 32px rgba(0, 0, 0, 0.08);
        transition: all 0.2s ease;

        &:focus-within {
          border-color: var(--primary);
          box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1),
            0 0 0 3px color-mix(in srgb, var(--primary) 15%, transparent);
        }

        textarea {
          flex: 1;
          min-height: 24px;
          max-height: 120px;
          padding: 8px 0;
          background: transparent;
          border: none;
          resize: none;
          color: var(--text-primary);
          font-size: 14px;
          font-family: inherit;
          line-height: 1.5;

          &:focus {
            outline: none;
          }

          &::placeholder {
            color: var(--text-tertiary);
          }

          &:disabled {
            cursor: not-allowed;
          }
        }

        .input-actions {
          display: flex;
          align-items: center;
          gap: 8px;
          flex-shrink: 0;

          // Model selector
          .model-selector {
            position: relative;

            .model-btn {
              display: flex;
              align-items: center;
              justify-content: center;
              gap: 6px;
              width: auto;
              height: 36px;
              padding: 6px 12px;
              background: transparent;
              border: 1px solid var(--border-color);
              border-radius: 10px;
              cursor: pointer;
              color: var(--text-secondary);
              font-size: 12px;
              font-weight: 500;
              white-space: nowrap;
              transition: all 0.2s ease;
              flex-shrink: 0;

              svg {
                flex-shrink: 0;

                &.spin {
                  animation: spin 1s linear infinite;
                }
              }

              &:hover:not(:disabled) {
                background: var(--bg-hover);
                border-color: var(--text-tertiary);
                color: var(--text-primary);
              }

              &.loaded {
                background: color-mix(in srgb, var(--primary) 15%, transparent);
                border-color: var(--primary);
                color: var(--primary);
              }

              &.loading {
                opacity: 0.7;
              }

              &.disabled {
                opacity: 0.5;
                cursor: not-allowed;
              }
            }

            // Opens upward (anchored to bottom of the button)
            .model-dropdown {
              position: absolute;
              bottom: 100%;
              right: 0;
              margin-bottom: 8px;
              background: var(--card-bg);
              backdrop-filter: blur(20px);
              -webkit-backdrop-filter: blur(20px);
              border: 1px solid var(--border-color);
              border-radius: 12px;
              box-shadow: 0 8px 24px rgba(0, 0, 0, 0.12);
              z-index: 100;
              overflow: hidden;
              animation: dropdownIn 0.2s ease-out;
              min-width: 140px;

              .model-option {
                display: flex;
                align-items: center;
                justify-content: space-between;
                padding: 10px 14px;
                cursor: pointer;
                font-size: 13px;
                color: var(--text-primary);
                transition: background 0.15s ease;
                white-space: nowrap;

                &:hover:not(.disabled) {
                  background: var(--bg-hover);
                }

                &.active {
                  background: color-mix(in srgb, var(--primary) 20%, transparent);
                  color: var(--primary);
                  font-weight: 600;

                  .check {
                    color: var(--primary);
                  }
                }

                .check {
                  margin-left: 8px;
                  color: var(--text-tertiary);
                  font-weight: 600;
                }
              }
            }
          }

          // Thinking-mode toggle button
          .mode-toggle {
            width: 36px;
            height: 36px;
            display: flex;
            align-items: center;
            justify-content: center;
            background: transparent;
            border: 1px solid var(--border-color);
            border-radius: 10px;
            cursor: pointer;
            color: var(--text-tertiary);
            transition: all 0.2s ease;
            flex-shrink: 0;

            &:hover:not(:disabled) {
              background: var(--bg-hover);
              color: var(--text-primary);
            }

            &.active {
              background: color-mix(in srgb, var(--primary) 15%, transparent);
              border-color: var(--primary);
              color: var(--primary);
            }

            &:disabled {
              opacity: 0.4;
              cursor: not-allowed;
            }
          }

          .send-btn {
            width: 36px;
            height: 36px;
            display: flex;
            align-items: center;
            justify-content: center;
            background: var(--primary-gradient);
            border: none;
            border-radius: 10px;
            cursor: pointer;
            color: white;
            transition: all 0.2s ease;
            flex-shrink: 0;
            box-shadow: 0 2px 8px color-mix(in srgb, var(--primary) 25%, transparent);

            &:hover:not(:disabled) {
              transform: scale(1.05);
              box-shadow: 0 4px 12px color-mix(in srgb, var(--primary) 35%, transparent);
            }

            &:active:not(:disabled) {
              transform: scale(0.98);
            }

            &:disabled {
              background: var(--bg-tertiary);
              color: var(--text-tertiary);
              box-shadow: none;
              cursor: not-allowed;
            }
          }
        }
      }
    }
  }
}
// Slide-up fade-in applied to each newly appended message row.
@keyframes messageIn {
  0% {
    transform: translateY(8px);
    opacity: 0;
  }

  100% {
    transform: translateY(0);
    opacity: 1;
  }
}
// Drop-down fade-in for the model picker (slides down into place).
@keyframes dropdownIn {
  0% {
    transform: translateY(-8px);
    opacity: 0;
  }

  100% {
    transform: translateY(0);
    opacity: 1;
  }
}
// Full clockwise rotation used by the loading spinner icons.
@keyframes spin {
  0% {
    transform: rotate(0deg);
  }

  100% {
    transform: rotate(360deg);
  }
}

391
src/pages/AIChatPage.tsx Normal file
View File

@@ -0,0 +1,391 @@
import { useState, useEffect, useRef, useCallback } from 'react'
import { Send, Bot, User, Cpu, ChevronDown, Loader2 } from 'lucide-react'
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso'
import { engineService, PRESET_MODELS, ModelInfo } from '../services/EngineService'
import { MessageBubble } from '../components/MessageBubble'
import './AIChatPage.scss'
// Local chat-message model.
// NOTE(review): this interface is duplicated in components/MessageBubble.tsx —
// consider moving it to a shared types module.
interface ChatMessage {
  id: string;
  role: 'user' | 'ai';
  content: string;
  timestamp: number;
}
// Cap on retained messages so long conversations cannot grow memory unbounded.
const MAX_MESSAGES = 200
/**
 * Local AI chat page.
 *
 * Responsibilities:
 *  - discover which preset models are downloaded and lazily load one,
 *  - stream tokens from the engine into the newest AI message,
 *  - render the conversation through a virtualized list (react-virtuoso).
 *
 * Streaming tokens are buffered in refs and flushed to React state at most
 * once per animation frame, so the UI does not re-render per token.
 */
export default function AIChatPage() {
  const [input, setInput] = useState('')
  const [messages, setMessages] = useState<ChatMessage[]>([])
  const [isTyping, setIsTyping] = useState(false)
  const [models, setModels] = useState<ModelInfo[]>([...PRESET_MODELS])
  const [selectedModel, setSelectedModel] = useState<string | null>(null)
  const [modelLoaded, setModelLoaded] = useState(false)
  const [loadingModel, setLoadingModel] = useState(false)
  const [isThinkingMode, setIsThinkingMode] = useState(true)
  const [showModelDropdown, setShowModelDropdown] = useState(false)
  const textareaRef = useRef<HTMLTextAreaElement>(null)
  const virtuosoRef = useRef<VirtuosoHandle>(null)
  const dropdownRef = useRef<HTMLDivElement>(null)

  // Streaming buffers: tokens accumulate in streamingContentRef and are
  // flushed into the message identified by streamingMessageIdRef via a
  // single pending requestAnimationFrame (id held in rafIdRef).
  const streamingContentRef = useRef('')
  const streamingMessageIdRef = useRef<string | null>(null)
  const rafIdRef = useRef<number | null>(null)

  useEffect(() => {
    checkModelsStatus()
    // Lazily initialize the Llama service when the user enters this page.
    const initLlama = async () => {
      try {
        await window.electronAPI.llama?.init()
        console.log('[AIChatPage] Llama service initialized')
      } catch (e) {
        console.error('[AIChatPage] Failed to initialize Llama:', e)
      }
    }
    initLlama()
    // On unmount: cancel any pending flush and drop engine callbacks.
    return () => {
      if (rafIdRef.current !== null) {
        cancelAnimationFrame(rafIdRef.current)
        rafIdRef.current = null
      }
      engineService.clearCallbacks()
    }
  }, [])

  // Release engine resources if the whole window closes while chatting.
  useEffect(() => {
    const handleBeforeUnload = () => {
      engineService.dispose()
    }
    window.addEventListener('beforeunload', handleBeforeUnload)
    return () => window.removeEventListener('beforeunload', handleBeforeUnload)
  }, [])

  // Close the model dropdown when clicking anywhere outside of it.
  useEffect(() => {
    const handleClickOutside = (event: MouseEvent) => {
      if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
        setShowModelDropdown(false)
      }
    }
    document.addEventListener('mousedown', handleClickOutside)
    return () => document.removeEventListener('mousedown', handleClickOutside)
  }, [])

  // Refresh the downloaded flag of each preset model and auto-select the
  // first one available on disk. Runs once on mount, so reading `models`
  // from the closure sees the initial PRESET_MODELS snapshot (intended).
  const checkModelsStatus = async () => {
    const updatedModels = await Promise.all(models.map(async (m) => {
      const exists = await engineService.checkModelExists(m.path)
      return { ...m, downloaded: exists }
    }))
    setModels(updatedModels)
    // Auto-select first available model
    if (!selectedModel) {
      const available = updatedModels.find(m => m.downloaded)
      if (available) {
        setSelectedModel(available.path)
      }
    }
  }

  /**
   * Load a model into the engine and open a fresh chat session.
   * @param modelPath optional explicit path; defaults to the selected model.
   * @returns true when the model is ready for chatting.
   */
  const handleLoadModel = async (modelPath?: string) => {
    const pathToLoad = modelPath || selectedModel
    if (!pathToLoad) return false
    setLoadingModel(true)
    try {
      await engineService.loadModel(pathToLoad)
      // Initialize session with system prompt
      await engineService.createSession("You are a helpful AI assistant.")
      setModelLoaded(true)
      return true
    } catch (e) {
      console.error("Load failed", e)
      alert("模型加载失败: " + String(e))
      return false
    } finally {
      setLoadingModel(false)
    }
  }

  // Pick a model from the dropdown (only shown when several are available).
  const handleSelectModel = (modelPath: string) => {
    setSelectedModel(modelPath)
    setShowModelDropdown(false)
  }

  // Downloaded models only, and the metadata of the currently selected one.
  const availableModels = models.filter(m => m.downloaded)
  const selectedModelInfo = models.find(m => m.path === selectedModel)

  // Append a message, trimming history so it never exceeds MAX_MESSAGES.
  const appendMessage = useCallback((msg: ChatMessage) => {
    setMessages(prev => {
      const next = [...prev, msg]
      return next.length > MAX_MESSAGES ? next.slice(-MAX_MESSAGES) : next
    })
  }, [])

  // Copy the buffered streaming content into the in-flight AI message and
  // mark the RAF slot free so the next token can schedule a new flush.
  const updateStreamingMessage = useCallback(() => {
    if (!streamingMessageIdRef.current) return
    setMessages(prev => prev.map(msg =>
      msg.id === streamingMessageIdRef.current
        ? { ...msg, content: streamingContentRef.current }
        : msg
    ))
    rafIdRef.current = null
  }, [])

  // Token callback: buffer the token and schedule at most one RAF flush.
  const handleToken = useCallback((token: string) => {
    streamingContentRef.current += token
    if (rafIdRef.current === null) {
      rafIdRef.current = requestAnimationFrame(updateStreamingMessage)
    }
  }, [updateStreamingMessage])

  const handleSend = async () => {
    if (!input.trim() || isTyping) return
    // Lazily load the model on first send.
    if (!modelLoaded) {
      if (!selectedModel) {
        alert("请先下载模型(设置页面)")
        return
      }
      const loaded = await handleLoadModel()
      if (!loaded) return
    }
    appendMessage({
      id: Date.now().toString(),
      role: 'user',
      content: input,
      timestamp: Date.now()
    })
    setInput('')
    setIsTyping(true)
    // Reset textarea height
    if (textareaRef.current) {
      textareaRef.current.style.height = 'auto'
    }
    const aiMsgId = (Date.now() + 1).toString()
    streamingContentRef.current = ''
    streamingMessageIdRef.current = aiMsgId
    // Optimistic empty AI message that the streaming flush fills in.
    appendMessage({
      id: aiMsgId,
      role: 'ai',
      content: '',
      timestamp: Date.now()
    })
    // Append thinking command based on mode
    const msgWithSuffix = input + (isThinkingMode ? " /think" : " /no_think")
    try {
      await engineService.chat(msgWithSuffix, handleToken, { thinking: isThinkingMode })
    } catch (e) {
      console.error("Chat failed", e)
      appendMessage({
        id: Date.now().toString(),
        role: 'ai',
        content: "❌ Error: Failed to get response from AI.",
        timestamp: Date.now()
      })
    } finally {
      setIsTyping(false)
      // BUGFIX: flush the remaining buffered tokens BEFORE clearing
      // streamingMessageIdRef, and always reset rafIdRef. Previously the id
      // was nulled first, so the final updateStreamingMessage() bailed out:
      // the last token batch was dropped AND rafIdRef kept a stale
      // (cancelled) id, which stopped handleToken from ever scheduling a
      // flush again — freezing streaming for all subsequent replies.
      if (rafIdRef.current !== null) {
        cancelAnimationFrame(rafIdRef.current)
        rafIdRef.current = null
      }
      updateStreamingMessage()
      streamingMessageIdRef.current = null
    }
  }

  // Render the model button / dropdown embedded in the input bar.
  const renderModelSelector = () => {
    // No downloaded model at all
    if (availableModels.length === 0) {
      return (
        <button
          className="model-btn disabled"
          title="请先在设置页面下载模型"
        >
          <Bot size={16} />
          <span></span>
        </button>
      )
    }
    // Exactly one model: show it directly, no dropdown
    if (availableModels.length === 1) {
      return (
        <button
          className={`model-btn ${modelLoaded ? 'loaded' : ''} ${loadingModel ? 'loading' : ''}`}
          title={modelLoaded ? "模型已就绪" : "发送消息时自动加载"}
        >
          {loadingModel ? (
            <Loader2 size={16} className="spin" />
          ) : (
            <Bot size={16} />
          )}
          <span>{loadingModel ? '加载中' : selectedModelInfo?.name || '模型'}</span>
        </button>
      )
    }
    // Several models: show a dropdown picker
    return (
      <div className="model-selector" ref={dropdownRef}>
        <button
          className={`model-btn ${modelLoaded ? 'loaded' : ''} ${loadingModel ? 'loading' : ''}`}
          onClick={() => !loadingModel && setShowModelDropdown(!showModelDropdown)}
          title="点击选择模型"
        >
          {loadingModel ? (
            <Loader2 size={16} className="spin" />
          ) : (
            <Bot size={16} />
          )}
          <span>{loadingModel ? '加载中' : selectedModelInfo?.name || '选择模型'}</span>
          <ChevronDown size={13} className={showModelDropdown ? 'rotate' : ''} />
        </button>
        {showModelDropdown && (
          <div className="model-dropdown">
            {availableModels.map(model => (
              <div
                key={model.path}
                className={`model-option ${selectedModel === model.path ? 'active' : ''}`}
                onClick={() => handleSelectModel(model.path)}
              >
                <span>{model.name}</span>
                {selectedModel === model.path && (
                  <span className="check"></span>
                )}
              </div>
            ))}
          </div>
        )}
      </div>
    )
  }

  return (
    <div className="ai-chat-page">
      <div className="chat-main">
        {messages.length === 0 ? (
          <div className="empty-state">
            <div className="icon">
              <Bot size={40} />
            </div>
            <h2>AI </h2>
            <p>
              {availableModels.length === 0
                ? "请先在设置页面下载模型"
                : "输入消息开始对话,模型将自动加载"
              }
            </p>
          </div>
        ) : (
          <Virtuoso
            ref={virtuosoRef}
            data={messages}
            className="messages-list"
            initialTopMostItemIndex={messages.length - 1}
            followOutput="smooth"
            itemContent={(_, message) => (
              <MessageBubble key={message.id} message={message} />
            )}
            components={{
              Footer: () => <div className="list-spacer" />
            }}
          />
        )}
        <div className="input-area">
          <div className="input-wrapper">
            <textarea
              ref={textareaRef}
              value={input}
              onChange={e => {
                setInput(e.target.value)
                // Auto-grow the textarea up to 120px.
                e.target.style.height = 'auto'
                e.target.style.height = `${Math.min(e.target.scrollHeight, 120)}px`
              }}
              onKeyDown={e => {
                // Enter sends, Shift+Enter inserts a newline.
                if (e.key === 'Enter' && !e.shiftKey) {
                  e.preventDefault()
                  handleSend()
                  // Reset height after send
                  if (textareaRef.current) textareaRef.current.style.height = 'auto'
                }
              }}
              placeholder={availableModels.length === 0 ? "请先下载模型..." : "输入消息..."}
              disabled={availableModels.length === 0 || loadingModel}
              rows={1}
            />
            <div className="input-actions">
              {renderModelSelector()}
              <button
                className={`mode-toggle ${isThinkingMode ? 'active' : ''}`}
                onClick={() => setIsThinkingMode(!isThinkingMode)}
                title={isThinkingMode ? "深度思考模式已开启" : "深度思考模式已关闭"}
                disabled={availableModels.length === 0}
              >
                <Cpu size={18} />
              </button>
              <button
                className="send-btn"
                onClick={handleSend}
                disabled={!input.trim() || availableModels.length === 0 || isTyping || loadingModel}
              >
                <Send size={18} />
              </button>
            </div>
          </div>
        </div>
      </div>
    </div>
  )
}

View File

@@ -651,14 +651,80 @@
align-items: center;
justify-content: space-between;
gap: 12px;
margin-top: 6px;
margin-top: 10px;
padding: 12px 16px;
background: var(--bg-primary);
border-radius: 12px;
border: 1px solid var(--border-color);
transition: all 0.2s;
&:hover {
border-color: var(--text-tertiary);
background: color-mix(in srgb, var(--bg-primary) 98%, var(--primary));
}
}
.log-status {
font-size: 13px;
font-weight: 500;
color: var(--text-secondary);
}
/* Premium Switch Style */
.switch {
position: relative;
display: inline-block;
width: 44px;
height: 24px;
flex-shrink: 0;
input {
opacity: 0;
width: 0;
height: 0;
&:checked+.switch-slider {
background-color: var(--primary);
box-shadow: 0 0 8px color-mix(in srgb, var(--primary) 30%, transparent);
&::before {
transform: translateX(18px);
background-color: white;
}
}
&:focus+.switch-slider {
box-shadow: 0 0 1px var(--primary);
}
}
.switch-slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: var(--bg-tertiary);
transition: .4s cubic-bezier(0.4, 0, 0.2, 1);
border-radius: 24px;
border: 1px solid var(--border-color);
&::before {
position: absolute;
content: "";
height: 18px;
width: 18px;
left: 2px;
bottom: 2px;
background-color: var(--text-tertiary);
transition: .4s cubic-bezier(0.4, 0, 0.2, 1);
border-radius: 50%;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
}
}
.language-checkboxes {
display: flex;
flex-wrap: wrap;
@@ -1555,4 +1621,238 @@
min-height: 100px;
font-size: 13px;
color: var(--text-tertiary);
// Add styles for the new model cards
}
.setting-control.vertical.has-border {
border: 1px solid var(--border-color);
border-radius: 12px;
padding: 16px;
background: var(--bg-primary);
}
.model-status-card {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
}
.model-info {
flex: 1;
min-width: 0;
.model-name {
font-size: 15px;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 6px;
}
.model-path {
display: flex;
flex-direction: column;
gap: 4px;
.status-indicator {
display: inline-flex;
align-items: center;
gap: 4px;
font-size: 12px;
font-weight: 500;
&.success {
color: #10b981;
}
&.warning {
color: #f59e0b;
}
}
.path-text {
font-size: 12px;
color: var(--text-tertiary);
font-family: monospace;
word-break: break-all;
}
}
}
.model-actions {
flex-shrink: 0;
.btn-download {
display: inline-flex;
align-items: center;
gap: 8px;
padding: 8px 16px;
background: var(--primary);
color: white;
border: none;
border-radius: 999px;
font-size: 13px;
font-weight: 600;
cursor: pointer;
transition: all 0.2s cubic-bezier(0.4, 0, 0.2, 1);
box-shadow: 0 4px 12px color-mix(in srgb, var(--primary) 25%, transparent);
&:hover {
background: var(--primary-hover);
transform: translateY(-1px);
box-shadow: 0 6px 16px color-mix(in srgb, var(--primary) 35%, transparent);
}
&:active {
transform: translateY(0);
}
svg {
flex-shrink: 0;
}
}
.download-status {
display: flex;
flex-direction: column;
gap: 6px;
width: 280px;
.status-header,
.progress-info {
// specific layout class
display: flex;
justify-content: space-between;
align-items: center; // Align vertically
width: 100%;
}
.percent {
font-size: 14px;
font-weight: 700;
color: var(--primary);
font-family: 'Inter', system-ui, sans-serif;
}
.metrics,
.details {
display: flex;
align-items: center;
gap: 6px;
font-size: 12px;
color: var(--text-secondary);
font-family: var(--font-mono);
.speed {
color: var(--text-primary);
font-weight: 600;
opacity: 0.8;
}
}
}
.progress-bar-mini {
width: 100%;
height: 6px;
background: var(--bg-tertiary);
border-radius: 3px;
overflow: hidden;
border: 1px solid var(--border-color);
.fill {
height: 100%;
background: linear-gradient(90deg, var(--primary) 0%, color-mix(in srgb, var(--primary) 80%, white) 100%);
border-radius: 3px;
transition: width 0.3s ease;
position: relative;
&::after {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: linear-gradient(90deg,
transparent,
rgba(255, 255, 255, 0.2),
transparent);
animation: progress-shimmer 2s infinite;
}
}
}
.spin {
animation: spin 1s linear infinite;
}
}
// Left-to-right shimmer sweep across the mini progress-bar fill.
@keyframes progress-shimmer {
  from {
    transform: translateX(-100%);
  }

  to {
    transform: translateX(100%);
  }
}
.sub-setting {
margin-top: 16px;
padding-top: 16px;
border-top: 1px solid var(--border-color);
.sub-label {
font-size: 13px;
color: var(--text-secondary);
margin-bottom: 8px;
}
}
.path-selector {
display: flex;
gap: 8px;
input {
margin-bottom: 0 !important;
flex: 1;
font-family: monospace;
font-size: 12px;
}
.btn-icon {
width: 38px;
height: 38px;
display: flex;
align-items: center;
justify-content: center;
border-radius: 999px; // Circle
border: 1px solid var(--border-color);
background: var(--bg-primary);
color: var(--text-secondary);
cursor: pointer;
transition: all 0.2s;
&:hover {
color: var(--text-primary);
background: var(--bg-tertiary);
}
&.danger:hover {
color: var(--danger);
background: rgba(220, 38, 38, 0.1);
border-color: rgba(220, 38, 38, 0.2);
}
}
}
// Full clockwise rotation used by the settings-page spinner icons.
@keyframes spin {
  0% {
    transform: rotate(0deg);
  }

  100% {
    transform: rotate(360deg);
  }
}

View File

@@ -14,13 +14,13 @@ import {
import { Avatar } from '../components/Avatar'
import './SettingsPage.scss'
type SettingsTab = 'appearance' | 'notification' | 'database' | 'whisper' | 'export' | 'cache' | 'security' | 'about'
type SettingsTab = 'appearance' | 'notification' | 'database' | 'models' | 'export' | 'cache' | 'security' | 'about'
const tabs: { id: SettingsTab; label: string; icon: React.ElementType }[] = [
{ id: 'appearance', label: '外观', icon: Palette },
{ id: 'notification', label: '通知', icon: Bell },
{ id: 'database', label: '数据库连接', icon: Database },
{ id: 'whisper', label: '语音识别模型', icon: Mic },
{ id: 'models', label: '模型管理', icon: Mic },
{ id: 'export', label: '导出', icon: Download },
{ id: 'cache', label: '缓存', icon: HardDrive },
{ id: 'security', label: '安全', icon: ShieldCheck },
@@ -76,7 +76,21 @@ function SettingsPage() {
const [whisperModelDir, setWhisperModelDir] = useState('')
const [isWhisperDownloading, setIsWhisperDownloading] = useState(false)
const [whisperDownloadProgress, setWhisperDownloadProgress] = useState(0)
const [whisperProgressData, setWhisperProgressData] = useState<{ downloaded: number; total: number; speed: number }>({ downloaded: 0, total: 0, speed: 0 })
const [whisperModelStatus, setWhisperModelStatus] = useState<{ exists: boolean; modelPath?: string; tokensPath?: string } | null>(null)
const [llamaModelStatus, setLlamaModelStatus] = useState<{ exists: boolean; path?: string; size?: number } | null>(null)
const [isLlamaDownloading, setIsLlamaDownloading] = useState(false)
const [llamaDownloadProgress, setLlamaDownloadProgress] = useState(0)
const [llamaProgressData, setLlamaProgressData] = useState<{ downloaded: number; total: number; speed: number }>({ downloaded: 0, total: 0, speed: 0 })
// Format a byte count as a human-readable string (e.g. 1536 -> "1.5 KB").
// Fixes: the unit index is clamped so values >= 1 PB render as TB instead
// of "… undefined" (sizes[5] is out of range), and non-finite or negative
// input falls back to "0 B" instead of producing NaN output.
const formatBytes = (bytes: number) => {
  if (!Number.isFinite(bytes) || bytes <= 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp into [0, TB] — Math.log(bytes)/Math.log(k) picks the 1024-power.
  const i = Math.min(sizes.length - 1, Math.max(0, Math.floor(Math.log(bytes) / Math.log(k))));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
};
const [autoTranscribeVoice, setAutoTranscribeVoice] = useState(false)
const [transcribeLanguages, setTranscribeLanguages] = useState<string[]>(['zh'])
const [exportDefaultFormat, setExportDefaultFormat] = useState('excel')
@@ -273,6 +287,9 @@ function SettingsPage() {
if (savedWhisperModelDir) setWhisperModelDir(savedWhisperModelDir)
// Load Llama status after config
void checkLlamaModelStatus()
} catch (e: any) {
console.error('加载配置失败:', e)
}
@@ -313,7 +330,12 @@ function SettingsPage() {
}, [])
useEffect(() => {
const removeListener = window.electronAPI.whisper?.onDownloadProgress?.((payload: { modelName: string; downloadedBytes: number; totalBytes?: number; percent?: number }) => {
const removeListener = window.electronAPI.whisper?.onDownloadProgress?.((payload: { modelName: string; downloadedBytes: number; totalBytes?: number; percent?: number; speed?: number }) => {
setWhisperProgressData({
downloaded: payload.downloadedBytes,
total: payload.totalBytes || 0,
speed: payload.speed || 0
})
if (typeof payload.percent === 'number') {
setWhisperDownloadProgress(payload.percent)
}
@@ -582,6 +604,7 @@ function SettingsPage() {
setWhisperModelDir(dir)
await configService.setWhisperModelDir(dir)
showMessage('已选择 Whisper 模型目录', true)
await checkLlamaModelStatus()
}
} catch (e: any) {
showMessage('选择目录失败', false)
@@ -617,6 +640,68 @@ function SettingsPage() {
const handleResetWhisperModelDir = async () => {
setWhisperModelDir('')
await configService.setWhisperModelDir('')
await checkLlamaModelStatus()
}
// Ask the main process for the on-disk status of the preset Qwen3 model
// and mirror the result into component state (llamaModelStatus).
const checkLlamaModelStatus = async () => {
  try {
    // @ts-ignore
    const modelsPath = await window.electronAPI.llama?.getModelsPath()
    if (!modelsPath) return
    const modelName = "Qwen3-4B-Q4_K_M.gguf" // Hardcoded preset for now
    // NOTE(review): backslash join assumes a Windows path — TODO confirm
    // this never runs on macOS/Linux, or join via a path API in the main process.
    const fullPath = `${modelsPath}\\${modelName}`
    // @ts-ignore
    const status = await window.electronAPI.llama?.getModelStatus(fullPath)
    if (status) {
      setLlamaModelStatus({
        exists: status.exists,
        path: status.path,
        size: status.size
      })
    }
  } catch (e) {
    // Best-effort: a failed status probe only logs; UI keeps the last state.
    console.error("Check llama model status failed", e)
  }
}
// Subscribe to llama model download-progress events for the lifetime of
// the page; converts raw byte counts into a 0-100 percentage.
useEffect(() => {
  const handleLlamaProgress = (payload: { downloaded: number; total: number; speed: number }) => {
    setLlamaProgressData(payload)
    // Only compute a percentage once the total size is known.
    if (payload.total > 0) {
      setLlamaDownloadProgress((payload.downloaded / payload.total) * 100)
    }
  }
  // @ts-ignore
  const removeListener = window.electronAPI.llama?.onDownloadProgress(handleLlamaProgress)
  return () => {
    // The optional-chained API may return undefined; only call real removers.
    if (typeof removeListener === 'function') removeListener()
  }
}, [])
// Download the preset Qwen3 GGUF model from ModelScope into the models
// directory, then refresh the on-disk status. Reports the outcome through
// the page's toast helper (showMessage).
const handleDownloadLlamaModel = async () => {
  if (isLlamaDownloading) return // ignore re-entrant clicks while downloading
  setIsLlamaDownloading(true)
  setLlamaDownloadProgress(0)
  try {
    const modelUrl = "https://www.modelscope.cn/models/Qwen/Qwen3-4B-GGUF/resolve/master/Qwen3-4B-Q4_K_M.gguf"
    // @ts-ignore
    const modelsPath = await window.electronAPI.llama?.getModelsPath()
    const modelName = "Qwen3-4B-Q4_K_M.gguf"
    // NOTE(review): backslash join assumes a Windows path — TODO confirm cross-platform.
    const fullPath = `${modelsPath}\\${modelName}`
    // @ts-ignore
    const result = await window.electronAPI.llama?.downloadModel(modelUrl, fullPath)
    if (result?.success) {
      showMessage('Qwen3 模型下载完成', true)
      await checkLlamaModelStatus()
    } else {
      showMessage(`模型下载失败: ${result?.error || '未知错误'}`, false)
    }
  } catch (e: any) {
    showMessage(`模型下载失败: ${e}`, false)
  } finally {
    setIsLlamaDownloading(false)
  }
}
const handleAutoGetDbKey = async () => {
@@ -1309,113 +1394,142 @@ function SettingsPage() {
</div>
</div>
)
const renderWhisperTab = () => (
const renderModelsTab = () => (
<div className="tab-content">
<div className="form-group">
<label></label>
<span className="form-hint"></span>
<div className="log-toggle-line">
<span className="log-status">{autoTranscribeVoice ? '已开启' : '已关闭'}</span>
<label className="switch" htmlFor="auto-transcribe-toggle">
<input
id="auto-transcribe-toggle"
className="switch-input"
type="checkbox"
checked={autoTranscribeVoice}
onChange={async (e) => {
const enabled = e.target.checked
setAutoTranscribeVoice(enabled)
await configService.setAutoTranscribeVoice(enabled)
showMessage(enabled ? '已开启自动转文字' : '已关闭自动转文字', true)
}}
/>
<span className="switch-slider" />
</label>
</div>
<label></label>
<span className="form-hint"> AI </span>
</div>
<div className="form-group">
<label></label>
<span className="form-hint"></span>
<div className="language-checkboxes">
{[
{ code: 'zh', name: '中文' },
{ code: 'yue', name: '粤语' },
{ code: 'en', name: '英文' },
{ code: 'ja', name: '日文' },
{ code: 'ko', name: '韩文' }
].map((lang) => (
<label key={lang.code} className="language-checkbox">
<input
type="checkbox"
checked={transcribeLanguages.includes(lang.code)}
onChange={async (e) => {
const checked = e.target.checked
let newLanguages: string[]
<label> (Whisper)</label>
<span className="form-hint"></span>
if (checked) {
newLanguages = [...transcribeLanguages, lang.code]
} else {
if (transcribeLanguages.length <= 1) {
showMessage('至少需要选择一种语言', false)
return
}
newLanguages = transcribeLanguages.filter(l => l !== lang.code)
}
setTranscribeLanguages(newLanguages)
await configService.setTranscribeLanguages(newLanguages)
showMessage(`${checked ? '添加' : '移除'}${lang.name}`, true)
}}
/>
<div className="checkbox-custom">
<Check size={14} />
<span>{lang.name}</span>
<div className="setting-control vertical has-border">
<div className="model-status-card">
<div className="model-info">
<div className="model-name">SenseVoiceSmall (245 MB)</div>
<div className="model-path">
{whisperModelStatus?.exists ? (
<span className="status-indicator success"><Check size={14} /> </span>
) : (
<span className="status-indicator warning"></span>
)}
{whisperModelDir && <div className="path-text" title={whisperModelDir}>{whisperModelDir}</div>}
</div>
</label>
))}
</div>
</div>
<div className="form-group whisper-section">
<label> (SenseVoiceSmall)</label>
<span className="form-hint"> Sherpa-onnx/</span>
<span className="form-hint"></span>
<input
type="text"
placeholder="留空使用默认目录"
value={whisperModelDir}
onChange={(e) => {
const value = e.target.value
setWhisperModelDir(value)
scheduleConfigSave('whisperModelDir', () => configService.setWhisperModelDir(value))
}}
/>
<div className="btn-row">
<button className="btn btn-secondary" onClick={handleSelectWhisperModelDir}><FolderOpen size={16} /> </button>
<button className="btn btn-secondary" onClick={handleResetWhisperModelDir}><RotateCcw size={16} /> </button>
</div>
<div className="whisper-status-line">
<span className={`status ${whisperModelStatus?.exists ? 'ok' : 'warn'}`}>
{whisperModelStatus?.exists ? '已下载 (240 MB)' : '未下载 (240 MB)'}
</span>
{whisperModelStatus?.modelPath && <span className="path">{whisperModelStatus.modelPath}</span>}
</div>
{isWhisperDownloading ? (
<div className="whisper-progress">
<div className="progress-info">
<span>...</span>
<span className="percent">{whisperDownloadProgress.toFixed(0)}%</span>
</div>
<div className="progress-bar-container">
<div className="progress-bar">
<div className="progress-fill" style={{ width: `${whisperDownloadProgress}%` }} />
</div>
<div className="model-actions">
{!whisperModelStatus?.exists && !isWhisperDownloading && (
<button
className="btn-download"
onClick={handleDownloadWhisperModel}
>
<Download size={16} />
</button>
)}
{isWhisperDownloading && (
<div className="download-status">
<div className="status-header">
<span className="percent">{Math.round(whisperDownloadProgress)}%</span>
{whisperProgressData.total > 0 && (
<span className="details">
{formatBytes(whisperProgressData.downloaded)} / {formatBytes(whisperProgressData.total)}
<span className="speed">({formatBytes(whisperProgressData.speed)}/s)</span>
</span>
)}
</div>
<div className="progress-bar-mini">
<div className="fill" style={{ width: `${whisperDownloadProgress}%` }}></div>
</div>
</div>
)}
</div>
</div>
) : (
<button className="btn btn-primary btn-download-model" onClick={handleDownloadWhisperModel}>
<Download size={18} />
</button>
)}
<div className="sub-setting">
<div className="sub-label"></div>
<div className="path-selector">
<input
type="text"
value={whisperModelDir}
readOnly
placeholder="默认目录"
/>
<button className="btn-icon" onClick={handleSelectWhisperModelDir} title="选择目录">
<FolderOpen size={18} />
</button>
{whisperModelDir && (
<button className="btn-icon danger" onClick={handleResetWhisperModelDir} title="重置为默认">
<RotateCcw size={18} />
</button>
)}
</div>
</div>
</div>
</div>
<div className="form-group">
<label>AI (Llama)</label>
<span className="form-hint"> AI </span>
<div className="setting-control vertical has-border">
<div className="model-status-card">
<div className="model-info">
<div className="model-name">Qwen3 4B (Preset) (~2.6GB)</div>
<div className="model-path">
{llamaModelStatus?.exists ? (
<span className="status-indicator success"><Check size={14} /> </span>
) : (
<span className="status-indicator warning"></span>
)}
{llamaModelStatus?.path && <div className="path-text" title={llamaModelStatus.path}>{llamaModelStatus.path}</div>}
</div>
</div>
<div className="model-actions">
{!llamaModelStatus?.exists && !isLlamaDownloading && (
<button
className="btn-download"
onClick={handleDownloadLlamaModel}
>
<Download size={16} />
</button>
)}
{isLlamaDownloading && (
<div className="download-status">
<div className="status-header">
<span className="percent">{Math.floor(llamaDownloadProgress)}%</span>
<span className="metrics">
{formatBytes(llamaProgressData.downloaded)} / {formatBytes(llamaProgressData.total)}
<span className="speed">({formatBytes(llamaProgressData.speed)}/s)</span>
</span>
</div>
<div className="progress-bar-mini">
<div className="fill" style={{ width: `${llamaDownloadProgress}%` }}></div>
</div>
</div>
)}
</div>
</div>
</div>
</div>
<div className="form-group">
<label></label>
<span className="form-hint"></span>
<div className="log-toggle-line">
<span className="log-status">{autoTranscribeVoice ? '已开启' : '已关闭'}</span>
<label className="switch">
<input
type="checkbox"
className="switch-input"
checked={autoTranscribeVoice}
onChange={(e) => {
setAutoTranscribeVoice(e.target.checked)
configService.setAutoTranscribeVoice(e.target.checked)
}}
/>
<span className="switch-slider"></span>
</label>
</div>
</div>
</div>
)
@@ -1958,7 +2072,7 @@ function SettingsPage() {
{activeTab === 'appearance' && renderAppearanceTab()}
{activeTab === 'notification' && renderNotificationTab()}
{activeTab === 'database' && renderDatabaseTab()}
{activeTab === 'whisper' && renderWhisperTab()}
{activeTab === 'models' && renderModelsTab()}
{activeTab === 'export' && renderExportTab()}
{activeTab === 'cache' && renderCacheTab()}
{activeTab === 'security' && renderSecurityTab()}

View File

@@ -0,0 +1,108 @@
/**
 * Metadata describing a local LLM model file managed by the engine service.
 */
export interface ModelInfo {
  name: string; // Human-readable display name shown in the UI
  path: string; // Model filename (resolved against the models directory)
  downloadUrl?: string; // If it's a known preset that can be auto-downloaded
  size?: number; // File size when known — presumably bytes; TODO confirm against caller
  downloaded: boolean; // Whether the model file is present locally
}
// Built-in model presets the app can download on demand.
// `downloaded` is initialized to false here; the actual on-disk state is
// determined at runtime (e.g. via checkModelExists).
export const PRESET_MODELS: ModelInfo[] = [
  {
    name: "Qwen3 4B (Preset)",
    path: "Qwen3-4B-Q4_K_M.gguf",
    // Q4_K_M quantized GGUF hosted on ModelScope (~2.6 GB per the settings UI)
    downloadUrl: "https://www.modelscope.cn/models/Qwen/Qwen3-4B-GGUF/resolve/master/Qwen3-4B-Q4_K_M.gguf",
    downloaded: false
  }
];
/**
 * Renderer-side facade over the Electron preload bridge (`window.electronAPI.llama`)
 * for loading, chatting with, and downloading local LLM models.
 *
 * Fix: the path-building template literals previously interpolated the literal
 * text `$(unknown)` instead of the `filename` argument, so every constructed
 * model path was wrong and the parameter was silently ignored.
 */
class EngineService {
  // Streamed-token consumer for the in-flight chat() call, if any.
  private onTokenCallback: ((token: string) => void) | null = null;
  // Progress consumer for the in-flight downloadModel() call, if any.
  private onProgressCallback: ((percent: number) => void) | null = null;
  // Unsubscribe functions returned by the preload bridge, kept so dispose()
  // can remove the listeners and avoid leaking them.
  private _removeTokenListener: (() => void) | null = null;
  private _removeProgressListener: (() => void) | null = null;

  constructor() {
    // Register IPC listeners once; events are forwarded to whichever
    // callback is currently installed.
    this._removeTokenListener = window.electronAPI.llama.onToken((token: string) => {
      if (this.onTokenCallback) {
        this.onTokenCallback(token);
      }
    });
    // NOTE(review): the preload bridge may emit a progress payload object
    // ({ downloaded, total, speed }) rather than a bare percent — confirm
    // against the onDownloadProgress implementation in the preload script.
    this._removeProgressListener = window.electronAPI.llama.onDownloadProgress((percent: number) => {
      if (this.onProgressCallback) {
        this.onProgressCallback(percent);
      }
    });
  }

  /**
   * Resolve a model filename to its absolute path inside the models directory.
   * Forward slashes are accepted by Node's fs APIs on every platform, so we
   * do not hard-code the Windows '\\' separator.
   */
  private async resolveModelPath(filename: string): Promise<string> {
    const modelsPath = await window.electronAPI.llama.getModelsPath();
    return `${modelsPath}/${filename}`;
  }

  /** Check whether the given model file exists in the models directory. */
  public async checkModelExists(filename: string): Promise<boolean> {
    const fullPath = await this.resolveModelPath(filename);
    return await window.electronAPI.llama.checkFileExists(fullPath);
  }

  /** Absolute path of the directory where model files are stored. */
  public async getModelsPath(): Promise<string> {
    return await window.electronAPI.llama.getModelsPath();
  }

  /** Load a model (by filename inside the models directory) into the engine. */
  public async loadModel(filename: string) {
    const fullPath = await this.resolveModelPath(filename);
    console.log("Loading model:", fullPath);
    return await window.electronAPI.llama.loadModel(fullPath);
  }

  /** Create a fresh chat session, optionally seeded with a system prompt. */
  public async createSession(systemPrompt?: string) {
    return await window.electronAPI.llama.createSession(systemPrompt);
  }

  /**
   * Send a chat message; `onToken` receives each streamed token.
   * The callback stays installed until the next chat() or clearCallbacks().
   */
  public async chat(message: string, onToken: (token: string) => void, options?: { thinking?: boolean }) {
    this.onTokenCallback = onToken;
    return await window.electronAPI.llama.chat(message, options);
  }

  /**
   * Download a model into the models directory under `filename`.
   * `onProgress` receives progress events while the transfer runs.
   */
  public async downloadModel(url: string, filename: string, onProgress: (percent: number) => void) {
    const fullPath = await this.resolveModelPath(filename);
    this.onProgressCallback = onProgress;
    return await window.electronAPI.llama.downloadModel(url, fullPath);
  }

  /**
   * 清除当前的回调函数引用
   * 用于避免内存泄漏
   */
  public clearCallbacks() {
    this.onTokenCallback = null;
    this.onProgressCallback = null;
  }

  /**
   * 释放所有资源
   * 包括事件监听器和回调引用
   */
  public dispose() {
    // Drop callback references first so no event can fire into stale closures.
    this.clearCallbacks();
    // Then detach the IPC listeners registered in the constructor.
    if (this._removeTokenListener) {
      this._removeTokenListener();
      this._removeTokenListener = null;
    }
    if (this._removeProgressListener) {
      this._removeProgressListener();
      this._removeProgressListener = null;
    }
  }
}

export const engineService = new EngineService();

View File

@@ -459,6 +459,17 @@ export interface ElectronAPI {
debugResource: (url: string) => Promise<{ success: boolean; status?: number; headers?: any; error?: string }>
proxyImage: (url: string) => Promise<{ success: boolean; dataUrl?: string; error?: string }>
}
llama: {
loadModel: (modelPath: string) => Promise<boolean>
createSession: (systemPrompt?: string) => Promise<boolean>
chat: (message: string) => Promise<{ success: boolean; response?: any; error?: string }>
downloadModel: (url: string, savePath: string) => Promise<void>
getModelsPath: () => Promise<string>
checkFileExists: (filePath: string) => Promise<boolean>
getModelStatus: (modelPath: string) => Promise<{ exists: boolean; path?: string; size?: number; error?: string }>
onToken: (callback: (token: string) => void) => () => void
onDownloadProgress: (callback: (payload: { downloaded: number; total: number; speed: number }) => void) => () => void
}
}
export interface ExportOptions {