修复实时更新偶发失效的问题;删除AI对话有关组件与依赖

This commit is contained in:
cc
2026-02-22 15:26:13 +08:00
parent 70481fd468
commit 4b9d94eb62
15 changed files with 162 additions and 3507 deletions

View File

@@ -90,10 +90,6 @@ function SettingsPage() {
const [whisperDownloadProgress, setWhisperDownloadProgress] = useState(0)
const [whisperProgressData, setWhisperProgressData] = useState<{ downloaded: number; total: number; speed: number }>({ downloaded: 0, total: 0, speed: 0 })
const [whisperModelStatus, setWhisperModelStatus] = useState<{ exists: boolean; modelPath?: string; tokensPath?: string } | null>(null)
const [llamaModelStatus, setLlamaModelStatus] = useState<{ exists: boolean; path?: string; size?: number } | null>(null)
const [isLlamaDownloading, setIsLlamaDownloading] = useState(false)
const [llamaDownloadProgress, setLlamaDownloadProgress] = useState(0)
const [llamaProgressData, setLlamaProgressData] = useState<{ downloaded: number; total: number; speed: number }>({ downloaded: 0, total: 0, speed: 0 })
const formatBytes = (bytes: number) => {
if (bytes === 0) return '0 B';
@@ -336,8 +332,7 @@ function SettingsPage() {
if (savedWhisperModelDir) setWhisperModelDir(savedWhisperModelDir)
// Load Llama status after config
void checkLlamaModelStatus()
} catch (e: any) {
console.error('加载配置失败:', e)
}
@@ -653,7 +648,6 @@ function SettingsPage() {
setWhisperModelDir(dir)
await configService.setWhisperModelDir(dir)
showMessage('已选择 Whisper 模型目录', true)
await checkLlamaModelStatus()
}
} catch (e: any) {
showMessage('选择目录失败', false)
@@ -689,68 +683,6 @@ function SettingsPage() {
// Clear the user-selected Whisper model directory (falls back to the app default),
// persist the cleared value, then re-check the Llama model status since its
// resolved path may depend on the configured directory.
const handleResetWhisperModelDir = async () => {
setWhisperModelDir('')
await configService.setWhisperModelDir('')
await checkLlamaModelStatus()
}
// Refresh the cached Llama model status displayed in the settings UI.
// Queries the main process for the models directory, resolves the preset
// model file inside it, and stores the existence/path/size result in state.
const checkLlamaModelStatus = async () => {
  try {
    // @ts-ignore
    const baseDir = await window.electronAPI.llama?.getModelsPath()
    if (!baseDir) return
    // Hardcoded preset for now
    const presetName = "Qwen3-4B-Q4_K_M.gguf"
    // NOTE(review): '\\' assumes a Windows path separator — confirm the main
    // process expects a Windows-style path here.
    const modelFile = `${baseDir}\\${presetName}`
    // @ts-ignore
    const info = await window.electronAPI.llama?.getModelStatus(modelFile)
    if (!info) return
    setLlamaModelStatus({ exists: info.exists, path: info.path, size: info.size })
  } catch (e) {
    console.error("Check llama model status failed", e)
  }
}
// Subscribe once (empty dependency array) to llama download-progress events
// pushed from the main process; unsubscribe on unmount via the returned
// remover, when the preload API provides one.
useEffect(() => {
  const onProgress = (p: { downloaded: number; total: number; speed: number }) => {
    setLlamaProgressData(p)
    // Avoid division by zero before the total size is known.
    if (p.total > 0) setLlamaDownloadProgress((p.downloaded / p.total) * 100)
  }
  // @ts-ignore
  const unsubscribe = window.electronAPI.llama?.onDownloadProgress(onProgress)
  return () => {
    if (typeof unsubscribe === 'function') unsubscribe()
  }
}, [])
// Download the preset Qwen3 model into the app's models directory.
// Re-entry is guarded by isLlamaDownloading; the flag is always cleared in
// finally, and the model status is refreshed after a successful download.
const handleDownloadLlamaModel = async () => {
  if (isLlamaDownloading) return
  setIsLlamaDownloading(true)
  setLlamaDownloadProgress(0)
  try {
    const modelUrl = "https://www.modelscope.cn/models/Qwen/Qwen3-4B-GGUF/resolve/master/Qwen3-4B-Q4_K_M.gguf"
    // @ts-ignore
    const modelsPath = await window.electronAPI.llama?.getModelsPath()
    // Fix: without this guard a missing models path produced the literal
    // target "undefined\\Qwen3-4B-Q4_K_M.gguf". Bail out early, matching
    // the same check in checkLlamaModelStatus.
    if (!modelsPath) {
      showMessage('模型下载失败: 未能获取模型目录', false)
      return
    }
    const modelName = "Qwen3-4B-Q4_K_M.gguf"
    const fullPath = `${modelsPath}\\${modelName}`
    // @ts-ignore
    const result = await window.electronAPI.llama?.downloadModel(modelUrl, fullPath)
    if (result?.success) {
      showMessage('Qwen3 模型下载完成', true)
      await checkLlamaModelStatus()
    } else {
      showMessage(`模型下载失败: ${result?.error || '未知错误'}`, false)
    }
  } catch (e: any) {
    showMessage(`模型下载失败: ${e}`, false)
  } finally {
    // Runs on every exit path, including the early return above.
    setIsLlamaDownloading(false)
  }
}
const handleAutoGetDbKey = async () => {
@@ -1452,7 +1384,7 @@ function SettingsPage() {
<div className="tab-content">
<div className="form-group">
<label></label>
<span className="form-hint"> AI </span>
<span className="form-hint"></span>
</div>
<div className="form-group">
@@ -1522,50 +1454,6 @@ function SettingsPage() {
</div>
</div>
<div className="form-group">
<label>AI (Llama)</label>
<span className="form-hint"> AI </span>
<div className="setting-control vertical has-border">
<div className="model-status-card">
<div className="model-info">
<div className="model-name">Qwen3 4B (Preset) (~2.6GB)</div>
<div className="model-path">
{llamaModelStatus?.exists ? (
<span className="status-indicator success"><Check size={14} /> </span>
) : (
<span className="status-indicator warning"></span>
)}
{llamaModelStatus?.path && <div className="path-text" title={llamaModelStatus.path}>{llamaModelStatus.path}</div>}
</div>
</div>
<div className="model-actions">
{!llamaModelStatus?.exists && !isLlamaDownloading && (
<button
className="btn-download"
onClick={handleDownloadLlamaModel}
>
<Download size={16} />
</button>
)}
{isLlamaDownloading && (
<div className="download-status">
<div className="status-header">
<span className="percent">{Math.floor(llamaDownloadProgress)}%</span>
<span className="metrics">
{formatBytes(llamaProgressData.downloaded)} / {formatBytes(llamaProgressData.total)}
<span className="speed">({formatBytes(llamaProgressData.speed)}/s)</span>
</span>
</div>
<div className="progress-bar-mini">
<div className="fill" style={{ width: `${llamaDownloadProgress}%` }}></div>
</div>
</div>
)}
</div>
</div>
</div>
</div>
<div className="form-group">
<label></label>
<span className="form-hint"></span>