超级无敌帅气到爆炸起飞的更新

This commit is contained in:
cc
2026-02-03 21:45:17 +08:00
committed by xuncha
parent 04d690dcf1
commit 551995df68
18 changed files with 5938 additions and 394 deletions

View File

@@ -21,6 +21,7 @@ import { videoService } from './services/videoService'
import { snsService } from './services/snsService'
import { contactExportService } from './services/contactExportService'
import { windowsHelloService } from './services/windowsHelloService'
import { llamaService } from './services/llamaService'
import { registerNotificationHandlers, showNotification } from './windows/notificationWindow'
@@ -800,6 +801,64 @@ function registerIpcHandlers() {
return await chatService.getContact(username)
})
// Llama AI
ipcMain.handle('llama:init', async () => {
return await llamaService.init()
})
ipcMain.handle('llama:loadModel', async (_, modelPath: string) => {
return llamaService.loadModel(modelPath)
})
ipcMain.handle('llama:createSession', async (_, systemPrompt?: string) => {
return llamaService.createSession(systemPrompt)
})
ipcMain.handle('llama:chat', async (event, message: string, options?: { thinking?: boolean }) => {
// We use a callback to stream back to the renderer
const webContents = event.sender
try {
if (!webContents) return { success: false, error: 'No sender' }
const response = await llamaService.chat(message, options, (token) => {
if (!webContents.isDestroyed()) {
webContents.send('llama:token', token)
}
})
return { success: true, response }
} catch (e) {
return { success: false, error: String(e) }
}
})
ipcMain.handle('llama:downloadModel', async (event, url: string, savePath: string) => {
const webContents = event.sender
try {
await llamaService.downloadModel(url, savePath, (payload) => {
if (!webContents.isDestroyed()) {
webContents.send('llama:downloadProgress', payload)
}
})
return { success: true }
} catch (e) {
return { success: false, error: String(e) }
}
})
ipcMain.handle('llama:getModelsPath', async () => {
return llamaService.getModelsPath()
})
ipcMain.handle('llama:checkFileExists', async (_, filePath: string) => {
const { existsSync } = await import('fs')
return existsSync(filePath)
})
ipcMain.handle('llama:getModelStatus', async (_, modelPath: string) => {
return llamaService.getModelStatus(modelPath)
})
ipcMain.handle('chat:getContactAvatar', async (_, username: string) => {
return await chatService.getContactAvatar(username)
})

View File

@@ -265,5 +265,26 @@ contextBridge.exposeInMainWorld('electronAPI', {
ipcRenderer.invoke('sns:getTimeline', limit, offset, usernames, keyword, startTime, endTime),
debugResource: (url: string) => ipcRenderer.invoke('sns:debugResource', url),
proxyImage: (url: string) => ipcRenderer.invoke('sns:proxyImage', url)
},
// Llama AI
llama: {
loadModel: (modelPath: string) => ipcRenderer.invoke('llama:loadModel', modelPath),
createSession: (systemPrompt?: string) => ipcRenderer.invoke('llama:createSession', systemPrompt),
chat: (message: string, options?: any) => ipcRenderer.invoke('llama:chat', message, options),
downloadModel: (url: string, savePath: string) => ipcRenderer.invoke('llama:downloadModel', url, savePath),
getModelsPath: () => ipcRenderer.invoke('llama:getModelsPath'),
checkFileExists: (filePath: string) => ipcRenderer.invoke('llama:checkFileExists', filePath),
getModelStatus: (modelPath: string) => ipcRenderer.invoke('llama:getModelStatus', modelPath),
onToken: (callback: (token: string) => void) => {
const listener = (_: any, token: string) => callback(token)
ipcRenderer.on('llama:token', listener)
return () => ipcRenderer.removeListener('llama:token', listener)
},
onDownloadProgress: (callback: (payload: { downloaded: number; total: number; speed: number }) => void) => {
const listener = (_: any, payload: { downloaded: number; total: number; speed: number }) => callback(payload)
ipcRenderer.on('llama:downloadProgress', listener)
return () => ipcRenderer.removeListener('llama:downloadProgress', listener)
}
}
})

View File

@@ -299,3 +299,33 @@ body[data-theme="teal-water"] {
color: var(--muted);
padding: 40px;
}
/* Virtual Scroll */
/* Scroll viewport for the exported-chat virtual list. The spacer below
   reserves the full list height (so the native scrollbar is proportional)
   while .virtual-scroll-content holds only the rows currently rendered by
   the inlined VirtualScroller script. */
.virtual-scroll-container {
height: calc(100vh - 180px);
/* Adjust based on header height */
overflow-y: auto;
/* anchor for the absolutely-positioned content layer below */
position: relative;
border: 1px solid var(--border);
border-radius: var(--radius);
background: var(--bg);
margin-top: 20px;
}
/* Invisible element whose height equals the estimated total height of all
   rows; it exists only to size the scrollbar. */
.virtual-scroll-spacer {
opacity: 0;
pointer-events: none;
width: 1px;
}
/* Layer holding the visible rows; each row gets an inline absolute `top`
   offset from the VirtualScroller script. */
.virtual-scroll-content {
position: absolute;
top: 0;
left: 0;
width: 100%;
}
.message-list {
/* Override message-list to be inside virtual scroll */
display: block;
}

View File

@@ -159,7 +159,7 @@ class ExportService {
}
const suffixMatch = trimmed.match(/^(.+)_([a-zA-Z0-9]{4})$/)
const cleaned = suffixMatch ? suffixMatch[1] : trimmed
return cleaned
}
@@ -1148,11 +1148,11 @@ class ExportService {
const emojiMd5 = msg.emojiMd5
if (!emojiUrl && !emojiMd5) {
return null
}
const key = emojiMd5 || String(msg.localId)
// 根据 URL 判断扩展名
@@ -3013,6 +3013,165 @@ class ExportService {
}
}
/**
 * Returns the client-side VirtualScroller script that is inlined into
 * exported chat HTML files. The scroller windows a potentially huge
 * message list: a spacer div reserves the total (estimated) height while
 * only the rows around the viewport are rendered, absolutely positioned.
 *
 * Data items are the compact objects the exporter writes into
 * `window.WEFLOW_DATA`: `{ i: index, t: createTime (unix seconds),
 * s: isSend flag, a: avatar HTML, b: message body HTML }`.
 *
 * Fix: `scrollToTime` previously compared against `item.ts`, but the
 * injected objects store the timestamp under the key `t`, so the
 * "jump to time" feature matched nothing; it now reads `item.t`.
 */
private getVirtualScrollScript(): string {
  return `
class VirtualScroller {
  constructor(container, list, data, renderItem) {
    this.container = container;
    this.list = list;
    this.data = data;
    this.renderItem = renderItem;
    this.rowHeight = 80; // Estimated height
    this.buffer = 5;
    this.heightCache = new Map();
    this.visibleItems = new Set();
    this.spacer = document.createElement('div');
    this.spacer.className = 'virtual-scroll-spacer';
    this.content = document.createElement('div');
    this.content.className = 'virtual-scroll-content';
    this.container.appendChild(this.spacer);
    this.container.appendChild(this.content);
    this.container.addEventListener('scroll', () => this.onScroll());
    window.addEventListener('resize', () => this.onScroll());
    this.updateTotalHeight();
    this.onScroll();
  }
  setData(newData) {
    this.data = newData;
    this.heightCache.clear();
    this.content.innerHTML = '';
    this.container.scrollTop = 0;
    this.updateTotalHeight();
    this.onScroll();
    // Show/Hide empty state
    if (this.data.length === 0) {
      this.content.innerHTML = '<div class="empty">暂无消息</div>';
    }
  }
  updateTotalHeight() {
    let total = 0;
    for (let i = 0; i < this.data.length; i++) {
      total += this.heightCache.get(i) || this.rowHeight;
    }
    this.spacer.style.height = total + 'px';
  }
  onScroll() {
    if (this.data.length === 0) return;
    const scrollTop = this.container.scrollTop;
    const containerHeight = this.container.clientHeight;
    // Find start index
    let currentY = 0;
    let startIndex = 0;
    for (let i = 0; i < this.data.length; i++) {
      const h = this.heightCache.get(i) || this.rowHeight;
      if (currentY + h > scrollTop) {
        startIndex = i;
        break;
      }
      currentY += h;
    }
    // Find end index
    let endIndex = startIndex;
    let visibleHeight = 0;
    for (let i = startIndex; i < this.data.length; i++) {
      const h = this.heightCache.get(i) || this.rowHeight;
      visibleHeight += h;
      endIndex = i;
      if (visibleHeight > containerHeight) break;
    }
    const start = Math.max(0, startIndex - this.buffer);
    const end = Math.min(this.data.length - 1, endIndex + this.buffer);
    this.renderRange(start, end, currentY);
  }
  renderRange(start, end, startY) {
    // Calculate offset for start item
    let topOffset = 0;
    for(let i=0; i<start; i++) {
      topOffset += this.heightCache.get(i) || this.rowHeight;
    }
    const newKeys = new Set();
    // Create or update items
    let currentTop = topOffset;
    const fragment = document.createDocumentFragment();
    for (let i = start; i <= end; i++) {
      newKeys.add(i);
      const itemData = this.data[i];
      let el = this.content.querySelector(\`[data-index="\${i}"]\`);
      if (!el) {
        el = document.createElement('div');
        el.setAttribute('data-index', i);
        el.className = 'virtual-item';
        el.style.position = 'absolute';
        el.style.left = '0';
        el.style.width = '100%';
        el.innerHTML = this.renderItem(itemData, i);
        // Measure height after render
        this.content.appendChild(el);
        const rect = el.getBoundingClientRect();
        const actualHeight = rect.height;
        if (Math.abs(actualHeight - (this.heightCache.get(i) || this.rowHeight)) > 1) {
          this.heightCache.set(i, actualHeight);
          // If height changed significantly, we might need to adjust total height
          // But for performance, maybe just do it on next scroll or rarely?
          // For now, let's keep it simple. If we update inline style top, we need to know exact previous heights.
        }
      }
      el.style.top = currentTop + 'px';
      currentTop += (this.heightCache.get(i) || this.rowHeight);
    }
    // Cleanup
    Array.from(this.content.children).forEach(child => {
      if (child.classList.contains('empty')) return;
      const idx = parseInt(child.getAttribute('data-index'));
      if (!newKeys.has(idx)) {
        child.remove();
      }
    });
    this.updateTotalHeight();
  }
  scrollToTime(timestamp) {
    const idx = this.data.findIndex(item => item.t >= timestamp);
    if (idx !== -1) {
      this.scrollToIndex(idx);
    }
  }
  scrollToIndex(index) {
    let top = 0;
    for(let i=0; i<index; i++) {
      top += this.heightCache.get(i) || this.rowHeight;
    }
    this.container.scrollTop = top;
  }
}
`;
}
/**
* 导出单个会话为 HTML 格式
*/
@@ -3127,85 +3286,29 @@ class ExportService {
)
: new Map<string, string>()
const renderedMessages = sortedMessages.map((msg, index) => {
const mediaKey = `${msg.localType}_${msg.localId}`
const mediaItem = mediaCache.get(mediaKey) || null
const isSenderMe = msg.isSend
const senderInfo = collected.memberSet.get(msg.senderUsername)?.member
const senderName = isSenderMe
? (myInfo.displayName || '我')
: (isGroup
? (senderInfo?.groupNickname || senderInfo?.accountName || msg.senderUsername)
: (sessionInfo.displayName || sessionId))
const avatarData = avatarMap.get(isSenderMe ? cleanedMyWxid : msg.senderUsername)
const avatarHtml = avatarData
? `<img src="${this.escapeAttribute(encodeURI(avatarData))}" alt="${this.escapeAttribute(senderName)}" />`
: `<span>${this.escapeHtml(this.getAvatarFallback(senderName))}</span>`
const timeText = this.formatTimestamp(msg.createTime)
const typeName = this.getMessageTypeName(msg.localType)
let textContent = this.formatHtmlMessageText(msg.content, msg.localType)
if (msg.localType === 34 && useVoiceTranscript) {
textContent = voiceTranscriptMap.get(msg.localId) || '[语音消息 - 转文字失败]'
}
if (mediaItem && (msg.localType === 3 || msg.localType === 47)) {
textContent = ''
}
let mediaHtml = ''
if (mediaItem?.kind === 'image') {
const mediaPath = this.escapeAttribute(encodeURI(mediaItem.relativePath))
mediaHtml = `<img class="message-media image previewable" src="${mediaPath}" data-full="${mediaPath}" alt="${this.escapeAttribute(typeName)}" />`
} else if (mediaItem?.kind === 'emoji') {
const mediaPath = this.escapeAttribute(encodeURI(mediaItem.relativePath))
mediaHtml = `<img class="message-media emoji previewable" src="${mediaPath}" data-full="${mediaPath}" alt="${this.escapeAttribute(typeName)}" />`
} else if (mediaItem?.kind === 'voice') {
mediaHtml = `<audio class="message-media audio" controls src="${this.escapeAttribute(encodeURI(mediaItem.relativePath))}"></audio>`
} else if (mediaItem?.kind === 'video') {
const posterAttr = mediaItem.posterDataUrl ? ` poster="${this.escapeAttribute(mediaItem.posterDataUrl)}"` : ''
mediaHtml = `<video class="message-media video" controls preload="metadata"${posterAttr} src="${this.escapeAttribute(encodeURI(mediaItem.relativePath))}"></video>`
}
const textHtml = textContent
? `<div class="message-text">${this.renderTextWithEmoji(textContent).replace(/\r?\n/g, '<br />')}</div>`
: ''
const senderHtml = isGroup
? `<div class="sender-name">${this.escapeHtml(senderName)}</div>`
: ''
const timeHtml = `<div class="message-time">${this.escapeHtml(timeText)}</div>`
const messageBody = `
${timeHtml}
${senderHtml}
<div class="message-content">
${mediaHtml}
${textHtml}
</div>
`
return `
<div class="message ${isSenderMe ? 'sent' : 'received'}" data-timestamp="${msg.createTime}" data-index="${index + 1}">
<div class="message-row">
<div class="avatar">${avatarHtml}</div>
<div class="bubble">
${messageBody}
</div>
</div>
</div>
`
}).join('\n')
onProgress?.({
current: 85,
current: 60,
total: 100,
currentSession: sessionInfo.displayName,
phase: 'writing'
})
// ================= BEGIN STREAM WRITING =================
const exportMeta = this.getExportMeta(sessionId, sessionInfo, isGroup)
const htmlStyles = this.loadExportHtmlStyles()
const html = `<!DOCTYPE html>
const stream = fs.createWriteStream(outputPath, { encoding: 'utf-8' })
const writePromise = (str: string) => {
return new Promise<void>((resolve, reject) => {
if (!stream.write(str)) {
stream.once('drain', resolve)
} else {
resolve()
}
})
}
await writePromise(`<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8" />
@@ -3250,15 +3353,109 @@ class ExportService {
</div>
</div>
</div>
<div class="message-list" id="messageList">
${renderedMessages || '<div class="empty">暂无消息</div>'}
</div>
<!-- Virtual Scroll Container -->
<div id="virtualScrollContainer" class="virtual-scroll-container"></div>
</div>
<div class="image-preview" id="imagePreview">
<img id="imagePreviewTarget" alt="预览" />
</div>
<!-- Data Injection -->
<script>
const messages = Array.from(document.querySelectorAll('.message'))
window.WEFLOW_DATA = [
`);
// Write messages in chunks
for (let i = 0; i < sortedMessages.length; i++) {
const msg = sortedMessages[i]
const mediaKey = `${msg.localType}_${msg.localId}`
const mediaItem = mediaCache.get(mediaKey) || null
const isSenderMe = msg.isSend
const senderInfo = collected.memberSet.get(msg.senderUsername)?.member
const senderName = isSenderMe
? (myInfo.displayName || '我')
: (isGroup
? (senderInfo?.groupNickname || senderInfo?.accountName || msg.senderUsername)
: (sessionInfo.displayName || sessionId))
const avatarData = avatarMap.get(isSenderMe ? cleanedMyWxid : msg.senderUsername)
const avatarHtml = avatarData
? `<img src="${this.escapeAttribute(encodeURI(avatarData))}" alt="${this.escapeAttribute(senderName)}" />`
: `<span>${this.escapeHtml(this.getAvatarFallback(senderName))}</span>`
const timeText = this.formatTimestamp(msg.createTime)
const typeName = this.getMessageTypeName(msg.localType)
let textContent = this.formatHtmlMessageText(msg.content, msg.localType)
if (msg.localType === 34 && useVoiceTranscript) {
textContent = voiceTranscriptMap.get(msg.localId) || '[语音消息 - 转文字失败]'
}
if (mediaItem && (msg.localType === 3 || msg.localType === 47)) {
textContent = ''
}
let mediaHtml = ''
if (mediaItem?.kind === 'image') {
const mediaPath = this.escapeAttribute(encodeURI(mediaItem.relativePath))
mediaHtml = `<img class="message-media image previewable" src="${mediaPath}" data-full="${mediaPath}" alt="${this.escapeAttribute(typeName)}" />`
} else if (mediaItem?.kind === 'emoji') {
const mediaPath = this.escapeAttribute(encodeURI(mediaItem.relativePath))
mediaHtml = `<img class="message-media emoji previewable" src="${mediaPath}" data-full="${mediaPath}" alt="${this.escapeAttribute(typeName)}" />`
} else if (mediaItem?.kind === 'voice') {
mediaHtml = `<audio class="message-media audio" controls src="${this.escapeAttribute(encodeURI(mediaItem.relativePath))}"></audio>`
} else if (mediaItem?.kind === 'video') {
const posterAttr = mediaItem.posterDataUrl ? ` poster="${this.escapeAttribute(mediaItem.posterDataUrl)}"` : ''
mediaHtml = `<video class="message-media video" controls preload="metadata"${posterAttr} src="${this.escapeAttribute(encodeURI(mediaItem.relativePath))}"></video>`
}
const textHtml = textContent
? `<div class="message-text">${this.renderTextWithEmoji(textContent).replace(/\r?\n/g, '<br />')}</div>`
: ''
const senderNameHtml = isGroup
? `<div class="sender-name">${this.escapeHtml(senderName)}</div>`
: ''
const timeHtml = `<div class="message-time">${this.escapeHtml(timeText)}</div>`
const messageBody = `
${timeHtml}
${senderNameHtml}
<div class="message-content">
${mediaHtml}
${textHtml}
</div>
`
// Compact JSON object
const itemObj = {
i: i + 1, // index
t: msg.createTime, // timestamp
s: isSenderMe ? 1 : 0, // isSend
a: avatarHtml, // avatar HTML
b: messageBody // body HTML
}
const jsonStr = JSON.stringify(itemObj)
await writePromise(jsonStr + (i < sortedMessages.length - 1 ? ',\n' : '\n'))
// Report progress occasionally
if ((i + 1) % 500 === 0) {
onProgress?.({
current: 60 + Math.floor((i + 1) / sortedMessages.length * 30),
total: 100,
currentSession: sessionInfo.displayName,
phase: 'writing'
})
}
}
await writePromise(`];
</script>
<script>
${this.getVirtualScrollScript()}
const searchInput = document.getElementById('searchInput')
const timeInput = document.getElementById('timeInput')
const jumpBtn = document.getElementById('jumpBtn')
@@ -3266,47 +3463,69 @@ class ExportService {
const themeSelect = document.getElementById('themeSelect')
const imagePreview = document.getElementById('imagePreview')
const imagePreviewTarget = document.getElementById('imagePreviewTarget')
const container = document.getElementById('virtualScrollContainer')
let imageZoom = 1
// Initial Data
let allData = window.WEFLOW_DATA || [];
let currentList = allData;
// Render Item Function
const renderItem = (item, index) => {
const isSenderMe = item.s === 1;
return \`
<div class="message \${isSenderMe ? 'sent' : 'received'}" data-index="\${item.i}">
<div class="message-row">
<div class="avatar">\${item.a}</div>
<div class="bubble">
\${item.b}
</div>
</div>
</div>
\`;
};
const scroller = new VirtualScroller(container, [], currentList, renderItem);
const updateCount = () => {
const visible = messages.filter((msg) => !msg.classList.contains('hidden'))
resultCount.textContent = \`\${visible.length}\`
resultCount.textContent = \`\${currentList.length}\`
}
// Search Logic
let searchTimeout;
searchInput.addEventListener('input', () => {
const keyword = searchInput.value.trim().toLowerCase()
messages.forEach((msg) => {
const text = msg.textContent ? msg.textContent.toLowerCase() : ''
const match = !keyword || text.includes(keyword)
msg.classList.toggle('hidden', !match)
})
updateCount()
clearTimeout(searchTimeout);
searchTimeout = setTimeout(() => {
const keyword = searchInput.value.trim().toLowerCase();
if (!keyword) {
currentList = allData;
} else {
// Simplified search: check raw html content (contains body text and sender name)
// Ideally we should search raw text, but we only have pre-rendered HTML in JSON 'b' (body)
// 'b' contains message content and sender name.
currentList = allData.filter(item => {
return item.b.toLowerCase().includes(keyword);
});
}
scroller.setData(currentList);
updateCount();
}, 300);
})
// Jump Logic
jumpBtn.addEventListener('click', () => {
const value = timeInput.value
if (!value) return
const target = Math.floor(new Date(value).getTime() / 1000)
const visibleMessages = messages.filter((msg) => !msg.classList.contains('hidden'))
if (visibleMessages.length === 0) return
let targetMessage = visibleMessages.find((msg) => {
const time = Number(msg.dataset.timestamp || 0)
return time >= target
})
if (!targetMessage) {
targetMessage = visibleMessages[visibleMessages.length - 1]
}
visibleMessages.forEach((msg) => msg.classList.remove('highlight'))
targetMessage.classList.add('highlight')
targetMessage.scrollIntoView({ behavior: 'smooth', block: 'center' })
setTimeout(() => targetMessage.classList.remove('highlight'), 2000)
// Find in current list
scroller.scrollToTime(target);
})
// Theme Logic
const applyTheme = (value) => {
document.body.setAttribute('data-theme', value)
localStorage.setItem('weflow-export-theme', value)
}
const storedTheme = localStorage.getItem('weflow-export-theme') || 'cloud-dancer'
themeSelect.value = storedTheme
applyTheme(storedTheme)
@@ -3315,16 +3534,18 @@ class ExportService {
applyTheme(event.target.value)
})
document.querySelectorAll('.previewable').forEach((img) => {
img.addEventListener('click', () => {
const full = img.getAttribute('data-full')
if (!full) return
imagePreviewTarget.src = full
imageZoom = 1
imagePreviewTarget.style.transform = 'scale(1)'
imagePreview.classList.add('active')
})
})
// Image Preview (Delegation)
container.addEventListener('click', (e) => {
const target = e.target;
if (target.classList.contains('previewable')) {
const full = target.getAttribute('data-full')
if (!full) return
imagePreviewTarget.src = full
imageZoom = 1
imagePreviewTarget.style.transform = 'scale(1)'
imagePreview.classList.add('active')
}
});
imagePreviewTarget.addEventListener('click', (event) => {
event.stopPropagation()
@@ -3351,20 +3572,24 @@ class ExportService {
})
updateCount()
console.log('WeFlow Export Loaded', allData.length);
</script>
</body>
</html>`
</html>`);
fs.writeFileSync(outputPath, html, 'utf-8')
onProgress?.({
current: 100,
total: 100,
currentSession: sessionInfo.displayName,
phase: 'complete'
return new Promise((resolve, reject) => {
stream.end(() => {
onProgress?.({
current: 100,
total: 100,
currentSession: sessionInfo.displayName,
phase: 'complete'
})
resolve({ success: true })
})
stream.on('error', reject)
})
return { success: true }
} catch (e) {
return { success: false, error: String(e) }
}

View File

@@ -0,0 +1,371 @@
import fs from "fs";
import { app, BrowserWindow } from "electron";
import path from "path";
import { ConfigService } from './config';
// Define interfaces locally to avoid static import of types that might not be available or cause issues
type LlamaModel = any;
type LlamaContext = any;
type LlamaChatSession = any;
/**
 * Thin wrapper around node-llama-cpp for local LLM chat inside the
 * Electron main process, plus model-file download helpers.
 *
 * The llama runtime is loaded lazily via dynamic import (the package is
 * ESM-only while this process runs as CJS). Model downloads try a
 * 4-connection ranged download first and fall back to a single-threaded
 * stream when the server does not support ranges.
 */
export class LlamaService {
  private _model: LlamaModel | null = null;
  private _context: LlamaContext | null = null;
  private _sequence: any = null;
  private _session: LlamaChatSession | null = null;
  private _llama: any = null;
  private _nodeLlamaCpp: any = null;
  private configService = new ConfigService();
  private _initialized = false;

  constructor() {
    // Lazy initialization: the llama runtime is only loaded on first use.
  }

  /**
   * Loads node-llama-cpp and acquires the Llama runtime handle.
   * Errors are logged and swallowed (no throw) so the IPC caller gets
   * `undefined`; later calls detect the failure via `this._llama === null`.
   */
  public async init() {
    if (this._initialized) return;
    try {
      // Dynamic import to handle ESM module in CJS context
      this._nodeLlamaCpp = await import("node-llama-cpp");
      this._llama = await this._nodeLlamaCpp.getLlama();
      this._initialized = true;
      console.log("[LlamaService] Llama initialized");
    } catch (error) {
      console.error("[LlamaService] Failed to initialize Llama:", error);
    }
  }

  /**
   * Loads a GGUF model from disk, creates a context/sequence and a default
   * chat session. Throws if the runtime failed to initialize or the model
   * cannot be loaded.
   * @param modelPath absolute path to the .gguf file
   * @returns true on success
   */
  public async loadModel(modelPath: string) {
    if (!this._llama) await this.init();
    try {
      console.log("[LlamaService] Loading model from:", modelPath);
      if (!this._llama) {
        throw new Error("Llama not initialized");
      }
      this._model = await this._llama.loadModel({
        modelPath: modelPath,
        gpuLayers: 'max', // Offload all layers to GPU if possible
        useMlock: false // Disable mlock to avoid "VirtualLock" errors (common on Windows)
      });
      if (!this._model) throw new Error("Failed to load model");
      this._context = await this._model.createContext({
        contextSize: 8192, // Balanced context size for better performance
        batchSize: 2048 // Increase batch size for better prompt processing speed
      });
      if (!this._context) throw new Error("Failed to create context");
      this._sequence = this._context.getSequence();
      const { LlamaChatSession } = this._nodeLlamaCpp;
      this._session = new LlamaChatSession({
        contextSequence: this._sequence
      });
      console.log("[LlamaService] Model loaded successfully");
      return true;
    } catch (error) {
      console.error("[LlamaService] Failed to load model:", error);
      throw error;
    }
  }

  /**
   * Replaces the current chat session with a fresh one (discarding prior
   * conversation state), optionally seeded with a system prompt.
   * Requires a model/context to already be loaded.
   */
  public async createSession(systemPrompt?: string) {
    if (!this._context) throw new Error("Model not loaded");
    if (!this._nodeLlamaCpp) await this.init();
    const { LlamaChatSession } = this._nodeLlamaCpp;
    if (!this._sequence) {
      this._sequence = this._context.getSequence();
    }
    this._session = new LlamaChatSession({
      contextSequence: this._sequence,
      systemPrompt: systemPrompt
    });
    return true;
  }

  /**
   * Sends one user message to the active session and streams generated
   * text back through `onToken`; resolves with the full response.
   * @param options.thinking selects slightly different sampling params
   *   (NOTE(review): repeatPenalty 1.5 is unusually aggressive — the
   *   inline comment says "PresencePenalty=1.5"; confirm the intended knob)
   */
  public async chat(message: string, options: { thinking?: boolean } = {}, onToken: (token: string) => void) {
    if (!this._session) throw new Error("Session not initialized");
    const thinking = options.thinking ?? false;
    // Sampling parameters based on mode
    const samplingParams = thinking ? {
      temperature: 0.6,
      topP: 0.95,
      topK: 20,
      repeatPenalty: 1.5 // PresencePenalty=1.5
    } : {
      temperature: 0.7,
      topP: 0.8,
      topK: 20,
      repeatPenalty: 1.5
    };
    try {
      const response = await this._session.prompt(message, {
        ...samplingParams,
        onTextChunk: (chunk: string) => {
          onToken(chunk);
        }
      });
      return response;
    } catch (error) {
      console.error("[LlamaService] Chat error:", error);
      throw error;
    }
  }

  /**
   * Reports whether a model file exists on disk and its size.
   * Never throws; filesystem errors are folded into the result object.
   */
  public async getModelStatus(modelPath: string) {
    try {
      const exists = fs.existsSync(modelPath);
      if (!exists) {
        return { exists: false, path: modelPath };
      }
      const stats = fs.statSync(modelPath);
      return {
        exists: true,
        path: modelPath,
        size: stats.size
      };
    } catch (error) {
      return { exists: false, error: String(error) };
    }
  }

  /**
   * Directory where model files are stored.
   * NOTE(review): reads the 'whisperModelDir' config key — that looks like
   * it was borrowed from the voice-transcribe service; confirm llama models
   * are meant to share the whisper model directory.
   */
  private resolveModelDir(): string {
    const configured = this.configService.get('whisperModelDir') as string | undefined;
    if (configured) return configured;
    return path.join(app.getPath('documents'), 'WeFlow', 'models');
  }

  /**
   * Downloads a model file to `savePath`, preferring a 4-connection ranged
   * download; falls back to a single stream when the server does not
   * support ranges or the probe fails. Progress (bytes + speed) is
   * reported roughly once per second via `onProgress`.
   * Any pre-existing file at `savePath` is deleted first.
   * On failure the partially written file may remain on disk.
   */
  public async downloadModel(url: string, savePath: string, onProgress: (payload: { downloaded: number; total: number; speed: number }) => void): Promise<void> {
    // Ensure directory exists
    const dir = path.dirname(savePath);
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir, { recursive: true });
    }
    console.info(`[LlamaService] Multi-threaded download check for: ${savePath}`);
    if (fs.existsSync(savePath)) {
      fs.unlinkSync(savePath);
    }
    // 1. Get total size and check range support
    let probeResult;
    try {
      probeResult = await this.probeUrl(url);
    } catch (err) {
      console.warn("[LlamaService] Probe failed, falling back to single-thread.", err);
      return this.downloadSingleThread(url, savePath, onProgress);
    }
    const { totalSize, acceptRanges, finalUrl } = probeResult;
    console.log(`[LlamaService] Total size: ${totalSize}, Accept-Ranges: ${acceptRanges}`);
    if (totalSize <= 0 || !acceptRanges) {
      console.warn("[LlamaService] Ranges not supported or size unknown, falling back to single-thread.");
      return this.downloadSingleThread(finalUrl, savePath, onProgress);
    }
    const threadCount = 4;
    const chunkSize = Math.ceil(totalSize / threadCount);
    // Single fd shared by all chunk workers; each writes at its own offset.
    const fd = fs.openSync(savePath, 'w');
    let downloadedLength = 0;
    let lastDownloadedLength = 0;
    let lastTime = Date.now();
    let speed = 0;
    const speedInterval = setInterval(() => {
      const now = Date.now();
      const duration = (now - lastTime) / 1000;
      if (duration > 0) {
        speed = (downloadedLength - lastDownloadedLength) / duration;
        lastDownloadedLength = downloadedLength;
        lastTime = now;
        onProgress({ downloaded: downloadedLength, total: totalSize, speed });
      }
    }, 1000);
    try {
      const promises = [];
      for (let i = 0; i < threadCount; i++) {
        const start = i * chunkSize;
        const end = i === threadCount - 1 ? totalSize - 1 : (i + 1) * chunkSize - 1;
        promises.push(this.downloadChunk(finalUrl, fd, start, end, (bytes) => {
          downloadedLength += bytes;
        }));
      }
      await Promise.all(promises);
      console.log("[LlamaService] Multi-threaded download complete");
      // Final progress update
      onProgress({ downloaded: totalSize, total: totalSize, speed: 0 });
    } catch (err) {
      console.error("[LlamaService] Multi-threaded download failed:", err);
      throw err;
    } finally {
      clearInterval(speedInterval);
      fs.closeSync(fd);
    }
  }

  /**
   * Issues a GET with `Range: bytes=0-0` to discover the total size and
   * whether the server honours range requests; follows redirects.
   * Resolves with the final (post-redirect) URL so chunk requests skip
   * the redirect hop.
   */
  private async probeUrl(url: string): Promise<{ totalSize: number, acceptRanges: boolean, finalUrl: string }> {
    const protocol = url.startsWith('https') ? require('https') : require('http');
    const options = {
      method: 'GET',
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://www.modelscope.cn/',
        'Range': 'bytes=0-0'
      }
    };
    return new Promise((resolve, reject) => {
      const req = protocol.get(url, options, (res: any) => {
        if ([301, 302, 307, 308].includes(res.statusCode)) {
          const location = res.headers.location;
          const nextUrl = new URL(location, url).href;
          this.probeUrl(nextUrl).then(resolve).catch(reject);
          return;
        }
        if (res.statusCode !== 206 && res.statusCode !== 200) {
          // FIX: destroy the response so the socket is released on failure.
          res.destroy();
          reject(new Error(`Probe failed: HTTP ${res.statusCode}`));
          return;
        }
        const contentRange = res.headers['content-range'];
        let totalSize = 0;
        if (contentRange) {
          // e.g. "bytes 0-0/123456" — total size is after the slash.
          const parts = contentRange.split('/');
          totalSize = parseInt(parts[parts.length - 1], 10);
        } else {
          totalSize = parseInt(res.headers['content-length'] || '0', 10);
        }
        const acceptRanges = res.headers['accept-ranges'] === 'bytes' || !!contentRange;
        resolve({ totalSize, acceptRanges, finalUrl: url });
        res.destroy();
      });
      req.on('error', reject);
    });
  }

  /**
   * Downloads the byte range [start, end] and writes it into `fd` at the
   * matching file offset using positional fs.writeSync calls.
   */
  private async downloadChunk(url: string, fd: number, start: number, end: number, onData: (bytes: number) => void): Promise<void> {
    const protocol = url.startsWith('https') ? require('https') : require('http');
    const options = {
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://www.modelscope.cn/',
        'Range': `bytes=${start}-${end}`
      }
    };
    return new Promise((resolve, reject) => {
      const req = protocol.get(url, options, (res: any) => {
        if (res.statusCode !== 206) {
          // FIX: destroy the response so the socket is released on failure.
          res.destroy();
          reject(new Error(`Chunk download failed: HTTP ${res.statusCode}`));
          return;
        }
        let currentOffset = start;
        res.on('data', (chunk: Buffer) => {
          try {
            fs.writeSync(fd, chunk, 0, chunk.length, currentOffset);
            currentOffset += chunk.length;
            onData(chunk.length);
          } catch (err) {
            reject(err);
            res.destroy();
          }
        });
        res.on('end', () => resolve());
        res.on('error', reject);
      });
      req.on('error', reject);
    });
  }

  /**
   * Plain streaming download fallback (no range requests); follows
   * redirects and reports progress/speed once per second.
   */
  private async downloadSingleThread(url: string, savePath: string, onProgress: (payload: { downloaded: number; total: number; speed: number }) => void): Promise<void> {
    return new Promise((resolve, reject) => {
      const protocol = url.startsWith('https') ? require('https') : require('http');
      const options = {
        headers: {
          'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
          'Referer': 'https://www.modelscope.cn/'
        }
      };
      const request = protocol.get(url, options, (response: any) => {
        if ([301, 302, 307, 308].includes(response.statusCode)) {
          const location = response.headers.location;
          const nextUrl = new URL(location, url).href;
          this.downloadSingleThread(nextUrl, savePath, onProgress).then(resolve).catch(reject);
          return;
        }
        if (response.statusCode !== 200) {
          reject(new Error(`Fallback download failed: HTTP ${response.statusCode}`));
          return;
        }
        const totalLength = parseInt(response.headers['content-length'] || '0', 10);
        let downloadedLength = 0;
        let lastDownloadedLength = 0;
        let lastTime = Date.now();
        let speed = 0;
        const fileStream = fs.createWriteStream(savePath);
        response.pipe(fileStream);
        const speedInterval = setInterval(() => {
          const now = Date.now();
          const duration = (now - lastTime) / 1000;
          if (duration > 0) {
            speed = (downloadedLength - lastDownloadedLength) / duration;
            lastDownloadedLength = downloadedLength;
            lastTime = now;
            onProgress({ downloaded: downloadedLength, total: totalLength, speed });
          }
        }, 1000);
        response.on('data', (chunk: any) => {
          downloadedLength += chunk.length;
        });
        // FIX: without this handler, a network error on the response stream
        // left the promise pending forever and leaked speedInterval
        // (fileStream 'finish' never fires when the source stream errors).
        response.on('error', (err: any) => {
          clearInterval(speedInterval);
          fileStream.destroy();
          fs.unlink(savePath, () => { });
          reject(err);
        });
        fileStream.on('finish', () => {
          clearInterval(speedInterval);
          fileStream.close();
          resolve();
        });
        fileStream.on('error', (err: any) => {
          clearInterval(speedInterval);
          fs.unlink(savePath, () => { });
          reject(err);
        });
      });
      request.on('error', reject);
    });
  }

  /** Public accessor for the model storage directory. */
  public getModelsPath() {
    return this.resolveModelDir();
  }
}
// Shared singleton used by the IPC handlers in the main process.
export const llamaService = new LlamaService();

View File

@@ -1,5 +1,5 @@
import { app } from 'electron'
import { existsSync, mkdirSync, statSync, unlinkSync, createWriteStream } from 'fs'
import { existsSync, mkdirSync, statSync, unlinkSync, createWriteStream, openSync, writeSync, closeSync } from 'fs'
import { join } from 'path'
import * as https from 'https'
import * as http from 'http'
@@ -24,6 +24,7 @@ type DownloadProgress = {
downloadedBytes: number
totalBytes?: number
percent?: number
speed?: number
}
const SENSEVOICE_MODEL: ModelInfo = {
@@ -123,44 +124,44 @@ export class VoiceTranscribeService {
percent: 0
})
// 下载模型文件 (40%)
// 下载模型文件 (80% 权重)
console.info('[VoiceTranscribe] 开始下载模型文件...')
await this.downloadToFile(
MODEL_DOWNLOAD_URLS.model,
modelPath,
'model',
(downloaded, total) => {
const percent = total ? (downloaded / total) * 40 : undefined
(downloaded, total, speed) => {
const percent = total ? (downloaded / total) * 80 : 0
onProgress?.({
modelName: SENSEVOICE_MODEL.name,
downloadedBytes: downloaded,
totalBytes: SENSEVOICE_MODEL.sizeBytes,
percent
percent,
speed
})
}
)
// 下载 tokens 文件 (30%)
// 下载 tokens 文件 (20% 权重)
console.info('[VoiceTranscribe] 开始下载 tokens 文件...')
await this.downloadToFile(
MODEL_DOWNLOAD_URLS.tokens,
tokensPath,
'tokens',
(downloaded, total) => {
(downloaded, total, speed) => {
const modelSize = existsSync(modelPath) ? statSync(modelPath).size : 0
const percent = total ? 40 + (downloaded / total) * 30 : 40
const percent = total ? 80 + (downloaded / total) * 20 : 80
onProgress?.({
modelName: SENSEVOICE_MODEL.name,
downloadedBytes: modelSize + downloaded,
totalBytes: SENSEVOICE_MODEL.sizeBytes,
percent
percent,
speed
})
}
)
console.info('[VoiceTranscribe] 模型下载完成')
console.info('[VoiceTranscribe] 所有文件下载完成')
return { success: true, modelPath, tokensPath }
} catch (error) {
const modelPath = this.resolveModelPath(SENSEVOICE_MODEL.files.model)
@@ -180,7 +181,7 @@ export class VoiceTranscribeService {
}
/**
* 转写 WAV 音频数据 (后台 Worker Threads 版本)
* 转写 WAV 音频数据
*/
async transcribeWavBuffer(
wavData: Buffer,
@@ -197,18 +198,15 @@ export class VoiceTranscribeService {
return
}
// 获取配置的语言列表,如果没有传入则从配置读取
let supportedLanguages = languages
if (!supportedLanguages || supportedLanguages.length === 0) {
supportedLanguages = this.configService.get('transcribeLanguages')
// 如果配置中也没有或为空,使用默认值
if (!supportedLanguages || supportedLanguages.length === 0) {
supportedLanguages = ['zh', 'yue']
}
}
const { Worker } = require('worker_threads')
// main.js 和 transcribeWorker.js 同在 dist-electron 目录下
const workerPath = join(__dirname, 'transcribeWorker.js')
const worker = new Worker(workerPath, {
@@ -224,12 +222,10 @@ export class VoiceTranscribeService {
let finalTranscript = ''
worker.on('message', (msg: any) => {
if (msg.type === 'partial') {
onPartial?.(msg.text)
} else if (msg.type === 'final') {
finalTranscript = msg.text
resolve({ success: true, transcript: finalTranscript })
worker.terminate()
} else if (msg.type === 'error') {
@@ -239,15 +235,9 @@ export class VoiceTranscribeService {
}
})
worker.on('error', (err: Error) => {
resolve({ success: false, error: String(err) })
})
worker.on('error', (err: Error) => resolve({ success: false, error: String(err) }))
worker.on('exit', (code: number) => {
if (code !== 0) {
console.error(`[VoiceTranscribe] Worker stopped with exit code ${code}`)
resolve({ success: false, error: `Worker exited with code ${code}` })
}
if (code !== 0) resolve({ success: false, error: `Worker exited with code ${code}` })
})
} catch (error) {
@@ -257,121 +247,230 @@ export class VoiceTranscribeService {
}
/**
* 下载文件
* 下载文件 (支持多线程)
*/
private downloadToFile(
private async downloadToFile(
url: string,
targetPath: string,
fileName: string,
onProgress?: (downloaded: number, total?: number) => void,
remainingRedirects = 5
onProgress?: (downloaded: number, total?: number, speed?: number) => void
): Promise<void> {
return new Promise((resolve, reject) => {
const protocol = url.startsWith('https') ? https : http
console.info(`[VoiceTranscribe] 下载 ${fileName}:`, url)
if (existsSync(targetPath)) {
unlinkSync(targetPath)
}
const options = {
headers: {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
},
timeout: 30000 // 30秒连接超时
console.info(`[VoiceTranscribe] 准备下载 ${fileName}: ${url}`)
// 1. 探测支持情况
let probeResult
try {
probeResult = await this.probeUrl(url)
} catch (err) {
console.warn(`[VoiceTranscribe] ${fileName} 探测失败,使用单线程`, err)
return this.downloadSingleThread(url, targetPath, fileName, onProgress)
}
const { totalSize, acceptRanges, finalUrl } = probeResult
// 如果文件太小 (< 2MB) 或者不支持 Range使用单线程
if (totalSize < 2 * 1024 * 1024 || !acceptRanges) {
return this.downloadSingleThread(finalUrl, targetPath, fileName, onProgress)
}
console.info(`[VoiceTranscribe] ${fileName} 开始多线程下载 (4 线程), 大小: ${(totalSize / 1024 / 1024).toFixed(2)} MB`)
const threadCount = 4
const chunkSize = Math.ceil(totalSize / threadCount)
const fd = openSync(targetPath, 'w')
let downloadedTotal = 0
let lastDownloaded = 0
let lastTime = Date.now()
let speed = 0
const speedInterval = setInterval(() => {
const now = Date.now()
const duration = (now - lastTime) / 1000
if (duration > 0) {
speed = (downloadedTotal - lastDownloaded) / duration
lastDownloaded = downloadedTotal
lastTime = now
onProgress?.(downloadedTotal, totalSize, speed)
}
}, 1000)
try {
const promises = []
for (let i = 0; i < threadCount; i++) {
const start = i * chunkSize
const end = i === threadCount - 1 ? totalSize - 1 : (i + 1) * chunkSize - 1
promises.push(this.downloadChunk(finalUrl, fd, start, end, (bytes) => {
downloadedTotal += bytes
}))
}
const request = protocol.get(url, options, (response) => {
console.info(`[VoiceTranscribe] ${fileName} 响应状态:`, response.statusCode)
await Promise.all(promises)
// Final progress update
onProgress?.(totalSize, totalSize, 0)
console.info(`[VoiceTranscribe] ${fileName} 多线程下载完成`)
} catch (err) {
console.error(`[VoiceTranscribe] ${fileName} 多线程下载失败:`, err)
throw err
} finally {
clearInterval(speedInterval)
closeSync(fd)
}
}
// 处理重定向
if ([301, 302, 303, 307, 308].includes(response.statusCode || 0) && response.headers.location) {
if (remainingRedirects <= 0) {
reject(new Error('重定向次数过多'))
/**
 * Probe a download URL with a one-byte Range request (GET `bytes=0-0`) to
 * learn the file's total size and whether the server honours Range requests,
 * following up to `remainingRedirects` HTTP redirects.
 *
 * Resolves with `{ totalSize, acceptRanges, finalUrl }`, which the caller
 * uses to choose between single-threaded and multi-threaded download.
 * Rejects on request errors or non-200/206 responses.
 */
private async probeUrl(url: string, remainingRedirects = 5): Promise<{ totalSize: number, acceptRanges: boolean, finalUrl: string }> {
  return new Promise((resolve, reject) => {
    // Pick the protocol module matching the URL scheme.
    const protocol = url.startsWith('https') ? https : http
    const options = {
      method: 'GET',
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://modelscope.cn/',
        // Request a single byte: cheap probe that still yields Content-Range.
        'Range': 'bytes=0-0'
      }
    }
    const req = protocol.get(url, options, (res) => {
      if ([301, 302, 303, 307, 308].includes(res.statusCode || 0)) {
        const location = res.headers.location
        if (location && remainingRedirects > 0) {
          // Resolve relative Location headers against the current URL.
          const nextUrl = new URL(location, url).href
          this.probeUrl(nextUrl, remainingRedirects - 1).then(resolve).catch(reject)
          return
        }
        // NOTE(review): the next four lines reference `response`, `targetPath`,
        // `fileName` and `onProgress`, none of which exist in this method's
        // scope — they look like leftovers of the pre-refactor downloadToFile
        // merged in by mistake. Confirm and remove; as written this branch
        // cannot compile/run.
        console.info(`[VoiceTranscribe] 重定向到:`, response.headers.location)
        this.downloadToFile(response.headers.location, targetPath, fileName, onProgress, remainingRedirects - 1)
          .then(resolve)
          .catch(reject)
      }
      if (res.statusCode !== 206 && res.statusCode !== 200) {
        reject(new Error(`Probe failed: HTTP ${res.statusCode}`))
        return
      }
      // Prefer Content-Range ("bytes 0-0/12345") for the true total size;
      // with Range: bytes=0-0 the Content-Length would only be 1.
      const contentRange = res.headers['content-range']
      let totalSize = 0
      if (contentRange) {
        const parts = contentRange.split('/')
        totalSize = parseInt(parts[parts.length - 1], 10)
      } else {
        totalSize = parseInt(res.headers['content-length'] || '0', 10)
      }
      // A Content-Range reply implies Range support even if Accept-Ranges is absent.
      const acceptRanges = res.headers['accept-ranges'] === 'bytes' || !!contentRange
      resolve({ totalSize, acceptRanges, finalUrl: url })
      // Discard the (one-byte) body; only the headers were needed.
      res.destroy()
    })
    req.on('error', reject)
  })
}
/**
 * Download the byte range [start, end] of `url` and write it into the open
 * file descriptor `fd` at the matching file offset (positional writeSync),
 * reporting each received chunk's size through `onData`.
 *
 * Rejects on request/response errors, on a write failure, or when the server
 * does not answer 206 Partial Content.
 */
private async downloadChunk(url: string, fd: number, start: number, end: number, onData: (bytes: number) => void): Promise<void> {
  return new Promise((resolve, reject) => {
    const protocol = url.startsWith('https') ? https : http
    const options = {
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://modelscope.cn/',
        'Range': `bytes=${start}-${end}`
      }
    }
    const req = protocol.get(url, options, (res) => {
      // Anything other than 206 means the server ignored the Range header.
      if (res.statusCode !== 206) {
        reject(new Error(`Chunk download failed: HTTP ${res.statusCode}`))
        return
      }
      let currentOffset = start
      res.on('data', (chunk: Buffer) => {
        try {
          // Positional write: concurrent chunk workers share one fd without
          // clobbering each other's regions.
          writeSync(fd, chunk, 0, chunk.length, currentOffset)
          currentOffset += chunk.length
          onData(chunk.length)
        } catch (err) {
          // NOTE(review): after a write failure the promise rejects and the
          // response is destroyed, but sibling chunk promises keep running —
          // confirm the caller only closes `fd` after Promise.all settles.
          reject(err)
          res.destroy()
        }
      })
      res.on('end', () => resolve())
      res.on('error', reject)
    })
    req.on('error', reject)
  })
}
/**
 * Single-threaded fallback download: streams `url` straight into `targetPath`,
 * following up to `remainingRedirects` redirects, reporting progress (and a
 * once-per-second speed sample) through `onProgress`.
 *
 * NOTE(review): this body appears to contain interleaved old AND new lines
 * from a diff/merge (duplicate reject calls, duplicate 'error' handlers, two
 * parallel progress mechanisms). The duplicates are flagged inline below —
 * confirm against the repository and deduplicate.
 */
private async downloadSingleThread(url: string, targetPath: string, fileName: string, onProgress?: (downloaded: number, total?: number, speed?: number) => void, remainingRedirects = 5): Promise<void> {
  return new Promise((resolve, reject) => {
    const protocol = url.startsWith('https') ? https : http
    const options = {
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://modelscope.cn/'
      }
    }
    const request = protocol.get(url, options, (response) => {
      if ([301, 302, 303, 307, 308].includes(response.statusCode || 0)) {
        const location = response.headers.location
        if (location && remainingRedirects > 0) {
          // Resolve relative Location headers against the current URL.
          const nextUrl = new URL(location, url).href
          this.downloadSingleThread(nextUrl, targetPath, fileName, onProgress, remainingRedirects - 1).then(resolve).catch(reject)
          return
        }
      }
      if (response.statusCode !== 200) {
        // NOTE(review): two consecutive rejects — the second is a no-op
        // (promise already settled). Looks like old+new diff lines; keep one.
        reject(new Error(`下载失败: HTTP ${response.statusCode}`))
        reject(new Error(`Fallback download failed: HTTP ${response.statusCode}`))
        return
      }
      // Content-Length of 0/absent is normalised to undefined ("unknown size").
      const totalBytes = Number(response.headers['content-length'] || 0) || undefined
      let downloadedBytes = 0
      let lastDownloaded = 0
      let lastTime = Date.now()
      let speed = 0
      console.info(`[VoiceTranscribe] ${fileName} 文件大小:`, totalBytes ? `${(totalBytes / 1024 / 1024).toFixed(2)} MB` : '未知')
      // Once per second: compute bytes/sec since the last sample and report it.
      // NOTE(review): this interval is only cleared on the 'finish' path below;
      // on the error paths it leaks — confirm and clear it there too.
      const speedInterval = setInterval(() => {
        const now = Date.now()
        const duration = (now - lastTime) / 1000
        if (duration > 0) {
          speed = (downloadedBytes - lastDownloaded) / duration
          lastDownloaded = downloadedBytes
          lastTime = now
          onProgress?.(downloadedBytes, totalBytes, speed)
        }
      }, 1000)
      const writer = createWriteStream(targetPath)
      // Data-receive watchdog: abort if no data arrives for 60 seconds.
      let lastDataTime = Date.now()
      const dataTimeout = setInterval(() => {
        if (Date.now() - lastDataTime > 60000) {
          clearInterval(dataTimeout)
          response.destroy()
          writer.close()
          reject(new Error('下载超时:60秒内未收到数据'))
        }
      }, 5000)
      response.on('data', (chunk) => {
        lastDataTime = Date.now()
        downloadedBytes += chunk.length
        // NOTE(review): per-chunk progress (no speed) duplicates the
        // per-second speedInterval report above — likely old+new lines.
        onProgress?.(downloadedBytes, totalBytes)
      })
      response.on('error', (error) => {
        clearInterval(dataTimeout)
        try { writer.close() } catch { }
        console.error(`[VoiceTranscribe] ${fileName} 响应错误:`, error)
        reject(error)
      })
      writer.on('error', (error) => {
        clearInterval(dataTimeout)
        try { writer.close() } catch { }
        console.error(`[VoiceTranscribe] ${fileName} 写入错误:`, error)
        reject(error)
      })
      writer.on('finish', () => {
        clearInterval(dataTimeout)
        clearInterval(speedInterval)
        writer.close()
        console.info(`[VoiceTranscribe] ${fileName} 下载完成:`, targetPath)
        resolve()
      })
      // NOTE(review): second 'error' listener on the same writer — both fire,
      // the later reject is a no-op. Looks like old+new diff lines; keep one.
      writer.on('error', (err) => {
        clearInterval(speedInterval)
        reject(err)
      })
      response.pipe(writer)
    })
    // NOTE(review): no `timeout` value is set in `options`, so this 'timeout'
    // event never fires — the old 30s connect timeout appears to have been
    // dropped in the refactor; confirm whether it should be restored.
    request.on('timeout', () => {
      request.destroy()
      console.error(`[VoiceTranscribe] ${fileName} 连接超时`)
      reject(new Error('连接超时'))
    })
    request.on('error', (error) => {
      console.error(`[VoiceTranscribe] ${fileName} 请求错误:`, error)
      reject(error)
    })
    // NOTE(review): duplicate 'error' listener (no-op reject) — old+new lines.
    request.on('error', reject)
  })
}
/**
 * Release recognizer resources.
 * Drops the reference so GC / native finalizers can reclaim it.
 */
dispose() {
  if (this.recognizer) {
    try {
      // sherpa-onnx's recognizer may need manual release
      this.recognizer = null
    } catch (error) {
    }
    // NOTE(review): redundant second assignment (and the try/catch above
    // wraps a plain assignment that cannot throw) — possibly merge residue;
    // confirm whether an explicit recognizer.free() call was intended here.
    this.recognizer = null
  }
}
}
// Shared singleton instance used by the rest of the main process.
export const voiceTranscribeService = new VoiceTranscribeService()