mirror of
https://github.com/jeffusion/gitea-ai-assistant.git
synced 2026-03-27 10:05:50 +00:00
Add GLOBAL_PROMPT config field that appends user-defined instructions to every LLM system message across all 9 call sites (legacy engine, agent specialist, reflexion, critic, and debate orchestrator). Configured via admin dashboard (auto-rendered from CONFIG_FIELDS metadata) or GLOBAL_PROMPT env var. Example use: "请始终使用中文回复". Changes: - Add GLOBAL_PROMPT to Zod schema, AppConfig interface, and buildConfig - Add CONFIG_FIELDS metadata (group: openai, type: text) - Add getEffectiveValue switch case - Add withGlobalPrompt() helper in src/utils/global-prompt.ts - Inject into all LLM call sites via withGlobalPrompt wrapper Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)
13 lines
468 B
TypeScript
/**
|
|
* Helper to inject the global prompt into LLM system messages.
|
|
*
|
|
* If globalPrompt is non-empty, it is appended to the original system content
|
|
* separated by a blank line. Otherwise the original content is returned as-is.
|
|
*/
|
|
export function withGlobalPrompt(systemContent: string, globalPrompt: string | undefined): string {
|
|
if (!globalPrompt || globalPrompt.trim() === '') {
|
|
return systemContent;
|
|
}
|
|
return `${systemContent}\n\n${globalPrompt}`;
|
|
}
|