修复问 AI 时间线与表格渲染

This commit is contained in:
ILoveBingLu
2026-04-30 01:56:07 +08:00
parent 4cfae93f2e
commit 66dbfc506b
14 changed files with 728 additions and 45 deletions

View File

@@ -7,7 +7,7 @@
**一款现代化的微信聊天记录查看与分析工具**
[![License](https://img.shields.io/badge/license-CC--BY--NC--SA--4.0-blue.svg)](LICENSE)
[![Version](https://img.shields.io/badge/version-5.0.1-green.svg)](package.json)
[![Version](https://img.shields.io/badge/version-5.0.2-green.svg)](package.json)
[![Platform](https://img.shields.io/badge/platform-Windows-0078D6.svg?logo=windows)]()
[![Electron](https://img.shields.io/badge/Electron-39-47848F.svg?logo=electron)]()
[![React](https://img.shields.io/badge/React-19-61DAFB.svg?logo=react)]()

View File

@@ -13,12 +13,25 @@ type SessionQAProgressEvent = {
[key: string]: unknown
}
// One entry in the session-QA timeline. Two shapes share this record:
//   - type 'text': a streamed content fragment (uses `channel` + `content`)
//   - type 'progress': a progress-event payload (uses `event`)
// Items are sorted by `order` first, then `createdAt`, then `id`.
type SessionQATimelineItem = {
type: 'text' | 'progress'
// Unique id, e.g. "text:<seq>" or "progress:<progress event id>".
id: string
// Primary ordering key; may carry a fractional part for sub-ordering.
order: number
createdAt: number
requestId?: string
// Which stream the item belongs to: the final answer or the thinking trace.
channel?: 'answer' | 'think'
content?: string
event?: SessionQAProgressEvent
[key: string]: unknown
}
type SessionQAJobEvent = {
requestId: string
seq: number
kind: 'progress' | 'chunk' | 'final' | 'error' | 'cancelled'
createdAt: number
progress?: SessionQAProgressEvent
timelineItems?: SessionQATimelineItem[]
chunk?: string
result?: unknown
error?: string

View File

@@ -757,6 +757,14 @@ export async function answerSessionQuestionWithAgent(
let route = heuristicRoute
let aiIntentResult: AIIntentRouterResult | null = null
emitProgress(options, {
id: 'intent-route',
stage: 'intent',
status: 'running',
title: '识别问题意图',
detail: '正在判断是否需要读取聊天记录'
})
if (!(heuristicRoute.intent === 'direct_answer' && !heuristicRoute.needsSearch)) {
try {
aiIntentResult = await refineRouteWithAIIntent({
@@ -777,9 +785,33 @@ export async function answerSessionQuestionWithAgent(
route = enforceConcreteEvidenceRoute(route, options.question)
}
const contactMap = route.intent === 'direct_answer' && !route.needsSearch
? new Map<string, string>()
: await loadSessionContactMap(options.sessionId)
emitProgress(options, {
id: 'intent-route',
stage: 'intent',
status: 'completed',
title: '识别问题意图',
detail: `${getRouteLabel(route.intent)}${route.needsSearch ? '需要读取聊天记录' : '无需读取聊天记录'}`
})
let contactMap = new Map<string, string>()
if (!(route.intent === 'direct_answer' && !route.needsSearch)) {
emitProgress(options, {
id: 'load-contact-map',
stage: 'context',
status: 'running',
title: '加载联系人信息',
detail: '正在加载当前会话的联系人映射'
})
contactMap = await loadSessionContactMap(options.sessionId)
emitProgress(options, {
id: 'load-contact-map',
stage: 'context',
status: 'completed',
title: '加载联系人信息',
detail: `已加载 ${contactMap.size} 个联系人映射`,
count: contactMap.size
})
}
if (options.sessionName && !contactMap.has(options.sessionId)) {
contactMap.set(options.sessionId, options.sessionName)
}
@@ -830,12 +862,20 @@ export async function answerSessionQuestionWithAgent(
let nativeResponse
let streamedDecisionText = ''
const decisionProgressId = `decision-${ctx.decisionAttempts}`
emitProgress(options, {
id: decisionProgressId,
stage: 'thought',
status: 'running',
title: '等待模型决策',
detail: `${ctx.decisionAttempts} 轮,正在判断下一步工具或回答`
})
try {
const toolOptions: ChatWithToolsOptions = {
model: options.model,
temperature: 0.2,
maxTokens: agentDecisionMaxTokens,
enableThinking: false,
enableThinking: options.enableThinking !== false,
tools: nativeTools,
toolChoice: 'auto'
}
@@ -847,7 +887,25 @@ export async function answerSessionQuestionWithAgent(
} else {
nativeResponse = await options.provider.chatWithTools(toolLoopMessages, toolOptions)
}
const toolCallCount = Array.isArray((nativeResponse.message as any)?.tool_calls)
? (nativeResponse.message as any).tool_calls.length
: 0
emitProgress(options, {
id: decisionProgressId,
stage: 'thought',
status: 'completed',
title: '等待模型决策',
detail: toolCallCount > 0 ? `模型请求调用 ${toolCallCount} 个工具` : '模型未请求工具调用',
count: toolCallCount
})
} catch (error) {
emitProgress(options, {
id: decisionProgressId,
stage: 'thought',
status: 'failed',
title: '等待模型决策',
detail: compactText(String(error), 120)
})
if ((error instanceof Error && error.message === NATIVE_TOOL_CALLING_UNSUPPORTED_MESSAGE) || isNativeToolCallingUnsupportedError(error)) {
throw new Error(NATIVE_TOOL_CALLING_UNSUPPORTED_MESSAGE)
}

View File

@@ -15,6 +15,7 @@ import type {
SessionQAMessageRecord,
SessionQAProgressEvent,
SessionQAResult,
SessionQATimelineItem,
SessionQAToolCall
} from '../../../src/types/ai'
@@ -428,6 +429,7 @@ export class AIDatabase {
evidence_refs_json TEXT,
tool_calls_json TEXT,
progress_events_json TEXT,
timeline_events_json TEXT,
tokens_used INTEGER,
cost REAL,
provider TEXT,
@@ -464,6 +466,12 @@ export class AIDatabase {
} catch (e) {
// 忽略错误,列已存在
}
try {
this.db.exec("ALTER TABLE qa_messages ADD COLUMN timeline_events_json TEXT")
} catch (e) {
// 忽略错误,列已存在
}
}
/**
@@ -1153,6 +1161,7 @@ export class AIDatabase {
evidenceRefs?: SummaryEvidenceRef[]
toolCalls?: SessionQAToolCall[]
progressEvents?: SessionQAProgressEvent[]
timelineEvents?: SessionQATimelineItem[]
tokensUsed?: number
cost?: number
provider?: string
@@ -1166,8 +1175,8 @@ export class AIDatabase {
INSERT INTO qa_messages (
conversation_id, role, content, think_content, error,
result_json, evidence_refs_json, tool_calls_json,
progress_events_json, tokens_used, cost, provider, model, request_id, created_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
progress_events_json, timeline_events_json, tokens_used, cost, provider, model, request_id, created_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(
input.conversationId,
input.role,
@@ -1178,6 +1187,7 @@ export class AIDatabase {
input.evidenceRefs ? JSON.stringify(input.evidenceRefs) : null,
input.toolCalls ? JSON.stringify(input.toolCalls) : null,
input.progressEvents ? JSON.stringify(input.progressEvents) : null,
input.timelineEvents ? JSON.stringify(input.timelineEvents) : null,
input.tokensUsed ?? null,
input.cost ?? null,
input.provider || null,
@@ -1269,6 +1279,7 @@ export class AIDatabase {
const evidenceRefs = this.parseJson<SummaryEvidenceRef[]>(row.evidence_refs_json)
const toolCalls = this.parseJson<SessionQAToolCall[]>(row.tool_calls_json)
const progressEvents = this.parseJson<SessionQAProgressEvent[]>(row.progress_events_json)
const timelineEvents = this.parseJson<SessionQATimelineItem[]>(row.timeline_events_json)
return {
id: row.id,
@@ -1281,6 +1292,7 @@ export class AIDatabase {
evidenceRefs: evidenceRefs || result?.evidenceRefs,
toolCalls: toolCalls || result?.toolCalls,
progressEvents: progressEvents || undefined,
timelineEvents: timelineEvents || undefined,
tokensUsed: row.tokens_used ?? undefined,
cost: row.cost ?? undefined,
provider: row.provider || undefined,

View File

@@ -49,7 +49,8 @@ import type { SessionProfileMemoryState } from '../memory/memoryProfileService'
import type { MemoryEvidenceRef, MemoryItem, MemoryItemInput, MemorySourceType } from '../memory/memorySchema'
import type {
SessionQAConversationDetail,
SessionQAConversationSummary
SessionQAConversationSummary,
SessionQATimelineItem
} from '../../../src/types/ai'
/**
@@ -859,6 +860,7 @@ ${detailInstructions[detail as keyof typeof detailInstructions] || detailInstruc
evidenceRefs?: any[]
toolCalls?: SessionQAToolCall[]
progressEvents?: SessionQAProgressEvent[]
timelineEvents?: SessionQATimelineItem[]
tokensUsed?: number
cost?: number
provider?: string

View File

@@ -69,7 +69,7 @@ export interface ChatWithToolsOptions extends ChatOptions {
}
export interface NativeToolCallResult {
message: OpenAI.Chat.ChatCompletionMessage
message: OpenAI.Chat.ChatCompletionMessage & { reasoning_content?: string | null }
finishReason?: string | null
}
@@ -166,6 +166,14 @@ export abstract class BaseAIProvider implements AIProvider {
return displayName
}
protected getChatRequestExtraParams(_options?: ChatOptions): Record<string, unknown> {
return {}
}
protected getToolRequestExtraParams(_options: ChatWithToolsOptions): Record<string, unknown> {
return {}
}
async chat(messages: OpenAI.Chat.ChatCompletionMessageParam[], options?: ChatOptions): Promise<string> {
const client = await this.getClient()
const model = this.resolveModelId(options?.model || this.models[0])
@@ -175,7 +183,8 @@ export abstract class BaseAIProvider implements AIProvider {
messages: messages,
temperature: options?.temperature || 0.7,
max_tokens: options?.maxTokens,
stream: false
stream: false,
...this.getChatRequestExtraParams(options)
})
return response.choices[0]?.message?.content || ''
@@ -195,7 +204,8 @@ export abstract class BaseAIProvider implements AIProvider {
max_tokens: options?.maxTokens,
stream: false,
tools: options.tools,
tool_choice: options.toolChoice ?? 'auto'
tool_choice: options.toolChoice ?? 'auto',
...this.getToolRequestExtraParams(options)
}
if (typeof options.parallelToolCalls === 'boolean') {
@@ -228,7 +238,8 @@ export abstract class BaseAIProvider implements AIProvider {
max_tokens: options?.maxTokens,
stream: true,
tools: options.tools,
tool_choice: options.toolChoice ?? 'auto'
tool_choice: options.toolChoice ?? 'auto',
...this.getToolRequestExtraParams(options)
}
if (typeof options.parallelToolCalls === 'boolean') {
@@ -239,6 +250,8 @@ export abstract class BaseAIProvider implements AIProvider {
const stream = await client.chat.completions.create(requestParams) as any
let role: 'assistant' = 'assistant'
let content = ''
let reasoningContent = ''
let isThinking = false
let finishReason: string | null = null
const toolCallByIndex = new Map<number, {
id: string
@@ -254,7 +267,23 @@ export abstract class BaseAIProvider implements AIProvider {
const delta = choice.delta || {}
if (delta.role === 'assistant') role = 'assistant'
const reasoning = typeof delta.reasoning_content === 'string'
? delta.reasoning_content
: ''
if (reasoning) {
if (!isThinking) {
onChunk('<think>')
isThinking = true
}
reasoningContent += reasoning
onChunk(reasoning)
}
if (typeof delta.content === 'string' && delta.content) {
if (isThinking) {
onChunk('</think>')
isThinking = false
}
content += delta.content
onChunk(delta.content)
}
@@ -296,10 +325,17 @@ export abstract class BaseAIProvider implements AIProvider {
role,
content: content || null
}
if (reasoningContent) {
message.reasoning_content = reasoningContent
}
if (toolCalls.length > 0) {
message.tool_calls = toolCalls
}
if (isThinking) {
onChunk('</think>')
}
return { message, finishReason }
} catch (error) {
throw normalizeNativeToolCallingError(error)
@@ -321,7 +357,8 @@ export abstract class BaseAIProvider implements AIProvider {
messages: messages,
temperature: options?.temperature || 0.7,
max_tokens: options?.maxTokens,
stream: true
stream: true,
...this.getChatRequestExtraParams(options)
}
// 自适应添加思考模式参数(尝试所有已知的参数格式)

View File

@@ -51,6 +51,18 @@ export class DeepSeekProvider extends BaseAIProvider {
return this.getModelId(displayName)
}
protected getToolRequestExtraParams(options: ChatWithToolsOptions): Record<string, unknown> {
const enableThinking = options.enableThinking !== false
return enableThinking
? {
thinking: { type: 'enabled' },
reasoning_effort: 'high'
}
: {
thinking: { type: 'disabled' }
}
}
private buildRequestParams(
messages: OpenAI.Chat.ChatCompletionMessageParam[],
options: ChatOptions | undefined,
@@ -94,7 +106,7 @@ export class DeepSeekProvider extends BaseAIProvider {
): Promise<NativeToolCallResult> {
const client = await this.getClient()
const requestParams: any = {
...this.buildRequestParams(messages, { ...options, enableThinking: false }, false),
...this.buildRequestParams(messages, options, false),
tools: options.tools,
tool_choice: options.toolChoice ?? 'auto'
}

View File

@@ -1,4 +1,4 @@
import { BaseAIProvider } from './base'
import { BaseAIProvider, type ChatOptions, type ChatWithToolsOptions } from './base'
/**
* 通义千问提供商元数据
@@ -76,6 +76,18 @@ export class QwenProvider extends BaseAIProvider {
return this.getModelId(displayName)
}
/**
 * Qwen-specific extra request parameters for plain chat requests.
 * `enable_thinking` defaults to true; only an explicit `false` disables it.
 */
protected getChatRequestExtraParams(options?: ChatOptions): Record<string, unknown> {
  const thinkingEnabled = options?.enableThinking !== false
  return { enable_thinking: thinkingEnabled }
}

/**
 * Qwen-specific extra request parameters for native tool-calling requests.
 * Mirrors the chat variant: thinking stays on unless explicitly disabled.
 */
protected getToolRequestExtraParams(options: ChatWithToolsOptions): Record<string, unknown> {
  const thinkingEnabled = options.enableThinking !== false
  return { enable_thinking: thinkingEnabled }
}
/**
* 重写 chat 方法以使用映射后的模型ID
*/

View File

@@ -6,7 +6,8 @@ import type {
SessionQAJobEvent,
SessionQAProgressEvent,
SessionQACancelResult,
SessionQAStartResult
SessionQAStartResult,
SessionQATimelineItem
} from '../../../src/types/ai'
import type { SessionQAOptions } from './aiService'
import { dataManagementService } from '../dataManagementService'
@@ -23,6 +24,8 @@ type SessionQAJob = {
assistantThinkContent: string
assistantIsThinking: boolean
progressEvents: SessionQAProgressEvent[]
timelineEvents: SessionQATimelineItem[]
timelineItemSeq: number
options: Omit<SessionQAStartOptions, 'requestId'>
}
@@ -47,6 +50,14 @@ function upsertProgressEvent(
)
}
/**
 * Returns a new array of timeline items ordered by `order`, then `createdAt`,
 * with lexicographic `id` as the final tie-breaker. The input is not mutated.
 */
function sortTimelineEvents(items: SessionQATimelineItem[]): SessionQATimelineItem[] {
  const copy = items.slice()
  copy.sort((left, right) => {
    if (left.order !== right.order) return left.order - right.order
    if (left.createdAt !== right.createdAt) return left.createdAt - right.createdAt
    return left.id.localeCompare(right.id)
  })
  return copy
}
class SessionQAJobService {
private jobs = new Map<string, SessionQAJob>()
private vectorWarmupJobs = new Map<string, Worker>()
@@ -96,6 +107,8 @@ class SessionQAJobService {
assistantThinkContent: '',
assistantIsThinking: false,
progressEvents: [],
timelineEvents: [],
timelineItemSeq: 0,
options: workerOptions
}
this.jobs.set(requestId, job)
@@ -128,6 +141,21 @@ class SessionQAJobService {
})
this.notifyConversationUpdated(job)
this.forwardEvent(requestId, {
kind: 'progress',
progress: {
id: 'job-start',
stage: 'intent',
status: 'completed',
title: '启动问答任务',
displayName: '启动问答任务',
nodeName: '启动问答任务',
detail: '任务已创建,正在进入问答流程',
source: 'model',
requestId,
createdAt: Date.now()
}
})
return { success: true, requestId, conversationId: conversation.conversationId }
}
@@ -138,6 +166,21 @@ class SessionQAJobService {
return { success: false, requestId, error: '问答任务不存在或已结束' }
}
const createdAt = Date.now()
const progress: SessionQAProgressEvent = {
id: 'job-cancelled',
stage: 'answer',
status: 'failed',
title: '已取消回答',
displayName: '已取消回答',
nodeName: '已取消回答',
detail: '用户已取消本次问答',
source: 'model',
requestId,
createdAt
}
job.progressEvents = upsertProgressEvent(job.progressEvents, progress)
const timelineItem = this.upsertTimelineProgress(job, progress, ++job.seq, createdAt)
this.jobs.delete(requestId)
await job.worker.terminate()
this.persistAssistantMessage(job, {
@@ -148,7 +191,9 @@ class SessionQAJobService {
requestId,
seq: ++job.seq,
kind: 'cancelled',
createdAt: Date.now()
createdAt,
progress,
timelineItems: [timelineItem]
})
this.notifyConversationUpdated(job)
return { success: true, requestId }
@@ -158,20 +203,52 @@ class SessionQAJobService {
const job = this.jobs.get(requestId)
if (!job) return
const nextSeq = ++job.seq
const nextCreatedAt = typeof event.createdAt === 'number' ? event.createdAt : Date.now()
const kind = event.kind || 'error'
const timelineItems: SessionQATimelineItem[] = []
let nextProgress = event.progress
if (event.kind === 'chunk' && event.chunk) {
this.appendAssistantChunk(job, event.chunk)
timelineItems.push(...this.appendAssistantChunk(job, event.chunk, nextSeq, nextCreatedAt))
}
if (event.kind === 'progress' && event.progress) {
job.progressEvents = upsertProgressEvent(job.progressEvents, event.progress)
const progress = {
...event.progress,
requestId: event.progress.requestId || requestId,
createdAt: event.progress.createdAt || nextCreatedAt
}
nextProgress = progress
job.progressEvents = upsertProgressEvent(job.progressEvents, progress)
timelineItems.push(this.upsertTimelineProgress(job, progress, nextSeq, nextCreatedAt))
}
if (event.kind === 'error' && !event.progress) {
const progress: SessionQAProgressEvent = {
id: 'job-error',
stage: 'answer',
status: 'failed',
title: '问答失败',
displayName: '问答失败',
nodeName: '问答失败',
detail: event.error || '问答失败',
source: 'model',
requestId,
createdAt: nextCreatedAt
}
nextProgress = progress
job.progressEvents = upsertProgressEvent(job.progressEvents, progress)
timelineItems.push(this.upsertTimelineProgress(job, progress, nextSeq, nextCreatedAt))
}
const nextEvent: SessionQAJobEvent = {
requestId,
seq: typeof event.seq === 'number' ? event.seq : ++job.seq,
kind: event.kind || 'error',
createdAt: typeof event.createdAt === 'number' ? event.createdAt : Date.now(),
progress: event.progress,
seq: nextSeq,
kind,
createdAt: nextCreatedAt,
progress: nextProgress,
timelineItems: timelineItems.length ? timelineItems : event.timelineItems,
chunk: event.chunk,
result: event.result,
error: event.error
@@ -215,16 +292,52 @@ class SessionQAJobService {
}
}
private appendAssistantChunk(job: SessionQAJob, chunk: string) {
private appendAssistantChunk(
job: SessionQAJob,
chunk: string,
order: number,
createdAt: number
): SessionQATimelineItem[] {
let remaining = chunk
const changed = new Map<string, SessionQATimelineItem>()
const appendText = (channel: 'answer' | 'think', content: string) => {
if (!content) return
if (channel === 'think') {
job.assistantThinkContent += content
} else {
job.assistantContent += content
}
const lastItem = job.timelineEvents[job.timelineEvents.length - 1]
if (lastItem?.type === 'text' && lastItem.channel === channel) {
lastItem.content += content
changed.set(lastItem.id, lastItem)
return
}
const item: SessionQATimelineItem = {
type: 'text',
id: `text:${++job.timelineItemSeq}`,
order: order + (job.timelineItemSeq / 1_000_000),
createdAt,
requestId: job.requestId,
channel,
content
}
job.timelineEvents.push(item)
changed.set(item.id, item)
}
while (remaining.length > 0) {
if (job.assistantIsThinking) {
const closeIndex = remaining.indexOf('</think>')
if (closeIndex < 0) {
job.assistantThinkContent += remaining
appendText('think', remaining)
break
}
job.assistantThinkContent += remaining.slice(0, closeIndex)
appendText('think', remaining.slice(0, closeIndex))
job.assistantIsThinking = false
remaining = remaining.slice(closeIndex + '</think>'.length)
continue
@@ -232,13 +345,59 @@ class SessionQAJobService {
const openIndex = remaining.indexOf('<think>')
if (openIndex < 0) {
job.assistantContent += remaining
appendText('answer', remaining)
break
}
job.assistantContent += remaining.slice(0, openIndex)
appendText('answer', remaining.slice(0, openIndex))
job.assistantIsThinking = true
remaining = remaining.slice(openIndex + '<think>'.length)
}
return Array.from(changed.values())
}
/**
 * Inserts or updates a progress event on the job's timeline and returns the
 * affected timeline item.
 *
 * Update path: when an item with id `progress:<progress.id>` already exists,
 * its `event` payload is replaced with the new progress data while keeping the
 * original event timestamp (so repeated status updates do not jump around).
 * Insert path: a new 'progress' item is appended and the timeline re-sorted.
 *
 * @param job       Mutable job state; `job.timelineEvents` is modified in place.
 * @param progress  The latest progress event payload.
 * @param order     Sequence number used as the item's primary sort key.
 * @param createdAt Fallback timestamp when the event carries none.
 */
private upsertTimelineProgress(
job: SessionQAJob,
progress: SessionQAProgressEvent,
order: number,
createdAt: number
): SessionQATimelineItem {
const id = `progress:${progress.id}`
const index = job.timelineEvents.findIndex((item) => item.id === id)
if (index >= 0) {
const existing = job.timelineEvents[index] as SessionQATimelineItem
// Only 'progress' items are updated; a (theoretical) id collision with a
// 'text' item leaves it untouched.
const item: SessionQATimelineItem = existing.type === 'progress'
? {
...existing,
event: {
...progress,
// Preserve the first-seen timestamp across status updates.
createdAt: existing.event.createdAt || progress.createdAt || createdAt
}
}
: existing
job.timelineEvents[index] = item
return item
}
const lastTimelineItem = job.timelineEvents[job.timelineEvents.length - 1]
// Tool-stage progress emitted while the model is thinking (or right after
// streamed think text) is rendered inside the think panel, not the answer.
const shouldAttachToolToThink = progress.stage === 'tool'
&& (
job.assistantIsThinking
|| (lastTimelineItem?.type === 'text' && lastTimelineItem.channel === 'think')
)
const item: SessionQATimelineItem = {
type: 'progress',
id,
order,
createdAt,
requestId: job.requestId,
channel: shouldAttachToolToThink ? 'think' : 'answer',
event: progress
}
job.timelineEvents.push(item)
// Keep the timeline globally ordered after the append.
job.timelineEvents = sortTimelineEvents(job.timelineEvents)
return item
}
private persistAssistantMessage(job: SessionQAJob, event: Partial<SessionQAJobEvent>) {
@@ -260,6 +419,7 @@ class SessionQAJobService {
evidenceRefs: result?.evidenceRefs,
toolCalls: result?.toolCalls,
progressEvents: job.progressEvents,
timelineEvents: job.timelineEvents,
tokensUsed: result?.tokensUsed,
cost: result?.cost,
provider: result?.provider || job.options.provider,

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "ciphertalk",
"version": "5.0.1",
"version": "5.0.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "ciphertalk",
"version": "5.0.1",
"version": "5.0.2",
"hasInstallScript": true,
"license": "CC-BY-NC-SA-4.0",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "ciphertalk",
"version": "5.0.1",
"version": "5.0.2",
"description": "密语 - 微信聊天记录查看工具",
"author": "ILoveBingLu",
"license": "CC-BY-NC-SA-4.0",

View File

@@ -1101,6 +1101,28 @@
}
}
/* Think ("深度思考") panel rendered inline in the QA timeline. */
.qa-timeline-think-panel {
max-width: min(100%, 680px);
/* Stack streamed think text and inline progress rows vertically. */
.think-content {
display: flex;
flex-direction: column;
gap: 8px;
}
/* Streamed reasoning text uses the secondary color to de-emphasize it. */
.qa-think-text {
color: var(--text-secondary);
/* Avoid trailing gap below the last paragraph inside the panel. */
p:last-child {
margin-bottom: 0;
}
}
/* Progress timeline embedded inside the think panel spans full width. */
.qa-progress-timeline {
width: 100%;
}
}
.qa-evidence-card-preview {
min-width: 0;
margin: 0;
@@ -2707,6 +2729,56 @@
margin: 0 0 16px 0;
}
/* Markdown tables: shrink-to-fit with rounded outer border. */
table {
display: inline-table;
width: auto;
max-width: 100%;
margin: 12px 0 18px;
/* separate + 0 spacing lets border-radius clip cleanly via overflow. */
border-collapse: separate;
border-spacing: 0;
color: var(--text-primary);
border: 1px solid color-mix(in srgb, var(--border-color) 82%, transparent);
border-radius: 8px;
overflow: hidden;
}
thead {
background: color-mix(in srgb, var(--bg-secondary) 72%, transparent);
}
/* Inner grid lines are drawn per-cell (right/bottom only). */
th,
td {
padding: 8px 10px;
border-right: 1px solid color-mix(in srgb, var(--border-color) 82%, transparent);
border-bottom: 1px solid color-mix(in srgb, var(--border-color) 82%, transparent);
vertical-align: top;
text-align: left;
word-break: break-word;
}
/* Drop the trailing cell border so it doesn't double the outer border. */
th:last-child,
td:last-child {
border-right: 0;
}
tbody tr:last-child td {
border-bottom: 0;
}
th {
color: var(--text-primary);
font-weight: 700;
/* Header labels stay on one line; body cells may wrap. */
white-space: nowrap;
}
td {
color: var(--text-secondary);
}
/* Zebra striping on even body rows. */
tbody tr:nth-child(even) {
background: color-mix(in srgb, var(--bg-secondary) 36%, transparent);
}
code {
padding: 0.2em 0.4em;
margin: 0;

View File

@@ -42,6 +42,7 @@ import {
type SessionQAMessageRecord,
type SessionQAProgressEvent,
type SessionQAResult,
type SessionQATimelineItem,
type SessionProfileMemoryState,
type SummaryEvidenceRef,
type SummaryResult,
@@ -73,6 +74,7 @@ interface QAMessage {
error?: string
result?: SessionQAResult
progressEvents?: SessionQAProgressEvent[]
timelineEvents?: SessionQATimelineItem[]
requestId?: string
thinkContent?: string
isThinking?: boolean
@@ -144,6 +146,11 @@ function stripSummaryContent(content: string) {
return splitSummaryContent(content).mainContent
}
/**
 * Ensures a blank line separates prose from an immediately following GFM
 * table (header row + separator row) so `marked` renders it as a table
 * instead of lazily continuing the preceding paragraph.
 *
 * Generalized from the original colon-only rule: any line whose last
 * character is not a newline or a pipe (i.e. not itself a table row) now
 * gets the blank-line separator before the table. Colon-terminated lines
 * (":" / ":") remain covered, so existing behavior is preserved.
 */
function normalizeMarkdownTables(text: string) {
  return text
    .replace(/([^\n|])\n(\|[^\n]+\|\n\|[\s:|-]+\|)/g, '$1\n\n$2')
}
function formatConfidence(value?: number) {
if (typeof value !== 'number' || Number.isNaN(value)) {
return ''
@@ -599,6 +606,58 @@ function upsertQAProgressEvent(
)
}
/**
 * Merges incoming timeline items into the existing list by id and returns
 * the result sorted by order / createdAt / id. When an id already exists,
 * the incoming payload wins but the original `order` (and `createdAt`, when
 * set) are preserved so items do not visually jump. Returns the original
 * array unchanged when there is nothing to merge.
 */
function upsertQATimelineItems(
  events: SessionQATimelineItem[] = [],
  incoming: SessionQATimelineItem[] = []
) {
  if (!incoming.length) return events
  const merged = new Map<string, SessionQATimelineItem>()
  for (const item of events) {
    merged.set(item.id, item)
  }
  for (const item of incoming) {
    const previous = merged.get(item.id)
    if (previous) {
      merged.set(item.id, {
        ...item,
        order: previous.order,
        createdAt: previous.createdAt || item.createdAt
      })
    } else {
      merged.set(item.id, item)
    }
  }
  const result = [...merged.values()]
  result.sort((a, b) => {
    if (a.order !== b.order) return a.order - b.order
    if (a.createdAt !== b.createdAt) return a.createdAt - b.createdAt
    return a.id.localeCompare(b.id)
  })
  return result
}
/**
 * Builds the optimistic "job starting" progress event shown locally the
 * moment a question is submitted, before any backend events arrive, together
 * with its timeline wrapper. The wrapper's `event` field references the same
 * progress object so later upserts by id (`progress:job-start`) replace it.
 */
function createLocalQATimelineProgress(requestId: string, createdAt: number): {
  progress: SessionQAProgressEvent
  timelineItem: SessionQATimelineItem
} {
  const progress: SessionQAProgressEvent = {
    id: 'job-start',
    stage: 'intent',
    status: 'running',
    title: '准备问答任务',
    displayName: '准备问答任务',
    nodeName: '准备问答任务',
    detail: '正在提交问题并创建问答任务',
    source: 'model',
    requestId,
    createdAt
  }
  const timelineItem: SessionQATimelineItem = {
    type: 'progress',
    id: `progress:${progress.id}`,
    order: 0, // always sorts ahead of backend-sequenced items
    createdAt,
    requestId,
    channel: 'answer',
    event: progress
  }
  return { progress, timelineItem }
}
function appendQAChunkToMessage(message: QAMessage, chunk: string): QAMessage {
let remaining = chunk
let next: QAMessage = { ...message }
@@ -668,6 +727,7 @@ function mapStoredQAMessage(record: SessionQAMessageRecord): QAMessage {
isThinking: false,
showThink: false,
progressEvents: record.progressEvents,
timelineEvents: record.timelineEvents,
requestId: record.requestId
}
}
@@ -700,6 +760,7 @@ function AISummaryWindow() {
const [isAsking, setIsAsking] = useState(false)
const [activeQARequestId, setActiveQARequestId] = useState<string | null>(null)
const [expandedQAProgressIds, setExpandedQAProgressIds] = useState<Set<string>>(() => new Set())
const [collapsedQAThinkIds, setCollapsedQAThinkIds] = useState<Set<string>>(() => new Set())
const [expandedQAEvidenceIds, setExpandedQAEvidenceIds] = useState<Set<string>>(() => new Set())
const [qaError, setQaError] = useState('')
const [profileMemoryState, setProfileMemoryState] = useState<SessionProfileMemoryState | null>(null)
@@ -759,7 +820,7 @@ function AISummaryWindow() {
}))
const renderMarkdown = (text: string) => {
const html = marked.parse(text) as string
const html = marked.parse(normalizeMarkdownTables(text)) as string
return { __html: DOMPurify.sanitize(html) }
}
@@ -1073,12 +1134,20 @@ function AISummaryWindow() {
)
}
const toggleQAThinkPanel = (messageId: string) => {
setQaMessages(prev => prev.map(message => (
message.id === messageId
? { ...message, showThink: !message.showThink }
: message
)))
// Toggles collapse state for one think panel. Panels are expanded by
// default; `collapsedQAThinkIds` tracks only the explicitly collapsed ones.
const toggleQAThinkPanel = (panelId: string) => {
setCollapsedQAThinkIds(prev => {
// Copy before mutating so React sees a new Set reference.
const next = new Set(prev)
if (next.has(panelId)) {
next.delete(panelId)
} else {
next.add(panelId)
}
return next
})
}
// A panel is expanded unless its id was explicitly collapsed.
const isQAThinkPanelExpanded = (panelId: string) => {
return !collapsedQAThinkIds.has(panelId)
}
const toggleQAProgressEvent = (eventId: string) => {
@@ -1190,11 +1259,12 @@ function AISummaryWindow() {
return null
}
const expanded = message.showThink !== false
const panelId = `${message.id}:legacy-think`
const expanded = isQAThinkPanelExpanded(panelId)
return (
<div className={`think-panel qa-think-panel ${!expanded ? 'collapsed' : ''} ${message.isThinking ? 'thinking' : ''}`}>
<div className="think-header" onClick={() => toggleQAThinkPanel(message.id)}>
<div className="think-header" onClick={() => toggleQAThinkPanel(panelId)}>
<div className="think-title">
{message.isThinking ? (
<Loader2 size={14} className="think-icon animate-spin" />
@@ -1217,15 +1287,167 @@ function AISummaryWindow() {
}
const renderQATimeline = (message: QAMessage) => {
type QATimelineItem =
type LegacyQATimelineItem =
| { type: 'progress'; id: string; events: SessionQAProgressEvent[] }
| { type: 'thought'; event: SessionQAProgressEvent }
| { type: 'answer'; content: string }
type TimelineRenderItem =
| { type: 'progress'; id: string; events: SessionQAProgressEvent[] }
| { type: 'text'; item: SessionQATimelineItem & { type: 'text' } }
| { type: 'think'; id: string; items: SessionQATimelineItem[] }
const renderAnswerTextItem = (item: SessionQATimelineItem & { type: 'text' }) => {
if (!item.content) return null
return (
<div key={item.id} className="qa-bubble">
<div
className="qa-answer markdown-body"
dangerouslySetInnerHTML={renderMarkdown(item.content)}
/>
</div>
)
}
const renderTimelineThinkGroup = (group: Extract<TimelineRenderItem, { type: 'think' }>) => {
const expanded = isQAThinkPanelExpanded(group.id)
const lastTimelineItem = message.timelineEvents?.[message.timelineEvents.length - 1]
const isThinkingGroup = message.isThinking && group.items.some((item) => item.id === lastTimelineItem?.id)
return (
<div
key={group.id}
className={`think-panel qa-think-panel qa-timeline-think-panel ${!expanded ? 'collapsed' : ''} ${isThinkingGroup ? 'thinking' : ''}`}
>
<div className="think-header" onClick={() => toggleQAThinkPanel(group.id)}>
<div className="think-title">
{isThinkingGroup ? (
<Loader2 size={14} className="think-icon animate-spin" />
) : (
<Atom size={14} className="think-icon" />
)}
<span>{isThinkingGroup ? '深度思考中...' : '深度思考'}</span>
</div>
<ChevronDown
size={16}
className={`toggle-icon ${expanded ? 'expanded' : ''}`}
/>
</div>
<div className="think-content">
{group.items.map((item) => {
if (item.type === 'progress') {
return renderQAProgressTimeline(`${group.id}-${item.id}`, [item.event])
}
return (
<div
key={item.id}
className="qa-think-text markdown-body"
dangerouslySetInnerHTML={renderMarkdown(item.content)}
/>
)
})}
</div>
</div>
)
}
const timelineEvents = [...(message.timelineEvents || [])]
.sort((a, b) =>
a.order - b.order
|| a.createdAt - b.createdAt
|| a.id.localeCompare(b.id)
)
if (timelineEvents.length > 0) {
const renderItems: TimelineRenderItem[] = []
let pendingProgressEvents: SessionQAProgressEvent[] = []
let pendingThinkItems: SessionQATimelineItem[] = []
const flushProgressEvents = () => {
if (pendingProgressEvents.length === 0) return
const firstEventId = pendingProgressEvents[0]?.id || String(renderItems.length)
renderItems.push({
type: 'progress',
id: `${message.id}-progress-${firstEventId}-${renderItems.length}`,
events: pendingProgressEvents
})
pendingProgressEvents = []
}
const flushThinkItems = () => {
if (pendingThinkItems.length === 0) return
renderItems.push({
type: 'think',
id: `${message.id}-think-${pendingThinkItems[0]?.id || renderItems.length}`,
items: pendingThinkItems
})
pendingThinkItems = []
}
timelineEvents.forEach((item) => {
if (item.channel === 'think') {
flushProgressEvents()
pendingThinkItems.push(item)
return
}
flushThinkItems()
if (item.type === 'progress') {
pendingProgressEvents.push(item.event)
return
}
flushProgressEvents()
renderItems.push({ type: 'text', item })
})
flushThinkItems()
flushProgressEvents()
const hasEvidence = (message.result?.evidenceRefs?.length || 0) > 0
const hasRenderableItems = renderItems.length > 0 || message.error || hasEvidence
if (!hasRenderableItems && message.isStreaming) {
return (
<div className="qa-streaming-placeholder">
<Loader2 size={14} className="spinner" />
</div>
)
}
if (!hasRenderableItems) return null
return (
<div className="qa-timeline">
{renderItems.map((item) => {
if (item.type === 'progress') {
return renderQAProgressTimeline(item.id, item.events)
}
if (item.type === 'think') {
return renderTimelineThinkGroup(item)
}
return renderAnswerTextItem(item.item)
})}
{message.error && (
<div className="qa-bubble">
<div className="qa-error">{message.error}</div>
</div>
)}
{hasEvidence && (
<div className="qa-bubble qa-evidence-bubble">
{renderQAEvidenceCards(message.id, message.result?.evidenceRefs)}
</div>
)}
</div>
)
}
const progressItems = [...(message.progressEvents || [])]
.sort((a, b) => (a.createdAt || 0) - (b.createdAt || 0))
const timelineItems: QATimelineItem[] = []
const timelineItems: LegacyQATimelineItem[] = []
let pendingProgressEvents: SessionQAProgressEvent[] = []
const flushProgressEvents = () => {
@@ -1251,6 +1473,7 @@ function AISummaryWindow() {
const hasAnswerBody = Boolean(
message.error ||
(message.timelineEvents?.length || 0) > 0 ||
message.thinkContent ||
message.content ||
(message.result?.evidenceRefs?.length || 0) > 0
@@ -1727,13 +1950,39 @@ function AISummaryWindow() {
if (event.kind === 'progress' && event.progress) {
setQaMessages(prev => prev.map(message => (
message.id === assistantId
? { ...message, progressEvents: upsertQAProgressEvent(message.progressEvents, event.progress!) }
? {
...message,
progressEvents: upsertQAProgressEvent(message.progressEvents, event.progress!),
timelineEvents: upsertQATimelineItems(message.timelineEvents, event.timelineItems || [])
}
: message
)))
return
}
if (event.kind === 'chunk' && event.chunk) {
const isThinkBoundaryChunk = event.chunk.includes('<think>') || event.chunk.includes('</think>')
if (event.timelineItems?.length) {
setQaMessages(prev => prev.map(message => (
message.id === assistantId
? {
...appendQAChunkToMessage(message, event.chunk!),
timelineEvents: upsertQATimelineItems(message.timelineEvents, event.timelineItems)
}
: message
)))
return
}
if (isThinkBoundaryChunk) {
setQaMessages(prev => prev.map(message => (
message.id === assistantId
? appendQAChunkToMessage(message, event.chunk!)
: message
)))
return
}
const previous = qaChunkBufferRef.current.get(assistantId) || ''
qaChunkBufferRef.current.set(assistantId, `${previous}${event.chunk}`)
scheduleQAChunkFlush()
@@ -1752,6 +2001,7 @@ function AISummaryWindow() {
message.id === assistantId
? {
...message,
timelineEvents: upsertQATimelineItems(message.timelineEvents, event.timelineItems || []),
content: stripSummaryContent(event.result!.answerText),
createdAt: event.result!.createdAt,
isStreaming: false,
@@ -1778,6 +2028,7 @@ function AISummaryWindow() {
message.id === assistantId
? {
...message,
timelineEvents: upsertQATimelineItems(message.timelineEvents, event.timelineItems || []),
content: message.content || (event.kind === 'cancelled' ? messageText : ''),
isStreaming: false,
isThinking: false,
@@ -2315,13 +2566,17 @@ function AISummaryWindow() {
createdAt: Date.now()
}
const assistantId = buildMessageId()
const assistantCreatedAt = Date.now()
const initialTimeline = createLocalQATimelineProgress(requestId, assistantCreatedAt)
const assistantMessage: QAMessage = {
id: assistantId,
role: 'assistant',
content: '',
createdAt: Date.now(),
createdAt: assistantCreatedAt,
isStreaming: true,
requestId
requestId,
progressEvents: [initialTimeline.progress],
timelineEvents: [initialTimeline.timelineItem]
}
qaRequestMessageMapRef.current.set(requestId, assistantId)
@@ -2364,6 +2619,28 @@ function AISummaryWindow() {
}
} catch (e) {
const message = String(e)
const failedAt = Date.now()
const failedProgress: SessionQAProgressEvent = {
id: 'job-start',
stage: 'intent',
status: 'failed',
title: '问答任务未启动',
displayName: '问答任务未启动',
nodeName: '问答任务未启动',
detail: message,
source: 'model',
requestId,
createdAt: failedAt
}
const failedTimelineItem: SessionQATimelineItem = {
type: 'progress',
id: `progress:${failedProgress.id}`,
order: 0,
createdAt: failedAt,
requestId,
channel: 'answer',
event: failedProgress
}
setQaError(message)
qaRequestMessageMapRef.current.delete(requestId)
if (activeQARequestIdRef.current === requestId) {
@@ -2375,6 +2652,8 @@ function AISummaryWindow() {
? {
...item,
content: '',
progressEvents: upsertQAProgressEvent(item.progressEvents, failedProgress),
timelineEvents: upsertQATimelineItems(item.timelineEvents, [failedTimelineItem]),
isStreaming: false,
error: message
}

View File

@@ -156,6 +156,7 @@ export interface SessionQAMessageRecord {
evidenceRefs?: SummaryEvidenceRef[]
toolCalls?: SessionQAToolCall[]
progressEvents?: SessionQAProgressEvent[]
timelineEvents?: SessionQATimelineItem[]
tokensUsed?: number
cost?: number
provider?: string
@@ -231,6 +232,30 @@ export interface SessionQAProgressEvent {
diagnostics?: string[]
}
/** Which rendering lane a timeline item belongs to: the final answer or the thinking trace. */
export type SessionQATimelineChannel = 'answer' | 'think'

/** A streamed text fragment on the session-QA timeline. */
export interface SessionQATimelineTextItem {
type: 'text'
// Unique id, e.g. "text:<sequence number>".
id: string
// Primary sort key; ties broken by createdAt, then id.
order: number
createdAt: number
requestId?: SessionQARequestId
channel: SessionQATimelineChannel
content: string
}

/** A progress event rendered inline on the session-QA timeline. */
export interface SessionQATimelineProgressItem {
type: 'progress'
// Unique id, e.g. "progress:<progress event id>".
id: string
order: number
createdAt: number
requestId?: SessionQARequestId
// Optional: tool-stage progress may be routed into the think panel.
channel?: SessionQATimelineChannel
event: SessionQAProgressEvent
}

/** Discriminated union over `type`; consumers switch on `item.type`. */
export type SessionQATimelineItem = SessionQATimelineTextItem | SessionQATimelineProgressItem
export type SessionQAJobEventKind = 'progress' | 'chunk' | 'final' | 'error' | 'cancelled'
export interface SessionQAJobEvent {
@@ -239,6 +264,7 @@ export interface SessionQAJobEvent {
kind: SessionQAJobEventKind
createdAt: number
progress?: SessionQAProgressEvent
timelineItems?: SessionQATimelineItem[]
chunk?: string
result?: SessionQAResult
error?: string