Merge pull request #11 from WideLee/feat/20260311_refidx

feat: 实现 C2C 消息的 ref-index 缓存机制
This commit is contained in:
ryanlee-gemini
2026-03-12 12:29:41 +08:00
committed by GitHub
9 changed files with 753 additions and 88 deletions

1
.gitignore vendored
View File

@@ -8,3 +8,4 @@ package-lock.json
openclaw.json
*.tgz
*.log
pnpm-lock.yaml

View File

@@ -12,6 +12,35 @@ const TOKEN_URL = "https://bots.qq.com/app/getAppAccessToken";
// 运行时配置
let currentMarkdownSupport = false;
// Outbound-message callback hook: fired when a message is sent successfully
// and the response carries ext_info.ref_idx.
// Registered by the outer layer (gateway/outbound) to cache the refIdx of
// every bot outbound message in one place.
/** Outbound message metadata (stored structurally, no pre-formatting). */
export interface OutboundMeta {
  /** Message text content. */
  text?: string;
  /** Media type. */
  mediaType?: "image" | "voice" | "video" | "file";
  /** Media source: online URL. */
  mediaUrl?: string;
  /** Media source: local file path or file name. */
  mediaLocalPath?: string;
  /** Original TTS text (voice type only; preserves the text content before TTS conversion). */
  ttsText?: string;
}
type OnMessageSentCallback = (refIdx: string, meta: OutboundMeta) => void;
// Single registered hook; null until onMessageSent() is called.
let onMessageSentHook: OnMessageSentCallback | null = null;
/**
 * Register the outbound-message callback.
 * Invoked automatically when a message is sent successfully and QQ returns a ref_idx.
 * Used at the lowest layer to cache the refIdx of bot outbound messages in one place.
 */
export function onMessageSent(callback: OnMessageSentCallback): void {
  onMessageSentHook = callback;
}
/**
* 初始化 API 配置
* @param options.markdownSupport - 是否支持 markdown 消息(默认 false;需要机器人具备该权限才能启用)
@@ -98,7 +127,8 @@ async function doFetchToken(appId: string, clientSecret: string): Promise<string
response.headers.forEach((value, key) => {
responseHeaders[key] = value;
});
console.log(`[qqbot-api:${appId}] <<< Status: ${response.status} ${response.statusText}`);
const tokenTraceId = response.headers.get("x-tps-trace-id") ?? "";
console.log(`[qqbot-api:${appId}] <<< Status: ${response.status} ${response.statusText}${tokenTraceId ? ` | TraceId: ${tokenTraceId}` : ""}`);
let data: { access_token?: string; expires_in?: number };
let rawBody: string;
@@ -214,6 +244,7 @@ export async function apiRequest<T = unknown>(
if (typeof logBody.file_data === "string") {
logBody.file_data = `<base64 ${(logBody.file_data as string).length} chars>`;
}
console.log(`[qqbot-api] >>> Body:`, JSON.stringify(logBody));
}
let res: Response;
@@ -235,12 +266,14 @@ export async function apiRequest<T = unknown>(
res.headers.forEach((value, key) => {
responseHeaders[key] = value;
});
console.log(`[qqbot-api] <<< Status: ${res.status} ${res.statusText}`);
const traceId = res.headers.get("x-tps-trace-id") ?? "";
console.log(`[qqbot-api] <<< Status: ${res.status} ${res.statusText}${traceId ? ` | TraceId: ${traceId}` : ""}`);
let data: T;
let rawBody: string;
try {
rawBody = await res.text();
console.log(`[qqbot-api] <<< Body:`, rawBody);
data = JSON.parse(rawBody) as T;
} catch (err) {
throw new Error(`Failed to parse response[${path}]: ${err instanceof Error ? err.message : String(err)}`);
@@ -303,12 +336,39 @@ export async function getGatewayUrl(accessToken: string): Promise<string> {
/** Response returned by the QQ message-send endpoints. */
export interface MessageResponse {
  id: string;
  timestamp: number | string;
  /** Reference-index info for the message (returned by the QQ server on outbound sends). */
  ext_info?: {
    ref_idx?: string;
  };
}
/**
 * Send a message and fire the refIdx callback.
 * Every message-sending function funnels through here, guaranteeing that the
 * refIdx of each outbound message is captured exactly once.
 */
async function sendAndNotify(
  accessToken: string,
  method: string,
  path: string,
  body: unknown,
  meta: OutboundMeta,
): Promise<MessageResponse> {
  const response = await apiRequest<MessageResponse>(accessToken, method, path, body);
  const refIdx = response.ext_info?.ref_idx;
  // A faulty hook must never break the send path itself.
  if (refIdx && onMessageSentHook) {
    try {
      onMessageSentHook(refIdx, meta);
    } catch (hookErr) {
      console.error(`[qqbot-api] onMessageSent hook error: ${hookErr}`);
    }
  }
  return response;
}
function buildMessageBody(
content: string,
msgId: string | undefined,
msgSeq: number
msgSeq: number,
messageReference?: string
): Record<string, unknown> {
const body: Record<string, unknown> = currentMarkdownSupport
? {
@@ -325,6 +385,9 @@ function buildMessageBody(
if (msgId) {
body.msg_id = msgId;
}
if (messageReference && !currentMarkdownSupport) {
body.message_reference = { message_id: messageReference };
}
return body;
}
@@ -332,11 +395,12 @@ export async function sendC2CMessage(
accessToken: string,
openid: string,
content: string,
msgId?: string
msgId?: string,
messageReference?: string
): Promise<MessageResponse> {
const msgSeq = msgId ? getNextMsgSeq(msgId) : 1;
const body = buildMessageBody(content, msgId, msgSeq);
return apiRequest(accessToken, "POST", `/v2/users/${openid}/messages`, body);
const body = buildMessageBody(content, msgId, msgSeq, messageReference);
return sendAndNotify(accessToken, "POST", `/v2/users/${openid}/messages`, body, { text: content });
}
export async function sendC2CInputNotify(
@@ -344,7 +408,7 @@ export async function sendC2CInputNotify(
openid: string,
msgId?: string,
inputSecond: number = 60
): Promise<void> {
): Promise<{ refIdx?: string }> {
const msgSeq = msgId ? getNextMsgSeq(msgId) : 1;
const body = {
msg_type: 6,
@@ -355,7 +419,8 @@ export async function sendC2CInputNotify(
msg_seq: msgSeq,
...(msgId ? { msg_id: msgId } : {}),
};
await apiRequest(accessToken, "POST", `/v2/users/${openid}/messages`, body);
const response = await apiRequest<{ ext_info?: { ref_idx?: string } }>(accessToken, "POST", `/v2/users/${openid}/messages`, body);
return { refIdx: response.ext_info?.ref_idx };
}
export async function sendChannelMessage(
@@ -396,9 +461,9 @@ export async function sendProactiveC2CMessage(
accessToken: string,
openid: string,
content: string
): Promise<{ id: string; timestamp: number }> {
): Promise<MessageResponse> {
const body = buildProactiveMessageBody(content);
return apiRequest(accessToken, "POST", `/v2/users/${openid}/messages`, body);
return sendAndNotify(accessToken, "POST", `/v2/users/${openid}/messages`, body, { text: content });
}
export async function sendProactiveGroupMessage(
@@ -501,16 +566,17 @@ export async function sendC2CMediaMessage(
openid: string,
fileInfo: string,
msgId?: string,
content?: string
): Promise<{ id: string; timestamp: number }> {
content?: string,
meta?: OutboundMeta,
): Promise<MessageResponse> {
const msgSeq = msgId ? getNextMsgSeq(msgId) : 1;
return apiRequest(accessToken, "POST", `/v2/users/${openid}/messages`, {
return sendAndNotify(accessToken, "POST", `/v2/users/${openid}/messages`, {
msg_type: 7,
media: { file_info: fileInfo },
msg_seq: msgSeq,
...(content ? { content } : {}),
...(msgId ? { msg_id: msgId } : {}),
});
}, meta ?? { text: content });
}
export async function sendGroupMediaMessage(
@@ -530,21 +596,29 @@ export async function sendGroupMediaMessage(
});
}
export async function sendC2CImageMessage(accessToken: string, openid: string, imageUrl: string, msgId?: string, content?: string): Promise<{ id: string; timestamp: number }> {
export async function sendC2CImageMessage(accessToken: string, openid: string, imageUrl: string, msgId?: string, content?: string, localPath?: string): Promise<MessageResponse> {
let uploadResult: UploadMediaResponse;
if (imageUrl.startsWith("data:")) {
const isBase64 = imageUrl.startsWith("data:");
if (isBase64) {
const matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);
if (!matches) throw new Error("Invalid Base64 Data URL format");
uploadResult = await uploadC2CMedia(accessToken, openid, MediaFileType.IMAGE, undefined, matches[2], false);
} else {
uploadResult = await uploadC2CMedia(accessToken, openid, MediaFileType.IMAGE, imageUrl, undefined, false);
}
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, content);
const meta: OutboundMeta = {
text: content,
mediaType: "image",
...(!isBase64 ? { mediaUrl: imageUrl } : {}),
...(localPath ? { mediaLocalPath: localPath } : {}),
};
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, content, meta);
}
export async function sendGroupImageMessage(accessToken: string, groupOpenid: string, imageUrl: string, msgId?: string, content?: string): Promise<{ id: string; timestamp: string }> {
let uploadResult: UploadMediaResponse;
if (imageUrl.startsWith("data:")) {
const isBase64 = imageUrl.startsWith("data:");
if (isBase64) {
const matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);
if (!matches) throw new Error("Invalid Base64 Data URL format");
uploadResult = await uploadGroupMedia(accessToken, groupOpenid, MediaFileType.IMAGE, undefined, matches[2], false);
@@ -554,9 +628,13 @@ export async function sendGroupImageMessage(accessToken: string, groupOpenid: st
return sendGroupMediaMessage(accessToken, groupOpenid, uploadResult.file_info, msgId, content);
}
export async function sendC2CVoiceMessage(accessToken: string, openid: string, voiceBase64: string, msgId?: string): Promise<{ id: string; timestamp: number }> {
export async function sendC2CVoiceMessage(accessToken: string, openid: string, voiceBase64: string, msgId?: string, ttsText?: string, filePath?: string): Promise<MessageResponse> {
const uploadResult = await uploadC2CMedia(accessToken, openid, MediaFileType.VOICE, undefined, voiceBase64, false);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, undefined, {
mediaType: "voice",
...(ttsText ? { ttsText } : {}),
...(filePath ? { mediaLocalPath: filePath } : {})
});
}
export async function sendGroupVoiceMessage(accessToken: string, groupOpenid: string, voiceBase64: string, msgId?: string): Promise<{ id: string; timestamp: string }> {
@@ -564,9 +642,10 @@ export async function sendGroupVoiceMessage(accessToken: string, groupOpenid: st
return sendGroupMediaMessage(accessToken, groupOpenid, uploadResult.file_info, msgId);
}
export async function sendC2CFileMessage(accessToken: string, openid: string, fileBase64?: string, fileUrl?: string, msgId?: string, fileName?: string): Promise<{ id: string; timestamp: number }> {
export async function sendC2CFileMessage(accessToken: string, openid: string, fileBase64?: string, fileUrl?: string, msgId?: string, fileName?: string, localFilePath?: string): Promise<MessageResponse> {
const uploadResult = await uploadC2CMedia(accessToken, openid, MediaFileType.FILE, fileUrl, fileBase64, false, fileName);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, undefined,
{ mediaType: "file", mediaUrl: fileUrl, mediaLocalPath: localFilePath ?? fileName });
}
export async function sendGroupFileMessage(accessToken: string, groupOpenid: string, fileBase64?: string, fileUrl?: string, msgId?: string, fileName?: string): Promise<{ id: string; timestamp: string }> {
@@ -574,9 +653,10 @@ export async function sendGroupFileMessage(accessToken: string, groupOpenid: str
return sendGroupMediaMessage(accessToken, groupOpenid, uploadResult.file_info, msgId);
}
export async function sendC2CVideoMessage(accessToken: string, openid: string, videoUrl?: string, videoBase64?: string, msgId?: string, content?: string): Promise<{ id: string; timestamp: number }> {
export async function sendC2CVideoMessage(accessToken: string, openid: string, videoUrl?: string, videoBase64?: string, msgId?: string, content?: string, localPath?: string): Promise<MessageResponse> {
const uploadResult = await uploadC2CMedia(accessToken, openid, MediaFileType.VIDEO, videoUrl, videoBase64, false);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, content);
return sendC2CMediaMessage(accessToken, openid, uploadResult.file_info, msgId, content,
{ text: content, mediaType: "video", ...(videoUrl ? { mediaUrl: videoUrl } : {}), ...(localPath ? { mediaLocalPath: localPath } : {}) });
}
export async function sendGroupVideoMessage(accessToken: string, groupOpenid: string, videoUrl?: string, videoBase64?: string, msgId?: string, content?: string): Promise<{ id: string; timestamp: string }> {

View File

@@ -1,6 +1,7 @@
import {
type ChannelPlugin,
type OpenClawConfig,
type NormalizeTargetResult,
applyAccountNameToChannelSection,
deleteAccountFromConfigSection,
setAccountEnabledInConfigSection,
@@ -167,7 +168,7 @@ export const qqbotPlugin: ChannelPlugin<ResolvedQQBotAccount> = {
* - channel:channelid -> 频道
* - 纯 openid(32位十六进制)-> 私聊
*/
normalizeTarget: (target: string) => {
normalizeTarget: (target: string): NormalizeTargetResult => {
// 去掉 qqbot: 前缀(如果有)
const id = target.replace(/^qqbot:/i, "");

View File

@@ -2,10 +2,11 @@ import WebSocket from "ws";
import path from "node:path";
import * as fs from "node:fs";
import type { ResolvedQQBotAccount, WSPayload, C2CMessageEvent, GuildMessageEvent, GroupMessageEvent } from "./types.js";
import { getAccessToken, getGatewayUrl, sendC2CMessage, sendChannelMessage, sendGroupMessage, clearTokenCache, sendC2CImageMessage, sendGroupImageMessage, sendC2CVoiceMessage, sendGroupVoiceMessage, sendC2CVideoMessage, sendGroupVideoMessage, sendC2CFileMessage, sendGroupFileMessage, initApiConfig, startBackgroundTokenRefresh, stopBackgroundTokenRefresh, sendC2CInputNotify } from "./api.js";
import { getAccessToken, getGatewayUrl, sendC2CMessage, sendChannelMessage, sendGroupMessage, clearTokenCache, sendC2CImageMessage, sendGroupImageMessage, sendC2CVoiceMessage, sendGroupVoiceMessage, sendC2CVideoMessage, sendGroupVideoMessage, sendC2CFileMessage, sendGroupFileMessage, initApiConfig, startBackgroundTokenRefresh, stopBackgroundTokenRefresh, sendC2CInputNotify, onMessageSent } from "./api.js";
import { loadSession, saveSession, clearSession, type SessionState } from "./session-store.js";
import { recordKnownUser, flushKnownUsers } from "./known-users.js";
import { getQQBotRuntime } from "./runtime.js";
import { setRefIndex, getRefIndex, formatRefEntryForAgent, flushRefIndex, type RefAttachmentSummary } from "./ref-index-store.js";
import { startImageServer, isImageServerRunning, downloadFile, type ImageServerConfig } from "./image-server.js";
import { getImageSize, formatQQBotMarkdownImage, hasQQBotImageSize, DEFAULT_IMAGE_SIZE } from "./utils/image-size.js";
import { parseQQBotPayload, encodePayloadForCron, isCronReminderPayload, isMediaPayload, type CronReminderPayload, type MediaPayload } from "./utils/payload.js";
@@ -301,7 +302,55 @@ interface QueuedMessage {
channelId?: string;
guildId?: string;
groupOpenid?: string;
attachments?: Array<{ content_type: string; url: string; filename?: string; voice_wav_url?: string; asr_refer_text?: string }>;
attachments?: Array<{ content_type: string; url: string; filename?: string; voice_wav_url?: string; asr_refer_text?: string }>;
/** 被引用消息的 refIdx用户引用了哪条历史消息 */
refMsgIdx?: string;
/** 当前消息自身的 refIdx供将来被引用 */
msgIdx?: string;
}
/**
 * Parse reference indices out of a message_scene.ext array.
 * Example ext payload: ["", "ref_msg_idx=REFIDX_xxx", "msg_idx=REFIDX_yyy"]
 * If a prefix appears multiple times, the last occurrence wins.
 */
function parseRefIndices(ext?: string[]): { refMsgIdx?: string; msgIdx?: string } {
  if (!ext || ext.length === 0) return {};
  const REF_PREFIX = "ref_msg_idx=";
  const MSG_PREFIX = "msg_idx=";
  let refMsgIdx: string | undefined;
  let msgIdx: string | undefined;
  ext.forEach((entry) => {
    if (entry.startsWith(REF_PREFIX)) {
      refMsgIdx = entry.substring(REF_PREFIX.length);
    } else if (entry.startsWith(MSG_PREFIX)) {
      msgIdx = entry.substring(MSG_PREFIX.length);
    }
  });
  return { refMsgIdx, msgIdx };
}
/**
 * Build attachment summaries from a raw attachment list (for the ref-index cache).
 * @param attachments Raw attachment list.
 * @param localPaths Local paths aligned one-to-one with attachments (produced after download; null when not downloaded).
 * @returns Summaries in the same order, or undefined when there are no attachments.
 */
function buildAttachmentSummaries(
  attachments?: Array<{ content_type: string; url: string; filename?: string; voice_wav_url?: string }>,
  localPaths?: Array<string | null>,
): RefAttachmentSummary[] | undefined {
  if (!attachments?.length) return undefined;
  // Map a lowercased content type onto the coarse summary categories.
  // Voice is checked before "application/" so silk/amr containers classify as voice.
  const classify = (contentType: string): RefAttachmentSummary["type"] => {
    if (contentType.startsWith("image/")) return "image";
    if (
      contentType === "voice" ||
      contentType.startsWith("audio/") ||
      contentType.includes("silk") ||
      contentType.includes("amr")
    ) {
      return "voice";
    }
    if (contentType.startsWith("video/")) return "video";
    if (contentType.startsWith("application/") || contentType.startsWith("text/")) return "file";
    return "unknown";
  };
  return attachments.map((att, i) => ({
    type: classify(att.content_type?.toLowerCase() ?? ""),
    filename: att.filename,
    contentType: att.content_type,
    localPath: localPaths?.[i] ?? undefined,
  }));
}
/**
@@ -354,6 +403,40 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
});
log?.info(`[qqbot:${account.accountId}] API config: markdownSupport=${account.markdownSupport === true}`);
// 注册出站消息 refIdx 缓存钩子
// 所有消息发送函数在拿到 QQ 回包后,如果含 ref_idx 则自动回调此处缓存
onMessageSent((refIdx, meta) => {
log?.info(`[qqbot:${account.accountId}] onMessageSent called: refIdx=${refIdx}, mediaType=${meta.mediaType}, ttsText=${meta.ttsText?.slice(0, 30)}`);
const attachments: RefAttachmentSummary[] = [];
if (meta.mediaType) {
const localPath = meta.mediaLocalPath;
// filename 取路径的 basename如果没有路径信息则留空
const filename = localPath ? path.basename(localPath) : undefined;
const attachment: RefAttachmentSummary = {
type: meta.mediaType,
...(localPath ? { localPath } : {}),
...(filename ? { filename } : {}),
...(meta.mediaUrl ? { url: meta.mediaUrl } : {}),
};
// 如果是语音消息且有 TTS 原文本,保存到 transcript 并标记来源为 tts
if (meta.mediaType === "voice" && meta.ttsText) {
attachment.transcript = meta.ttsText;
attachment.transcriptSource = "tts";
log?.info(`[qqbot:${account.accountId}] Saving voice transcript (TTS): ${meta.ttsText.slice(0, 50)}`);
}
attachments.push(attachment);
}
setRefIndex(refIdx, {
content: (meta.text ?? "").slice(0, 500),
senderId: account.accountId,
senderName: account.accountId,
timestamp: Date.now(),
isBot: true,
...(attachments.length > 0 ? { attachments } : {}),
});
log?.info(`[qqbot:${account.accountId}] Cached outbound refIdx: ${refIdx}, attachments=${JSON.stringify(attachments)}`);
});
// TTS 配置验证
const ttsCfg = resolveTTSConfig(cfg as Record<string, unknown>);
if (ttsCfg) {
@@ -503,6 +586,8 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
stopBackgroundTokenRefresh(account.appId);
// P1-3: 保存已知用户数据
flushKnownUsers();
// P1-4: 保存引用索引数据
flushRefIndex();
});
const cleanup = () => {
@@ -586,6 +671,8 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
guildId?: string;
groupOpenid?: string;
attachments?: Array<{ content_type: string; url: string; filename?: string; voice_wav_url?: string; asr_refer_text?: string }>;
refMsgIdx?: string;
msgIdx?: string;
}) => {
log?.debug?.(`[qqbot:${account.accountId}] Received message: ${JSON.stringify(event)}`);
@@ -601,22 +688,26 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
});
// 发送输入状态提示(非关键,失败不影响主流程)
// 同时从响应中获取 ref_idx用于缓存入站消息
let inputNotifyRefIdx: string | undefined;
try {
let token = await getAccessToken(account.appId, account.clientSecret);
try {
await sendC2CInputNotify(token, event.senderId, event.messageId, 60);
const notifyResponse = await sendC2CInputNotify(token, event.senderId, event.messageId, 60);
inputNotifyRefIdx = notifyResponse.refIdx;
} catch (notifyErr) {
const errMsg = String(notifyErr);
if (errMsg.includes("token") || errMsg.includes("401") || errMsg.includes("11244")) {
log?.info(`[qqbot:${account.accountId}] InputNotify token expired, refreshing...`);
clearTokenCache(account.appId);
token = await getAccessToken(account.appId, account.clientSecret);
await sendC2CInputNotify(token, event.senderId, event.messageId, 60);
const notifyResponse = await sendC2CInputNotify(token, event.senderId, event.messageId, 60);
inputNotifyRefIdx = notifyResponse.refIdx;
} else {
throw notifyErr;
}
}
log?.info(`[qqbot:${account.accountId}] Sent input notify to ${event.senderId}`);
log?.info(`[qqbot:${account.accountId}] Sent input notify to ${event.senderId}${inputNotifyRefIdx ? `, got refIdx=${inputNotifyRefIdx}` : ""}`);
} catch (err) {
log?.error(`[qqbot:${account.accountId}] sendC2CInputNotify error: ${err}`);
}
@@ -661,6 +752,8 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
const voiceAttachmentUrls: string[] = [];
const voiceAsrReferTexts: string[] = [];
const voiceTranscripts: string[] = [];
// 每个附件的本地路径(与 event.attachments 一一对应,未下载的为 null
const attachmentLocalPaths: Array<string | null> = [];
const voiceTranscriptSources: Array<"stt" | "asr" | "fallback"> = [];
// 存到 .openclaw/qqbot 目录下的 downloads 文件夹
const downloadDir = getQQBotDataDir("downloads");
@@ -779,6 +872,7 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
otherAttachments.push(`[附件: ${localPath}]`);
}
log?.info(`[qqbot:${account.accountId}] Downloaded attachment to: ${localPath}`);
attachmentLocalPaths.push(localPath);
} else {
// 下载失败fallback 到原始 URL
log?.error(`[qqbot:${account.accountId}] Failed to download: ${attUrl}`);
@@ -792,6 +886,7 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
} else {
otherAttachments.push(`[附件: ${att.filename ?? att.content_type}] (下载失败)`);
}
attachmentLocalPaths.push(null);
}
}
@@ -820,6 +915,58 @@ export async function startGateway(ctx: GatewayContext): Promise<void> {
? (parsedContent.trim() ? `${parsedContent}\n${voiceText}` : voiceText) + attachmentInfo
: parsedContent + attachmentInfo;
// ============ 引用消息处理 ============
let replyToId: string | undefined;
let replyToBody: string | undefined;
let replyToSender: string | undefined;
let replyToIsQuote = false;
// 1. 查找被引用消息
if (event.refMsgIdx) {
const refEntry = getRefIndex(event.refMsgIdx);
if (refEntry) {
replyToId = event.refMsgIdx;
replyToBody = formatRefEntryForAgent(refEntry);
replyToSender = refEntry.senderName ?? refEntry.senderId;
replyToIsQuote = true;
log?.info(`[qqbot:${account.accountId}] Quote detected: refMsgIdx=${event.refMsgIdx}, sender=${replyToSender}, content="${replyToBody.slice(0, 80)}..."`);
} else {
log?.info(`[qqbot:${account.accountId}] Quote detected but refMsgIdx not in cache: ${event.refMsgIdx}`);
replyToId = event.refMsgIdx;
replyToIsQuote = true;
// 缓存未命中时 replyToBody 为空AI 只能知道"用户引用了一条消息"
}
}
// 2. 缓存当前消息自身的 msgIdx供将来被引用时查找
// 优先使用推送事件中的 msgIdx来自 message_scene.ext否则使用 InputNotify 返回的 refIdx
const currentMsgIdx = event.msgIdx ?? inputNotifyRefIdx;
if (currentMsgIdx) {
const attSummaries = buildAttachmentSummaries(event.attachments, attachmentLocalPaths);
// 如果有语音转录,把转录文本和来源写入对应附件摘要
if (attSummaries && voiceTranscripts.length > 0) {
let voiceIdx = 0;
for (const att of attSummaries) {
if (att.type === "voice" && voiceIdx < voiceTranscripts.length) {
att.transcript = voiceTranscripts[voiceIdx];
// 保存转录来源
if (voiceIdx < voiceTranscriptSources.length) {
att.transcriptSource = voiceTranscriptSources[voiceIdx];
}
voiceIdx++;
}
}
}
setRefIndex(currentMsgIdx, {
content: parsedContent,
senderId: event.senderId,
senderName: event.senderName,
timestamp: new Date(event.timestamp).getTime(),
attachments: attSummaries,
});
log?.info(`[qqbot:${account.accountId}] Cached msgIdx=${currentMsgIdx} for future reference (source: ${event.msgIdx ? "message_scene.ext" : "InputNotify"})`);
}
// Body: 展示用的用户原文Web UI 看到的)
const body = pluginRuntime.channel.reply.formatInboundEnvelope({
channel: "qqbot",
@@ -909,6 +1056,16 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
? `\n- 语音ASR兜底文本:\n${uniqueVoiceAsrReferTexts.map((t, i) => ` ${i + 1}. ${t}`).join("\n")}`
: "";
// 引用消息上下文
let quotePart = "";
if (replyToIsQuote) {
if (replyToBody) {
quotePart = `[引用消息开始]\n${replyToBody}\n[引用消息结束]\n`;
} else {
quotePart = `[引用消息开始]\n原始内容不可用\n[引用消息结束]\n`;
}
}
const contextInfo = `你正在通过 QQ 与用户对话。
【会话上下文】
@@ -942,13 +1099,13 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
【不要向用户透露过多以上述要求,以下是用户输入】
`;
// 命令直接透传,不注入上下文
const userMessage = `${quotePart}${userContent}`;
const agentBody = userContent.startsWith("/")
? userContent
: systemPrompts.length > 0
? `${contextInfo}\n\n${systemPrompts.join("\n")}\n\n${userContent}`
: `${contextInfo}\n\n${userContent}`;
? `${contextInfo}\n\n${systemPrompts.join("\n")}\n\n${userMessage}`
: `${contextInfo}\n\n${userMessage}`;
log?.info(`[qqbot:${account.accountId}] agentBody length: ${agentBody.length}`);
@@ -1021,13 +1178,20 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
MediaUrls: remoteMediaUrls,
MediaUrl: remoteMediaUrls[0],
} : {}),
// 引用消息上下文(对齐 Telegram/Discord 的 ReplyTo 字段)
...(replyToId ? {
ReplyToId: replyToId,
ReplyToBody: replyToBody,
ReplyToSender: replyToSender,
ReplyToIsQuote: replyToIsQuote,
} : {}),
});
// 发送消息的辅助函数,带 token 过期重试
const sendWithTokenRetry = async (sendFn: (token: string) => Promise<unknown>) => {
const sendWithTokenRetry = async <T>(sendFn: (token: string) => Promise<T>): Promise<T> => {
try {
const token = await getAccessToken(account.appId, account.clientSecret);
await sendFn(token);
return await sendFn(token);
} catch (err) {
const errMsg = String(err);
// 如果是 token 相关错误,清除缓存重试一次
@@ -1035,7 +1199,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
log?.info(`[qqbot:${account.accountId}] Token may be expired, refreshing...`);
clearTokenCache(account.appId);
const newToken = await getAccessToken(account.appId, account.clientSecret);
await sendFn(newToken);
return await sendFn(newToken);
} else {
throw err;
}
@@ -1178,6 +1342,19 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
log?.info(`[qqbot:${account.accountId}] Block deliver after ${toolDeliverCount} tool deliver(s)`);
}
// ============ 引用回复 ============
// 机器人回复时引用用户当前发来的消息event.msgIdx 是用户消息自身的 REFIDX
// 只在第一条回复消息上附加引用,后续消息不重复引用
const quoteRef = event.msgIdx;
let quoteRefUsed = false;
const consumeQuoteRef = (): string | undefined => {
if (quoteRef && !quoteRefUsed) {
quoteRefUsed = true;
return quoteRef;
}
return undefined;
};
let replyText = payload.text ?? "";
// ============ 媒体标签解析 ============
@@ -1299,12 +1476,13 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
// 发送文本
try {
await sendWithTokenRetry(async (token) => {
const ref = consumeQuoteRef();
if (event.type === "c2c") {
await sendC2CMessage(token, event.senderId, item.content, event.messageId);
return await sendC2CMessage(token, event.senderId, item.content, event.messageId, ref);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupMessage(token, event.groupOpenid, item.content, event.messageId);
return await sendGroupMessage(token, event.groupOpenid, item.content, event.messageId);
} else if (event.channelId) {
await sendChannelMessage(token, event.channelId, item.content, event.messageId);
return await sendChannelMessage(token, event.channelId, item.content, event.messageId);
}
});
log?.info(`[qqbot:${account.accountId}] Sent text: ${item.content.slice(0, 50)}...`);
@@ -1375,10 +1553,11 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
continue;
}
// 发送图片
// 发送图片(传递原始本地路径以便 refIdx 缓存记录来源)
const imgLocalPath = isLocalPath ? imagePath : undefined;
await sendWithTokenRetry(async (token) => {
if (event.type === "c2c") {
await sendC2CImageMessage(token, event.senderId, imageUrl, event.messageId);
await sendC2CImageMessage(token, event.senderId, imageUrl, event.messageId, undefined, imgLocalPath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupImageMessage(token, event.groupOpenid, imageUrl, event.messageId);
} else if (event.channelId) {
@@ -1421,7 +1600,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
await sendWithTokenRetry(async (token) => {
if (event.type === "c2c") {
await sendC2CVoiceMessage(token, event.senderId, silkBase64!, event.messageId);
await sendC2CVoiceMessage(token, event.senderId, silkBase64!, event.messageId, undefined, voicePath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupVoiceMessage(token, event.groupOpenid, silkBase64!, event.messageId);
} else if (event.channelId) {
@@ -1481,7 +1660,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
log?.info(`[qqbot:${account.accountId}] Read local video (${formatFileSize(fileBuffer.length)}): ${videoPath}`);
if (event.type === "c2c") {
await sendC2CVideoMessage(token, event.senderId, undefined, videoBase64, event.messageId);
await sendC2CVideoMessage(token, event.senderId, undefined, videoBase64, event.messageId, undefined, videoPath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupVideoMessage(token, event.groupOpenid, undefined, videoBase64, event.messageId);
} else if (event.channelId) {
@@ -1543,7 +1722,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
log?.info(`[qqbot:${account.accountId}] Read local file (${formatFileSize(fileBuffer.length)}): ${filePath}`);
if (event.type === "c2c") {
await sendC2CFileMessage(token, event.senderId, fileBase64, undefined, event.messageId, fileName);
await sendC2CFileMessage(token, event.senderId, fileBase64, undefined, event.messageId, fileName, filePath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupFileMessage(token, event.groupOpenid, fileBase64, undefined, event.messageId, fileName);
} else if (event.channelId) {
@@ -1623,7 +1802,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
if (parsedPayload.mediaType === "image") {
// 处理图片发送(展开 ~ 路径)
let imageUrl = normalizePath(parsedPayload.path);
const originalImagePath = parsedPayload.source === "file" ? imageUrl : undefined;
// 如果是本地文件,转换为 Base64 Data URL
if (parsedPayload.source === "file") {
try {
@@ -1661,11 +1841,11 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
}
}
// 发送图片
// 发送图片(传递原始本地路径以便 refIdx 缓存记录来源)
try {
await sendWithTokenRetry(async (token) => {
if (event.type === "c2c") {
await sendC2CImageMessage(token, event.senderId, imageUrl, event.messageId);
await sendC2CImageMessage(token, event.senderId, imageUrl, event.messageId, undefined, originalImagePath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupImageMessage(token, event.groupOpenid, imageUrl, event.messageId);
} else if (event.channelId) {
@@ -1705,12 +1885,12 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
} else {
log?.info(`[qqbot:${account.accountId}] TTS: "${ttsText.slice(0, 50)}..." via ${ttsCfg.model}`);
const ttsDir = getQQBotDataDir("tts");
const { silkBase64, duration } = await textToSilk(ttsText, ttsCfg, ttsDir);
log?.info(`[qqbot:${account.accountId}] TTS done: ${formatDuration(duration)}, uploading voice...`);
const { silkPath, silkBase64, duration } = await textToSilk(ttsText, ttsCfg, ttsDir);
log?.info(`[qqbot:${account.accountId}] TTS done: ${formatDuration(duration)}, file saved: ${silkPath}`);
await sendWithTokenRetry(async (token) => {
if (event.type === "c2c") {
await sendC2CVoiceMessage(token, event.senderId, silkBase64, event.messageId);
await sendC2CVoiceMessage(token, event.senderId, silkBase64, event.messageId, ttsText, silkPath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupVoiceMessage(token, event.groupOpenid, silkBase64, event.messageId);
} else if (event.channelId) {
@@ -1758,7 +1938,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
log?.info(`[qqbot:${account.accountId}] Read local video (${formatFileSize(fileBuffer.length)}): ${videoPath}`);
if (event.type === "c2c") {
await sendC2CVideoMessage(token, event.senderId, undefined, videoBase64, event.messageId);
await sendC2CVideoMessage(token, event.senderId, undefined, videoBase64, event.messageId, undefined, videoPath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupVideoMessage(token, event.groupOpenid, undefined, videoBase64, event.messageId);
} else if (event.channelId) {
@@ -1816,7 +1996,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
const fileBuffer = await readFileAsync(filePath);
const fileBase64 = fileBuffer.toString("base64");
if (event.type === "c2c") {
await sendC2CFileMessage(token, event.senderId, fileBase64, undefined, event.messageId, fileName);
await sendC2CFileMessage(token, event.senderId, fileBase64, undefined, event.messageId, fileName, filePath);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupFileMessage(token, event.groupOpenid, fileBase64, undefined, event.messageId, fileName);
} else if (event.channelId) {
@@ -2072,12 +2252,13 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
if (textWithoutImages.trim()) {
try {
await sendWithTokenRetry(async (token) => {
const ref = consumeQuoteRef();
if (event.type === "c2c") {
await sendC2CMessage(token, event.senderId, textWithoutImages, event.messageId);
return await sendC2CMessage(token, event.senderId, textWithoutImages, event.messageId, ref);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupMessage(token, event.groupOpenid, textWithoutImages, event.messageId);
return await sendGroupMessage(token, event.groupOpenid, textWithoutImages, event.messageId);
} else if (event.channelId) {
await sendChannelMessage(token, event.channelId, textWithoutImages, event.messageId);
return await sendChannelMessage(token, event.channelId, textWithoutImages, event.messageId);
}
});
log?.info(`[qqbot:${account.accountId}] Sent markdown message with ${httpImageUrls.length} HTTP images (${event.type})`);
@@ -2123,12 +2304,13 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
// 发送文本消息
if (textWithoutImages.trim()) {
await sendWithTokenRetry(async (token) => {
const ref = consumeQuoteRef();
if (event.type === "c2c") {
await sendC2CMessage(token, event.senderId, textWithoutImages, event.messageId);
return await sendC2CMessage(token, event.senderId, textWithoutImages, event.messageId, ref);
} else if (event.type === "group" && event.groupOpenid) {
await sendGroupMessage(token, event.groupOpenid, textWithoutImages, event.messageId);
return await sendGroupMessage(token, event.groupOpenid, textWithoutImages, event.messageId);
} else if (event.channelId) {
await sendChannelMessage(token, event.channelId, textWithoutImages, event.messageId);
return await sendChannelMessage(token, event.channelId, textWithoutImages, event.messageId);
}
});
log?.info(`[qqbot:${account.accountId}] Sent text reply (${event.type})`);
@@ -2277,6 +2459,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
break;
case 0: // Dispatch
log?.info(`[qqbot:${account.accountId}] 📩 Dispatch event: t=${t}, d=${JSON.stringify(d)}`);
if (t === "READY") {
const readyData = d as { session_id: string };
sessionId = readyData.session_id;
@@ -2317,6 +2500,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
type: "c2c",
accountId: account.accountId,
});
// 解析引用索引
const c2cRefs = parseRefIndices(event.message_scene?.ext);
// 使用消息队列异步处理,防止阻塞心跳
enqueueMessage({
type: "c2c",
@@ -2325,6 +2510,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
messageId: event.id,
timestamp: event.timestamp,
attachments: event.attachments,
refMsgIdx: c2cRefs.refMsgIdx,
msgIdx: c2cRefs.msgIdx,
});
} else if (t === "AT_MESSAGE_CREATE") {
const event = d as GuildMessageEvent;
@@ -2335,6 +2522,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
nickname: event.author.username,
accountId: account.accountId,
});
const guildRefs = parseRefIndices((event as any).message_scene?.ext);
enqueueMessage({
type: "guild",
senderId: event.author.id,
@@ -2345,6 +2533,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
channelId: event.channel_id,
guildId: event.guild_id,
attachments: event.attachments,
refMsgIdx: guildRefs.refMsgIdx,
msgIdx: guildRefs.msgIdx,
});
} else if (t === "DIRECT_MESSAGE_CREATE") {
const event = d as GuildMessageEvent;
@@ -2355,6 +2545,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
nickname: event.author.username,
accountId: account.accountId,
});
const dmRefs = parseRefIndices((event as any).message_scene?.ext);
enqueueMessage({
type: "dm",
senderId: event.author.id,
@@ -2364,6 +2555,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
timestamp: event.timestamp,
guildId: event.guild_id,
attachments: event.attachments,
refMsgIdx: dmRefs.refMsgIdx,
msgIdx: dmRefs.msgIdx,
});
} else if (t === "GROUP_AT_MESSAGE_CREATE") {
const event = d as GroupMessageEvent;
@@ -2374,6 +2567,7 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
groupOpenid: event.group_openid,
accountId: account.accountId,
});
const groupRefs = parseRefIndices(event.message_scene?.ext);
enqueueMessage({
type: "group",
senderId: event.author.member_openid,
@@ -2382,6 +2576,8 @@ ${ttsHint}${sttHint}${asrFallbackHint}${voiceForwardHint}`;
timestamp: event.timestamp,
groupOpenid: event.group_openid,
attachments: event.attachments,
refMsgIdx: groupRefs.refMsgIdx,
msgIdx: groupRefs.msgIdx,
});
}
break;

View File

@@ -443,9 +443,12 @@ export async function downloadFile(
// 确定文件名
let finalFilename: string;
if (originalFilename) {
// QQ 平台返回的 filename 可能是 URL 编码的(如 %E7%AC%94%E5%A2%A8...),先解码
let decodedFilename = originalFilename;
try { decodedFilename = decodeURIComponent(originalFilename); } catch { /* keep original */ }
// 使用原始文件名,但添加时间戳避免冲突
const ext = path.extname(originalFilename);
const baseName = path.basename(originalFilename, ext);
const ext = path.extname(decodedFilename);
const baseName = path.basename(decodedFilename, ext);
const timestamp = Date.now();
finalFilename = `${baseName}_${timestamp}${ext}`;
} else {

View File

@@ -170,6 +170,8 @@ export interface OutboundResult {
messageId?: string;
timestamp?: string | number;
error?: string;
/** 出站消息的引用索引ext_info.ref_idx供引用消息缓存使用 */
refIdx?: string;
}
/**
@@ -398,27 +400,27 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
if (target.type === "c2c") {
const result = await sendC2CMessage(accessToken, target.id, item.content, replyToId);
recordMessageReply(replyToId);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: result.ext_info?.ref_idx };
} else if (target.type === "group") {
const result = await sendGroupMessage(accessToken, target.id, item.content, replyToId);
recordMessageReply(replyToId);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: result.ext_info?.ref_idx };
} else {
const result = await sendChannelMessage(accessToken, target.id, item.content, replyToId);
recordMessageReply(replyToId);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
}
} else {
// 主动消息
if (target.type === "c2c") {
const result = await sendProactiveC2CMessage(accessToken, target.id, item.content);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else if (target.type === "group") {
const result = await sendProactiveGroupMessage(accessToken, target.id, item.content);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else {
const result = await sendChannelMessage(accessToken, target.id, item.content);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
}
}
console.log(`[qqbot] sendText: Sent text part: ${item.content.slice(0, 30)}...`);
@@ -458,7 +460,7 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
// 发送图片
if (target.type === "c2c") {
const result = await sendC2CImageMessage(accessToken, target.id, imageUrl, replyToId ?? undefined);
const result = await sendC2CImageMessage(accessToken, target.id, imageUrl, replyToId ?? undefined, undefined, isHttpUrl ? undefined : imagePath);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
} else if (target.type === "group") {
const result = await sendGroupImageMessage(accessToken, target.id, imageUrl, replyToId ?? undefined);
@@ -559,7 +561,7 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
console.log(`[qqbot] sendText: Read local video (${formatFileSize(fileBuffer.length)}): ${videoPath}`);
if (target.type === "c2c") {
const result = await sendC2CVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined);
const result = await sendC2CVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined, undefined, videoPath);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
} else if (target.type === "group") {
const result = await sendGroupVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined);
@@ -615,7 +617,7 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
console.log(`[qqbot] sendText: Read local file (${formatFileSize(fileBuffer.length)}): ${filePath}`);
if (target.type === "c2c") {
const result = await sendC2CFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName);
const result = await sendC2CFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName, filePath);
lastResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
} else if (target.type === "group") {
const result = await sendGroupFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName);
@@ -665,17 +667,19 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
// 如果没有 replyToId使用主动发送接口
if (!replyToId) {
let outResult: OutboundResult;
if (target.type === "c2c") {
const result = await sendProactiveC2CMessage(accessToken, target.id, text);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else if (target.type === "group") {
const result = await sendProactiveGroupMessage(accessToken, target.id, text);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else {
// 频道暂不支持主动消息
const result = await sendChannelMessage(accessToken, target.id, text);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
}
return outResult;
}
// 有 replyToId使用被动回复接口
@@ -683,17 +687,17 @@ export async function sendText(ctx: OutboundContext): Promise<OutboundResult> {
const result = await sendC2CMessage(accessToken, target.id, text, replyToId);
// 记录回复次数
recordMessageReply(replyToId);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: result.ext_info?.ref_idx };
} else if (target.type === "group") {
const result = await sendGroupMessage(accessToken, target.id, text, replyToId);
// 记录回复次数
recordMessageReply(replyToId);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: result.ext_info?.ref_idx };
} else {
const result = await sendChannelMessage(accessToken, target.id, text, replyToId);
// 记录回复次数
recordMessageReply(replyToId);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
}
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
@@ -731,23 +735,25 @@ export async function sendProactiveMessage(
const target = parseTarget(to);
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: target parsed, type=${target.type}, id=${target.id}`);
let outResult: OutboundResult;
if (target.type === "c2c") {
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: sending proactive C2C message to user=${target.id}`);
const result = await sendProactiveC2CMessage(accessToken, target.id, text);
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: proactive C2C message sent successfully, messageId=${result.id}`);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else if (target.type === "group") {
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: sending proactive group message to group=${target.id}`);
const result = await sendProactiveGroupMessage(accessToken, target.id, text);
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: proactive group message sent successfully, messageId=${result.id}`);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
} else {
// 频道暂不支持主动消息,使用普通发送
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: sending channel message to channel=${target.id}`);
const result = await sendChannelMessage(accessToken, target.id, text);
console.log(`[${timestamp}] [qqbot] sendProactiveMessage: channel message sent successfully, messageId=${result.id}`);
return { channel: "qqbot", messageId: result.id, timestamp: result.timestamp };
outResult = { channel: "qqbot", messageId: result.id, timestamp: result.timestamp, refIdx: (result as any).ext_info?.ref_idx };
}
return outResult;
} catch (err) {
const errorMessage = err instanceof Error ? err.message : String(err);
console.error(`[${timestamp}] [qqbot] sendProactiveMessage: error: ${errorMessage}`);
@@ -901,7 +907,7 @@ export async function sendMedia(ctx: MediaOutboundContext): Promise<OutboundResu
let imageResult: { id: string; timestamp: number | string };
if (target.type === "c2c") {
imageResult = await sendC2CImageMessage(
accessToken, target.id, processedMediaUrl, replyToId ?? undefined, undefined
accessToken, target.id, processedMediaUrl, replyToId ?? undefined, undefined, isLocalPath ? mediaUrl : undefined
);
} else if (target.type === "group") {
imageResult = await sendGroupImageMessage(
@@ -926,7 +932,7 @@ export async function sendMedia(ctx: MediaOutboundContext): Promise<OutboundResu
}
}
return { channel: "qqbot", messageId: imageResult.id, timestamp: imageResult.timestamp };
return { channel: "qqbot", messageId: imageResult.id, timestamp: imageResult.timestamp, refIdx: (imageResult as any).ext_info?.ref_idx };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
return { channel: "qqbot", error: message };
@@ -1003,7 +1009,7 @@ async function sendVoiceFile(ctx: MediaOutboundContext): Promise<OutboundResult>
}
console.log(`[qqbot] sendVoiceFile: voice message sent`);
return { channel: "qqbot", messageId: voiceResult.id, timestamp: voiceResult.timestamp };
return { channel: "qqbot", messageId: voiceResult.id, timestamp: voiceResult.timestamp, refIdx: (voiceResult as any).ext_info?.ref_idx };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
console.error(`[qqbot] sendVoiceFile: failed: ${message}`);
@@ -1065,7 +1071,7 @@ async function sendVideoUrl(ctx: MediaOutboundContext): Promise<OutboundResult>
}
console.log(`[qqbot] sendVideoUrl: video message sent`);
return { channel: "qqbot", messageId: videoResult.id, timestamp: videoResult.timestamp };
return { channel: "qqbot", messageId: videoResult.id, timestamp: videoResult.timestamp, refIdx: (videoResult as any).ext_info?.ref_idx };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
console.error(`[qqbot] sendVideoUrl: failed: ${message}`);
@@ -1106,7 +1112,7 @@ async function sendVideoFile(ctx: MediaOutboundContext): Promise<OutboundResult>
let videoResult: { id: string; timestamp: number | string };
if (target.type === "c2c") {
videoResult = await sendC2CVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined);
videoResult = await sendC2CVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined, undefined, mediaUrl);
} else if (target.type === "group") {
videoResult = await sendGroupVideoMessage(accessToken, target.id, undefined, videoBase64, replyToId ?? undefined);
} else {
@@ -1128,7 +1134,7 @@ async function sendVideoFile(ctx: MediaOutboundContext): Promise<OutboundResult>
}
console.log(`[qqbot] sendVideoFile: video message sent`);
return { channel: "qqbot", messageId: videoResult.id, timestamp: videoResult.timestamp };
return { channel: "qqbot", messageId: videoResult.id, timestamp: videoResult.timestamp, refIdx: (videoResult as any).ext_info?.ref_idx };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
console.error(`[qqbot] sendVideoFile: failed: ${message}`);
@@ -1191,7 +1197,7 @@ async function sendDocumentFile(ctx: MediaOutboundContext): Promise<OutboundResu
console.log(`[qqbot] sendDocumentFile: read local file (${formatFileSize(fileBuffer.length)}), uploading...`);
if (target.type === "c2c") {
fileResult = await sendC2CFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName);
fileResult = await sendC2CFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName, mediaUrl);
} else if (target.type === "group") {
fileResult = await sendGroupFileMessage(accessToken, target.id, fileBase64, undefined, replyToId ?? undefined, fileName);
} else {
@@ -1214,7 +1220,7 @@ async function sendDocumentFile(ctx: MediaOutboundContext): Promise<OutboundResu
}
console.log(`[qqbot] sendDocumentFile: file message sent`);
return { channel: "qqbot", messageId: fileResult.id, timestamp: fileResult.timestamp };
return { channel: "qqbot", messageId: fileResult.id, timestamp: fileResult.timestamp, refIdx: (fileResult as any).ext_info?.ref_idx };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
console.error(`[qqbot] sendDocumentFile: failed: ${message}`);

358
src/ref-index-store.ts Normal file
View File

@@ -0,0 +1,358 @@
/**
* QQ Bot 引用索引持久化存储
*
* QQ Bot 使用 REFIDX_xxx 索引体系做引用消息,
* 入站事件只有索引值,无 API 可回查内容。
* 采用 内存缓存 + JSONL 追加写持久化 方案,确保重启后历史引用仍可命中。
*
* 存储位置:~/.openclaw/qqbot/data/ref-index.jsonl
*
* 每行格式:{"k":"REFIDX_xxx","v":{...},"t":1709000000}
* - k = refIdx 键
* - v = 消息数据
* - t = 写入时间(用于 TTL 淘汰和 compact
*/
import fs from "node:fs";
import path from "node:path";
import { getQQBotDataDir } from "./utils/platform.js";
// ============ Stored message digest ============
/** Digest of one message, stored under its REFIDX key. */
export interface RefIndexEntry {
  /** Message text content (digest, truncated on write). */
  content: string;
  /** Sender ID. */
  senderId: string;
  /** Sender display name. */
  senderName?: string;
  /** Message timestamp (ms). */
  timestamp: number;
  /** Whether this message was sent by the bot itself. */
  isBot?: boolean;
  /** Attachment digests (image/voice/video/file, etc.). */
  attachments?: RefAttachmentSummary[];
}
/** Attachment digest: keeps the local path, online URL and a type tag. */
export interface RefAttachmentSummary {
  /** Attachment type. */
  type: "image" | "voice" | "video" | "file" | "unknown";
  /** File name (if any). */
  filename?: string;
  /** MIME type. */
  contentType?: string;
  /** Voice transcript: inbound = STT/ASR recognition result; outbound = original TTS text. */
  transcript?: string;
  /** Transcript source: stt = local STT, asr = official QQ ASR, tts = original TTS text, fallback = placeholder text. */
  transcriptSource?: "stt" | "asr" | "tts" | "fallback";
  /** Local file path after download (accessible later when the message is referenced). */
  localPath?: string;
  /** Online source URL (public image/file, etc.). */
  url?: string;
}
// ============ Configuration ============
const STORAGE_DIR = getQQBotDataDir("data");
const REF_INDEX_FILE = path.join(STORAGE_DIR, "ref-index.jsonl");
const MAX_CONTENT_LENGTH = 500; // max characters of message content kept per entry
const MAX_ENTRIES = 50000; // max number of entries kept in the in-memory cache
const TTL_MS = 7 * 24 * 60 * 60 * 1000; // entries expire after 7 days
const COMPACT_THRESHOLD_RATIO = 2; // compact when file lines exceed live entries by this factor
// ============ JSONL line format ============
/** Shape of one persisted JSONL line. */
interface RefIndexLine {
  /** refIdx key. */
  k: string;
  /** Message payload. */
  v: RefIndexEntry;
  /** Write time (ms) — used for TTL eviction and compaction. */
  t: number;
}
// ============ In-memory cache ============
// Lazily loaded from the JSONL file on first access; null until then.
let cache: Map<string, RefIndexEntry & { _createdAt: number }> | null = null;
let totalLinesOnDisk = 0; // total lines in the JSONL file (including expired / superseded ones)
/**
 * Lazily load the JSONL file into the in-memory cache (triggered on first access).
 * Expired lines are skipped; a later line for the same key overwrites an earlier one.
 * On any unexpected failure the cache falls back to an empty map.
 */
function loadFromFile(): Map<string, RefIndexEntry & { _createdAt: number }> {
  if (cache !== null) return cache;
  const loaded = new Map<string, RefIndexEntry & { _createdAt: number }>();
  cache = loaded;
  totalLinesOnDisk = 0;
  try {
    if (!fs.existsSync(REF_INDEX_FILE)) {
      return loaded;
    }
    const now = Date.now();
    let expired = 0;
    for (const rawLine of fs.readFileSync(REF_INDEX_FILE, "utf-8").split("\n")) {
      const text = rawLine.trim();
      if (!text) continue;
      totalLinesOnDisk++;
      try {
        const parsed = JSON.parse(text) as RefIndexLine;
        if (!parsed.k || !parsed.v || !parsed.t) continue;
        // Drop entries past their TTL.
        if (now - parsed.t > TTL_MS) {
          expired++;
          continue;
        }
        loaded.set(parsed.k, { ...parsed.v, _createdAt: parsed.t });
      } catch {
        // Skip corrupted lines.
      }
    }
    console.log(
      `[ref-index-store] Loaded ${loaded.size} entries from ${totalLinesOnDisk} lines (${expired} expired)`,
    );
    // Rewrite the file at startup if it has accumulated too much dead weight.
    if (shouldCompact()) {
      compactFile();
    }
  } catch (err) {
    console.error(`[ref-index-store] Failed to load: ${err}`);
    cache = new Map();
    return cache;
  }
  return loaded;
}
// ============ JSONL append-write ============
/** Append a single record to the JSONL file, creating the directory if needed. */
function appendLine(line: RefIndexLine): void {
  try {
    ensureDir();
    const serialized = `${JSON.stringify(line)}\n`;
    fs.appendFileSync(REF_INDEX_FILE, serialized, "utf-8");
    totalLinesOnDisk += 1;
  } catch (err) {
    console.error(`[ref-index-store] Failed to append: ${err}`);
  }
}
/**
 * Ensure the storage directory exists.
 * `mkdirSync` with `recursive: true` is already a no-op when the directory
 * exists, so the previous `existsSync` pre-check was redundant and introduced
 * a TOCTOU race (dir created/removed between check and mkdir).
 */
function ensureDir(): void {
  fs.mkdirSync(STORAGE_DIR, { recursive: true });
}
// ============ Compact: rewrite the file, dropping expired and superseded lines ============
/** True when the on-disk file has grown well beyond the live entry count. */
function shouldCompact(): boolean {
  if (!cache) return false;
  // Only compact once the file is meaningfully large AND mostly dead lines.
  const bloated = totalLinesOnDisk > cache.size * COMPACT_THRESHOLD_RATIO;
  return bloated && totalLinesOnDisk > 1000;
}
/**
 * Rewrite the JSONL file from the in-memory cache, dropping expired and
 * superseded lines. Writes to a temp file then renames it for an atomic swap.
 */
function compactFile(): void {
  if (!cache) return;
  const linesBefore = totalLinesOnDisk;
  try {
    ensureDir();
    const tmpPath = REF_INDEX_FILE + ".tmp";
    const serialized = [...cache.entries()].map(([key, entry]) => {
      const record: RefIndexLine = {
        k: key,
        v: {
          content: entry.content,
          senderId: entry.senderId,
          senderName: entry.senderName,
          timestamp: entry.timestamp,
          isBot: entry.isBot,
          attachments: entry.attachments,
        },
        t: entry._createdAt, // preserve the original write time for TTL
      };
      return JSON.stringify(record);
    });
    fs.writeFileSync(tmpPath, serialized.join("\n") + "\n", "utf-8");
    fs.renameSync(tmpPath, REF_INDEX_FILE);
    totalLinesOnDisk = cache.size;
    console.log(`[ref-index-store] Compacted: ${linesBefore} lines → ${totalLinesOnDisk} lines`);
  } catch (err) {
    console.error(`[ref-index-store] Compact failed: ${err}`);
  }
}
// ============ Overflow eviction ============
/** Drop expired entries; if the cache is still full, drop the oldest ones. */
function evictIfNeeded(): void {
  if (!cache || cache.size < MAX_ENTRIES) return;
  const now = Date.now();
  // Pass 1: remove everything past its TTL.
  for (const [key, entry] of cache) {
    if (now - entry._createdAt > TTL_MS) cache.delete(key);
  }
  // Pass 2: still over the limit — remove the oldest entries plus some slack.
  if (cache.size >= MAX_ENTRIES) {
    const byAge = [...cache.entries()].sort((a, b) => a[1]._createdAt - b[1]._createdAt);
    const victims = byAge.slice(0, cache.size - MAX_ENTRIES + 1000);
    for (const [key] of victims) cache.delete(key);
    console.log(`[ref-index-store] Evicted ${victims.length} oldest entries`);
  }
}
// ============ Public API ============
/**
 * Store a refIdx → message mapping (in-memory cache + JSONL append).
 * Content is truncated to MAX_CONTENT_LENGTH before storage.
 */
export function setRefIndex(refIdx: string, entry: RefIndexEntry): void {
  const store = loadFromFile();
  evictIfNeeded();
  const now = Date.now();
  // Build the sanitized record once and use it for both the memory cache and
  // the JSONL line (the two copies previously had to be kept in sync by hand).
  const sanitized: RefIndexEntry = {
    content: entry.content.slice(0, MAX_CONTENT_LENGTH),
    senderId: entry.senderId,
    senderName: entry.senderName,
    timestamp: entry.timestamp,
    isBot: entry.isBot,
    attachments: entry.attachments,
  };
  store.set(refIdx, { ...sanitized, _createdAt: now });
  // Persist as an append-only JSONL line.
  appendLine({ k: refIdx, v: sanitized, t: now });
  // Rewrite the file when it has accumulated too many dead lines.
  if (shouldCompact()) {
    compactFile();
  }
}
/**
 * Look up a referenced message by its refIdx.
 * Returns null when the key is unknown or the entry has expired
 * (expired entries are lazily removed from the cache on access).
 */
export function getRefIndex(refIdx: string): RefIndexEntry | null {
  const store = loadFromFile();
  const hit = store.get(refIdx);
  if (!hit) return null;
  if (Date.now() - hit._createdAt > TTL_MS) {
    store.delete(refIdx);
    return null;
  }
  // Strip the internal _createdAt bookkeeping field before returning.
  const { content, senderId, senderName, timestamp, isBot, attachments } = hit;
  return { content, senderId, senderName, timestamp, isBot, attachments };
}
/**
 * Format a referenced message into a human-readable description
 * (injected into the AI context when a quoted message is resolved).
 *
 * Output: the text content followed by one bracketed tag per attachment,
 * joined by spaces; "[空消息]" when there is nothing to show.
 */
export function formatRefEntryForAgent(entry: RefIndexEntry): string {
  const parts: string[] = [];
  // Text content
  if (entry.content.trim()) {
    parts.push(entry.content);
  }
  // Attachment descriptions
  if (entry.attachments?.length) {
    for (const att of entry.attachments) {
      const sourceHint = att.localPath ? ` (${att.localPath})` : att.url ? ` (${att.url})` : "";
      switch (att.type) {
        case "image":
          parts.push(`[图片${att.filename ? `: ${att.filename}` : ""}${sourceHint}]`);
          break;
        case "voice":
          if (att.transcript) {
            const sourceMap = { stt: "本地识别", asr: "官方识别", tts: "TTS原文", fallback: "兜底文案" };
            const sourceTag = att.transcriptSource ? ` - ${sourceMap[att.transcriptSource] || att.transcriptSource}` : "";
            // Fix: close the fullwidth parenthesis opened by "(内容:" — the
            // label previously ended "...] " with the paren left unbalanced.
            parts.push(`[语音消息(内容: "${att.transcript}"${sourceTag}${sourceHint})]`);
          } else {
            parts.push(`[语音消息${sourceHint}]`);
          }
          break;
        case "video":
          parts.push(`[视频${att.filename ? `: ${att.filename}` : ""}${sourceHint}]`);
          break;
        case "file":
          parts.push(`[文件${att.filename ? `: ${att.filename}` : ""}${sourceHint}]`);
          break;
        default:
          parts.push(`[附件${att.filename ? `: ${att.filename}` : ""}${sourceHint}]`);
      }
    }
  }
  return parts.join(" ") || "[空消息]";
}
/**
 * Force a compact before process exit to keep on-disk data consistent.
 * No-op when the cache was never loaded or the file is not bloated.
 */
export function flushRefIndex(): void {
  if (!cache) return;
  if (shouldCompact()) compactFile();
}
/**
 * Cache statistics (debugging aid): live entry count, configured capacity,
 * on-disk line count and the backing file path.
 */
export function getRefIndexStats(): {
  size: number;
  maxEntries: number;
  totalLinesOnDisk: number;
  filePath: string;
} {
  const { size } = loadFromFile();
  return { size, maxEntries: MAX_ENTRIES, totalLinesOnDisk, filePath: REF_INDEX_FILE };
}

View File

@@ -102,6 +102,8 @@ export interface C2CMessageEvent {
timestamp: string;
message_scene?: {
source: string;
/** ext 数组,可能包含 ref_msg_idx=REFIDX_xxx引用的消息和 msg_idx=REFIDX_xxx自身索引 */
ext?: string[];
};
attachments?: MessageAttachment[];
}
@@ -140,6 +142,10 @@ export interface GroupMessageEvent {
timestamp: string;
group_id: string;
group_openid: string;
message_scene?: {
source: string;
ext?: string[];
};
attachments?: MessageAttachment[];
}

View File

@@ -105,11 +105,22 @@ export function expandTilde(p: string): string {
}
/**
* 对路径进行完整的规范化处理:展开波浪线 + 去除首尾空白
* 对路径进行完整的规范化处理:剥离 file:// 前缀 + 展开波浪线 + 去除首尾空白
* 所有文件操作前应通过此函数处理用户输入的路径
*/
export function normalizePath(p: string): string {
return expandTilde(p.trim());
let result = p.trim();
// 剥离 file:// 协议前缀: file:///Users/... → /Users/...
if (result.startsWith("file://")) {
result = result.slice("file://".length);
// 处理 URL 编码file:// 路径中空格等字符可能被编码)
try {
result = decodeURIComponent(result);
} catch {
// decodeURIComponent 失败时保留原样
}
}
return expandTilde(result);
}
// ============ 文件名 UTF-8 规范化 ============
@@ -163,6 +174,7 @@ export function sanitizeFileName(name: string): string {
* - Windows 绝对路径: C:\..., D:/..., \\server\share
* - 相对路径: ./file, ../file
* - 波浪线路径: ~/Desktop/file.png
* - file:// 协议: file:///Users/..., file:///home/...
*
* 不匹配:
* - http:// / https:// URL
@@ -170,6 +182,8 @@ export function sanitizeFileName(name: string): string {
*/
export function isLocalPath(p: string): boolean {
if (!p) return false;
// file:// 协议(本地文件 URI
if (p.startsWith("file://")) return true;
// 波浪线路径Mac/Linux 用户常用)
if (p === "~" || p.startsWith("~/") || p.startsWith("~\\")) return true;
// Unix 绝对路径