fix(chat): store the generation ID of the LLM response for trace correlation

2025-12-17 19:23:21 +08:00
parent abfac871fd
commit 95fb77a575
2 changed files with 9 additions and 0 deletions

@@ -103,4 +103,7 @@ public class KeyboardUserCallLog {
    @Schema(description = "Call log creation time")
    private Date createdAt;
    @TableField(value = "gen_id")
    @Schema(description = "Generation id")
    private String genId;
}
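For orientation, the new column sits in the call-log entity roughly as sketched below. This is not the full class: the table name, the Lombok/MyBatis-Plus annotations beyond the hunk, and the omitted fields are assumptions; only createdAt and genId come from the diff above.

import java.util.Date;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;

// Sketch only: everything outside the hunk above is assumed.
@Data
@TableName("keyboard_user_call_log") // assumed table name
public class KeyboardUserCallLog {

    // ... other call-log columns omitted ...

    @Schema(description = "Call log creation time")
    private Date createdAt;

    // New column: provider-assigned generation id of the LLM response,
    // persisted so a call-log row can be correlated with provider-side traces.
    @TableField(value = "gen_id")
    @Schema(description = "Generation id")
    private String genId;
}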

@@ -155,6 +155,8 @@ public class ChatServiceImpl implements ChatService {
        AtomicInteger outputTokens = new AtomicInteger(0);
        // Atomic reference holding the error code
        AtomicReference<String> errorCodeRef = new AtomicReference<>();
        // Atomic reference holding the generation ID
        AtomicReference<String> genId = new AtomicReference<>();
        // ============ 3. Build the LLM streaming output ============
        Flux<ChatStreamMessage> llmFlux = client
@@ -180,6 +182,9 @@ public class ChatServiceImpl implements ChatService {
                if (metadata.getModel() != null) {
                    modelRef.set(metadata.getModel());
                }
                if (metadata.getId() != null) {
                    genId.set(metadata.getId());
                }
                // Save token usage
                if (metadata.getUsage() != null) {
                    var usage = metadata.getUsage();
@@ -281,6 +286,7 @@ public class ChatServiceImpl implements ChatService {
        });
        // Set the basic log fields
        callLog.setRequestId(requestId);
        callLog.setGenId(genId.get());
        callLog.setFeature("chat_talk");
        callLog.setModel(modelRef.get());
        // Set token usage
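Taken together, the service-side hunks follow the pattern condensed in the sketch below. The class name, the incoming Flux<ChatResponse>, the doFinally placement, and the saveCallLog helper are illustrative assumptions (package names assume Spring AI 1.x); the AtomicReference capture and the metadata.getId()/getModel() calls mirror the diff.

import java.util.concurrent.atomic.AtomicReference;

import org.springframework.ai.chat.metadata.ChatResponseMetadata;
import org.springframework.ai.chat.model.ChatResponse;
import reactor.core.publisher.Flux;

// Condensed sketch of the capture pattern; not the actual ChatServiceImpl code.
class GenIdCaptureSketch {

    void streamAndLog(Flux<ChatResponse> responses, KeyboardUserCallLog callLog) {
        // Atomic references outlive the individual stream callbacks, so values
        // observed mid-stream are still available when the log row is written.
        AtomicReference<String> modelRef = new AtomicReference<>();
        AtomicReference<String> genId = new AtomicReference<>();

        responses
                .doOnNext(response -> {
                    ChatResponseMetadata metadata = response.getMetadata();
                    if (metadata.getModel() != null) {
                        modelRef.set(metadata.getModel());
                    }
                    // Provider-assigned generation id, kept for trace correlation.
                    if (metadata.getId() != null) {
                        genId.set(metadata.getId());
                    }
                })
                .doFinally(signal -> {
                    // Write whatever metadata arrived, even if the stream errored.
                    callLog.setModel(modelRef.get());
                    callLog.setGenId(genId.get());
                    saveCallLog(callLog); // hypothetical persistence helper
                })
                .subscribe();
    }

    private void saveCallLog(KeyboardUserCallLog callLog) {
        // Placeholder; the real service persists through its own mapper/service.
    }
}

Using AtomicReference here is the standard workaround for lambdas requiring effectively final locals: the reference itself never changes, only the value it holds.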