2026-01-23 21:51:37 +08:00
parent 6ad9783bcb
commit 77fd46aa34
26 changed files with 3681 additions and 199 deletions

View File

@@ -11,7 +11,7 @@
#import "AudioSessionManager.h"
#import "DeepgramStreamingManager.h"
#import "KBAICommentView.h"
#import "KBAiChatView.h"
#import "KBChatTableView.h"
#import "KBAiRecordButton.h"
#import "KBHUD.h"
#import "LSTPopView.h"
@@ -26,7 +26,7 @@
@property(nonatomic, weak) LSTPopView *popView;
// UI
@property(nonatomic, strong) KBAiChatView *chatView;
@property(nonatomic, strong) KBChatTableView *chatView;
@property(nonatomic, strong) KBAiRecordButton *recordButton;
@property(nonatomic, strong) UILabel *statusLabel;
@property(nonatomic, strong) UILabel *transcriptLabel;
@@ -68,8 +68,7 @@
[self setupUI];
[self setupOrchestrator];
[self setupStreamingManager];
// websocket-api Deepgram
// [self setupDeepgramManager];
[self setupDeepgramManager];
}
- (void)viewWillAppear:(BOOL)animated {
@@ -159,7 +158,7 @@
[self.view addSubview:self.transcriptLabel];
// Chat view
self.chatView = [[KBAiChatView alloc] init];
self.chatView = [[KBChatTableView alloc] init];
self.chatView.backgroundColor = [UIColor clearColor];
self.chatView.translatesAutoresizingMaskIntoConstraints = NO;
[self.view addSubview:self.chatView];
@@ -208,13 +207,20 @@
make.left.equalTo(self.view).offset(16);
make.right.equalTo(self.view).offset(-16);
}];
// Let the transcript label compress first so the chat view can take the space
[self.transcriptLabel setContentCompressionResistancePriority:UILayoutPriorityDefaultLow
forAxis:UILayoutConstraintAxisVertical];
[self.chatView mas_makeConstraints:^(MASConstraintMaker *make) {
make.left.right.equalTo(self.view);
make.bottom.equalTo(self.tabbarBackgroundView.mas_top).offset(-8);
make.top.equalTo(self.transcriptLabel.mas_bottom).offset(8);
make.left.equalTo(self.view).offset(16);
make.right.equalTo(self.view).offset(-16);
make.bottom.lessThanOrEqualTo(self.recordButton.mas_top).offset(-16);
// Keep a minimum height even when there are no messages yet
make.height.greaterThanOrEqualTo(@100).priority(MASLayoutPriorityDefaultHigh);
}];
// Keep the chatView from being compressed vertically
[self.chatView setContentCompressionResistancePriority:UILayoutPriorityRequired
forAxis:UILayoutConstraintAxisVertical];
[self.recordButton mas_makeConstraints:^(MASConstraintMaker *make) {
make.left.equalTo(self.view.mas_safeAreaLayoutGuideLeft).offset(20);
@@ -311,7 +317,7 @@
return;
// Insert an empty AI message bubble to stream text into
[strongSelf.chatView addAssistantMessage:@""];
[strongSelf.chatView addAssistantMessage:@"" audioDuration:0 audioData:nil];
};
// AI
@@ -455,6 +461,10 @@
- (void)recordButtonDidBeginPress:(KBAiRecordButton *)button {
NSLog(@"[KBAiMainVC] Record button began press");
// Stop any AI audio that is still playing
[self.chatView stopPlayingAudio];
NSString *token = [[KBUserSessionManager shared] accessToken] ?: @"";
if (token.length == 0) {
[[KBUserSessionManager shared] goLoginVC];
@@ -463,20 +473,19 @@
self.statusLabel.text = @"正在连接...";
self.recordButton.state = KBAiRecordButtonStateRecording;
[self.deepgramFullText setString:@""];
self.transcriptLabel.text = @"";
[self.voiceChatAudioBuffer setLength:0];
[self.streamingManager startWithToken:token language:@"en" voiceId:nil];
[self.deepgramManager start];
}
- (void)recordButtonDidEndPress:(KBAiRecordButton *)button {
NSLog(@"[KBAiMainVC] Record button end press");
[self.streamingManager stopAndFinalize];
[self.deepgramManager stopAndFinalize];
}
- (void)recordButtonDidCancelPress:(KBAiRecordButton *)button {
NSLog(@"[KBAiMainVC] Record button cancel press");
[self.voiceChatAudioBuffer setLength:0];
[self.streamingManager cancel];
[self.deepgramManager cancel];
}
#pragma mark - VoiceChatStreamingManagerDelegate
@@ -537,7 +546,7 @@
- (void)voiceChatStreamingManagerDidReceiveLLMStart {
self.statusLabel.text = @"AI 正在思考...";
[self.assistantVisibleText setString:@""];
[self.chatView addAssistantMessage:@""];
[self.chatView addAssistantMessage:@"" audioDuration:0 audioData:nil];
[self.voiceChatAudioBuffer setLength:0];
}
@@ -559,22 +568,36 @@
- (void)voiceChatStreamingManagerDidCompleteWithTranscript:(NSString *)transcript
aiResponse:(NSString *)aiResponse {
NSString *finalText = aiResponse.length > 0 ? aiResponse
: self.assistantVisibleText;
NSString *finalText = aiResponse.length > 0 ? aiResponse : self.assistantVisibleText;
if (aiResponse.length > 0) {
[self.assistantVisibleText setString:aiResponse];
}
// Work out the duration of the buffered AI audio
NSTimeInterval duration = 0;
if (self.voiceChatAudioBuffer.length > 0) {
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:self.voiceChatAudioBuffer
error:&error];
if (!error && player) {
duration = player.duration;
}
}
if (finalText.length > 0) {
[self.chatView updateLastAssistantMessage:finalText];
[self.chatView markLastAssistantMessageComplete];
} else if (transcript.length > 0) {
[self.chatView addAssistantMessage:transcript];
[self.chatView markLastAssistantMessageComplete];
[self.chatView addAssistantMessage:transcript
audioDuration:duration
audioData:self.voiceChatAudioBuffer.length > 0 ? self.voiceChatAudioBuffer : nil];
}
if (self.voiceChatAudioBuffer.length > 0) {
[self playAiAudioData:self.voiceChatAudioBuffer];
[self.voiceChatAudioBuffer setLength:0];
}
self.recordButton.state = KBAiRecordButtonStateNormal;
self.statusLabel.text = @"完成";
}
@@ -629,39 +652,50 @@
self.statusLabel.text = @"识别完成";
self.recordButton.state = KBAiRecordButtonStateNormal;
// NSString *finalText = [self.deepgramFullText copy];
// if (finalText.length > 0) {
// __weak typeof(self) weakSelf = self;
// [KBHUD show];
// [self.aiVM syncChatWithTranscript:finalText
// completion:^(KBAiSyncResponse *_Nullable response,
// NSError *_Nullable error) {
// __strong typeof(weakSelf) strongSelf = weakSelf;
// if (!strongSelf) {
// return;
// }
// dispatch_async(dispatch_get_main_queue(), ^{
// [KBHUD dismiss];
// if (error) {
// [KBHUD showError:error.localizedDescription ?: @"请求失败"];
// return;
// }
//
// NSString *aiResponse = response.data.aiResponse ?: @"";
// if (aiResponse.length > 0) {
// NSLog(@"[KBAiMainVC] /chat/sync aiResponse: %@", aiResponse);
// }
//
// NSData *audioData = response.data.audioData;
// if (audioData.length > 0) {
// NSLog(@"[KBAiMainVC] /chat/sync audio ready, start play");
// [strongSelf playAiAudioData:audioData];
// } else {
// NSLog(@"[KBAiMainVC] /chat/sync audioData empty");
// }
// });
// }];
// }
NSString *finalText = [self.deepgramFullText copy];
if (finalText.length == 0) {
return;
}
// Show the recognized transcript as a user message
[self.chatView addUserMessage:finalText];
__weak typeof(self) weakSelf = self;
[KBHUD showWithStatus:@"AI 思考中..."];
// Request the AI reply via the chat/message endpoint
[self.aiVM requestChatMessageWithContent:finalText
completion:^(KBAiMessageResponse *_Nullable response,
NSError *_Nullable error) {
__strong typeof(weakSelf) strongSelf = weakSelf;
if (!strongSelf) {
return;
}
dispatch_async(dispatch_get_main_queue(), ^{
[KBHUD dismiss];
if (error) {
[KBHUD showError:error.localizedDescription ?: @"请求失败"];
return;
}
// Take the AI reply text from whichever response field is populated
NSString *aiResponse = response.data.aiResponse ?: response.data.content ?: response.data.text ?: response.data.message ?: @"";
if (aiResponse.length == 0) {
[KBHUD showError:@"AI 回复为空"];
return;
}
// audioId returned with the reply
NSString *audioId = response.data.audioId;
// Add the AI message together with its audioId
[strongSelf.chatView addAssistantMessage:aiResponse
audioId:audioId];
});
}];
}
- (void)deepgramStreamingManagerDidFail:(NSError *)error {

View File

@@ -0,0 +1,269 @@
# KBAiMainVC: KBChatTableView Integration Notes
## ✅ Completed Changes
### 1. Import change
```objective-c
// Before
#import "KBAiChatView.h"
// After
#import "KBChatTableView.h"
```
---
### 2. Property type change
```objective-c
// Before
@property(nonatomic, strong) KBAiChatView *chatView;
// After
@property(nonatomic, strong) KBChatTableView *chatView;
```
---
### 3. Initialization change
```objective-c
// Before
self.chatView = [[KBAiChatView alloc] init];
// After
self.chatView = [[KBChatTableView alloc] init];
```
---
### 4. Layout constraint change (messages anchored to the bottom)
**Original layout:**
```objective-c
[self.chatView mas_makeConstraints:^(MASConstraintMaker *make) {
make.top.equalTo(self.transcriptLabel.mas_bottom).offset(8);
make.left.equalTo(self.view).offset(16);
make.right.equalTo(self.view).offset(-16);
make.bottom.lessThanOrEqualTo(self.recordButton.mas_top).offset(-16);
}];
```
**Changed to (bottom-up):**
```objective-c
[self.chatView mas_makeConstraints:^(MASConstraintMaker *make) {
make.left.right.equalTo(self.view);
make.bottom.equalTo(self.tabbarBackgroundView.mas_top).offset(-8);
make.top.greaterThanOrEqualTo(self.transcriptLabel.mas_bottom).offset(8);
}];
```
**Notes:**
- The `bottom` edge is pinned to the top of `tabbarBackgroundView`, so content starts at the bottom
- The `top` edge uses `greaterThanOrEqualTo`, letting content grow upward
- The 16pt left/right margins were removed so the chat view spans the full width (the bubbles carry their own padding)
---
### 5. Message insertion logic
#### 5.1 Adding a user message (unchanged)
```objective-c
[self.chatView addUserMessage:finalText];
```
#### 5.2 Adding an AI message (with audio data)
**In `deepgramStreamingManagerDidReceiveFinalTranscript`:**
```objective-c
// 1. Add the user message
[self.chatView addUserMessage:finalText];
// 2. Once the AI reply has been generated, add the message together with its audio
// Compute the audio duration
NSTimeInterval duration = 0;
NSError *playerError = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:audioData
error:&playerError];
if (!playerError && player) {
duration = player.duration;
}
// Add the AI message (with audio)
[self.chatView addAssistantMessage:polishedText
audioDuration:duration
audioData:audioData];
// Play the audio
[self playAiAudioData:audioData];
```
**Error handling:**
```objective-c
// If speech synthesis fails, still add the text message (without audio)
if (ttsError) {
[KBHUD showError:ttsError.localizedDescription ?: @"语音生成失败"];
[self.chatView addAssistantMessage:polishedText
audioDuration:0
audioData:nil];
return;
}
```
---
#### 5.3 Typewriter effect
**In `voiceChatStreamingManagerDidReceiveLLMStart`:**
```objective-c
// Before
[self.chatView addAssistantMessage:@""];
// After
[self.chatView addAssistantMessage:@"" audioDuration:0 audioData:nil];
```
**In `orchestrator.onSpeakingStart`:**
```objective-c
// Before
[self.chatView addAssistantMessage:@""];
// After
[self.chatView addAssistantMessage:@"" audioDuration:0 audioData:nil];
```
---
### 6. VoiceChatStreamingManager completion callback
**In `voiceChatStreamingManagerDidCompleteWithTranscript`:**
```objective-c
// Compute the audio duration
NSTimeInterval duration = 0;
if (self.voiceChatAudioBuffer.length > 0) {
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:self.voiceChatAudioBuffer
error:&error];
if (!error && player) {
duration = player.duration;
}
}
// Attach the audio data when adding the message
if (transcript.length > 0) {
[self.chatView addAssistantMessage:transcript
audioDuration:duration
audioData:self.voiceChatAudioBuffer.length > 0 ? self.voiceChatAudioBuffer : nil];
}
```
---
## 🎯 Key Improvements
### 1. Messages stack from the bottom up
- The chat view's `bottom` is constrained to `tabbarBackgroundView.top`
- New messages stack upward from the bottom
- The view auto-scrolls to the newest message (a sketch of that scroll follows this list)
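How the auto-scroll works internally is not part of this diff. A minimal sketch, assuming `KBChatTableView` is backed by a `UITableView` and keeps its model in a `messages` array (both assumed names, not confirmed API):
```objective-c
// Sketch only: scroll a UITableView-backed chat view to its newest row.
// `tableView` and `messages` are assumed internals of KBChatTableView.
- (void)scrollToLatestMessage {
    NSInteger count = (NSInteger)self.messages.count;
    if (count == 0) {
        return;
    }
    NSIndexPath *last = [NSIndexPath indexPathForRow:count - 1 inSection:0];
    [self.tableView scrollToRowAtIndexPath:last
                          atScrollPosition:UITableViewScrollPositionBottom
                                  animated:YES];
}
```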
### 2. Voice playback integration
- AI messages carry their audio data and duration
- Tapping the audio button plays the clip
- The audio duration is shown next to the button, e.g. 6" (one way to format it is sketched below)
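The exact label formatting lives inside `KBChatTableView` and is not shown in this commit; one plausible way to turn the duration passed to `addAssistantMessage:audioDuration:audioData:` into a label (`KBAudioDurationLabel` is a hypothetical helper):
```objective-c
#import <Foundation/Foundation.h>

// Sketch only: format a duration as a WeChat-style second count, e.g. 6".
// Rounds up so very short clips never display as 0".
static NSString *KBAudioDurationLabel(NSTimeInterval duration) {
    NSInteger seconds = (NSInteger)ceil(MAX(duration, 0));
    return [NSString stringWithFormat:@"%ld\"", (long)MAX(seconds, (NSInteger)1)];
}
```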
### 3. Automatic timestamps
- The first message always shows a timestamp
- After a gap of more than 5 minutes, a new timestamp is inserted automatically (see the sketch after this list)
- Messages sent on a different day also show the date
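The 5-minute rule could look roughly like the following. `shouldInsertTimestampBeforeMessage:` and the `messages` array are assumed names; only the `timestamp` property on `KBChatMessage` is implied by this document:
```objective-c
// Sketch only: decide whether a timestamp row is needed before appending `message`.
static const NSTimeInterval kKBTimestampGap = 5 * 60; // 5 minutes

- (BOOL)shouldInsertTimestampBeforeMessage:(KBChatMessage *)message {
    KBChatMessage *previous = self.messages.lastObject;
    if (previous == nil) {
        return YES; // the first message always gets a timestamp
    }
    return [message.timestamp timeIntervalSinceDate:previous.timestamp] >= kKBTimestampGap;
}
```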
### 4. UX polish
- User and AI bubbles are clearly aligned to the right and left
- Audio playback state is visualized
- The view auto-scrolls to the newest message
---
## 📊 Layout
```
┌─────────────────────────────────┐
│ Status label                    │
│ Transcript label                │
│                                 │
│ ┌─────────────────────────────┐ │
│ │                             │ │
│ │                             │ │
│ │  Chat message area          │ │
│ │  (stacks from the bottom)   │ │
│ │                             │ │
│ │  16:36                      │ │ ← timestamp
│ │            ┌─────────────┐  │ │
│ │            │ Hi there!   │  │ │ ← user message (right)
│ │            └─────────────┘  │ │
│ │                             │ │
│ │  ▶️ 6"                      │ │ ← audio button
│ │  ┌───────────────────────┐  │ │
│ │  │ Hi! Nice to meet you. │  │ │ ← AI message (left)
│ │  └───────────────────────┘  │ │
│ │                             │ │
│ └─────────────────────────────┘ │
│                                 │
│ ┌─────────────────────────────┐ │
│ │  Frosted-glass background   │ │
│ │                             │ │
│ │  ┌───────────────────────┐  │ │
│ │  │     Record button     │  │ │
│ │  └───────────────────────┘  │ │
│ └─────────────────────────────┘ │
└─────────────────────────────────┘
```
---
## ✅ Test Checklist
- [ ] User messages appear on the right
- [ ] AI messages appear on the left
- [ ] The audio button appears at the top-left of AI messages
- [ ] Tapping the audio button plays the clip
- [ ] The audio duration is displayed correctly
- [ ] Timestamps are inserted automatically
- [ ] Messages stack from the bottom up
- [ ] New messages auto-scroll to the bottom
- [ ] The typewriter effect works
- [ ] The playback state icon toggles correctly
---
## 🐛 Possible Issues
### 1. The audio button does not appear
**Cause:** `audioData` is nil or has zero length
**Fix:** Check the audio data returned by TTS
### 2. Messages do not start from the bottom
**Cause:** Incorrect layout constraints
**Fix:** Make sure `bottom` is constrained to `tabbarBackgroundView.top`
### 3. Timestamps are not shown
**Cause:** The message's `timestamp` was never set
**Fix:** `KBChatMessage` sets the current time automatically on creation
### 4. Tapping the audio button does nothing
**Cause:** The audio data is in an unsupported format
**Fix:** Make sure the audio data is in a format AVAudioPlayer can decode (MP3, AAC, M4A); a quick probe is sketched below
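One way to check the data before attaching it to a message (a sketch; `KBAudioDataIsPlayable` is a hypothetical helper, and `audioData` stands for whatever `NSData` the backend returned):
```objective-c
#import <AVFoundation/AVFoundation.h>

// Sketch only: verify that AVAudioPlayer can decode the data before using it.
static BOOL KBAudioDataIsPlayable(NSData *audioData) {
    if (audioData.length == 0) {
        return NO;
    }
    NSError *error = nil;
    AVAudioPlayer *probe = [[AVAudioPlayer alloc] initWithData:audioData error:&error];
    return (probe != nil && error == nil);
}
```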
---
## 🎉 Done!
`KBAiMainVC` now integrates the new `KBChatTableView` with the following features:
✅ Messages stack from the bottom up
✅ User messages on the right
✅ AI messages on the left (with an audio button)
✅ Automatic timestamps
✅ Audio playback
✅ Typewriter effect
Run the project to see it in action.

View File

@@ -0,0 +1,17 @@
//
// KBChatTestVC.h
// keyBoard
//
// Created by Kiro on 2026/1/23.
//
#import "BaseViewController.h"
NS_ASSUME_NONNULL_BEGIN
/// Chat UI test page
@interface KBChatTestVC : BaseViewController
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,149 @@
//
// KBChatTestVC.m
// keyBoard
//
// Created by Kiro on 2026/1/23.
//
#import "KBChatTestVC.h"
#import "KBChatTableView.h"
#import <Masonry/Masonry.h>
@interface KBChatTestVC ()
@property (nonatomic, strong) KBChatTableView *chatView;
@property (nonatomic, strong) UIButton *addUserMessageButton;
@property (nonatomic, strong) UIButton *addAIMessageButton;
@property (nonatomic, strong) UIButton *clearButton;
@end
@implementation KBChatTestVC
- (void)viewDidLoad {
[super viewDidLoad];
self.title = @"聊天 UI 测试";
self.view.backgroundColor = [UIColor whiteColor];
[self setupUI];
[self loadMockData];
}
- (void)setupUI {
// Chat view
self.chatView = [[KBChatTableView alloc] init];
self.chatView.backgroundColor = [UIColor colorWithWhite:0.95 alpha:1.0];
[self.view addSubview:self.chatView];
// Button container
UIView *buttonContainer = [[UIView alloc] init];
buttonContainer.backgroundColor = [UIColor whiteColor];
[self.view addSubview:buttonContainer];
// "Add user message" button
self.addUserMessageButton = [UIButton buttonWithType:UIButtonTypeSystem];
[self.addUserMessageButton setTitle:@"添加用户消息" forState:UIControlStateNormal];
[self.addUserMessageButton addTarget:self
action:@selector(addUserMessage)
forControlEvents:UIControlEventTouchUpInside];
[buttonContainer addSubview:self.addUserMessageButton];
// "Add AI message" button
self.addAIMessageButton = [UIButton buttonWithType:UIButtonTypeSystem];
[self.addAIMessageButton setTitle:@"添加 AI 消息" forState:UIControlStateNormal];
[self.addAIMessageButton addTarget:self
action:@selector(addAIMessage)
forControlEvents:UIControlEventTouchUpInside];
[buttonContainer addSubview:self.addAIMessageButton];
// "Clear" button
self.clearButton = [UIButton buttonWithType:UIButtonTypeSystem];
[self.clearButton setTitle:@"清空" forState:UIControlStateNormal];
[self.clearButton addTarget:self
action:@selector(clearMessages)
forControlEvents:UIControlEventTouchUpInside];
[buttonContainer addSubview:self.clearButton];
// Layout
[self.chatView mas_makeConstraints:^(MASConstraintMaker *make) {
make.top.equalTo(self.view.mas_safeAreaLayoutGuideTop);
make.left.right.equalTo(self.view);
make.bottom.equalTo(buttonContainer.mas_top);
}];
[buttonContainer mas_makeConstraints:^(MASConstraintMaker *make) {
make.left.right.equalTo(self.view);
make.bottom.equalTo(self.view.mas_safeAreaLayoutGuideBottom);
make.height.mas_equalTo(60);
}];
[self.addUserMessageButton mas_makeConstraints:^(MASConstraintMaker *make) {
make.left.equalTo(buttonContainer).offset(16);
make.centerY.equalTo(buttonContainer);
}];
[self.addAIMessageButton mas_makeConstraints:^(MASConstraintMaker *make) {
make.center.equalTo(buttonContainer);
}];
[self.clearButton mas_makeConstraints:^(MASConstraintMaker *make) {
make.right.equalTo(buttonContainer).offset(-16);
make.centerY.equalTo(buttonContainer);
}];
}
- (void)loadMockData {
// Seed a short mock conversation
[self.chatView addUserMessage:@"你好"];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self.chatView addAssistantMessage:@"你好!很高兴见到你。"
audioDuration:3.0
audioData:[self generateMockAudioData:3.0]];
});
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1.0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self.chatView addUserMessage:@"今天天气怎么样?"];
});
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1.5 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self.chatView addAssistantMessage:@"今天天气不错,阳光明媚,温度适宜,非常适合外出活动。"
audioDuration:8.0
audioData:[self generateMockAudioData:8.0]];
});
}
- (void)addUserMessage {
static NSInteger userMessageCount = 0;
userMessageCount++;
NSString *text = [NSString stringWithFormat:@"这是用户消息 %ld", (long)userMessageCount];
[self.chatView addUserMessage:text];
}
- (void)addAIMessage {
static NSInteger aiMessageCount = 0;
aiMessageCount++;
NSString *text = [NSString stringWithFormat:@"这是 AI 回复消息 %ld包含一些较长的文本内容用于测试气泡的自适应高度和换行效果。", (long)aiMessageCount];
NSTimeInterval duration = 5.0 + (aiMessageCount % 10);
[self.chatView addAssistantMessage:text
audioDuration:duration
audioData:[self generateMockAudioData:duration]];
}
- (void)clearMessages {
[self.chatView clearMessages];
}
/// Generates mock audio data standing in for a TTS result
- (NSData *)generateMockAudioData:(NSTimeInterval)duration {
// Use a bundled test clip if one exists; otherwise return nil
// (returning nil simply means no audio is attached to the mock message)
NSString *audioPath = [[NSBundle mainBundle] pathForResource:@"ai_test" ofType:@"m4a"];
if (audioPath) {
return [NSData dataWithContentsOfFile:audioPath];
}
return nil;
}
@end