添加语音websocket等,还没测试

This commit is contained in:
2026-01-16 13:38:03 +08:00
parent 169a1929d7
commit b021fd308f
33 changed files with 5098 additions and 8 deletions

View File

@@ -0,0 +1,246 @@
//
// AudioStreamPlayer.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "AudioStreamPlayer.h"
#import <AVFoundation/AVFoundation.h>
// Class extension: private state for the streaming PCM player.
@interface AudioStreamPlayer ()
// Audio graph: playerNode is attached to audioEngine and connected to its
// main mixer using playbackFormat (see -start:).
@property(nonatomic, strong) AVAudioEngine *audioEngine;
@property(nonatomic, strong) AVAudioPlayerNode *playerNode;
// Fixed output format created in -init: 16 kHz, mono, Float32.
@property(nonatomic, strong) AVAudioFormat *playbackFormat;
// Per-segment bookkeeping, keyed by segmentId.
@property(nonatomic, copy) NSString *currentSegmentId;
// Accumulated scheduled duration (seconds) per segment.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentDurations;
// CACurrentMediaTime() captured when a segment first enqueues audio.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentStartTimes;
// Sample counters for the current segment (written on playerQueue).
@property(nonatomic, assign) NSUInteger scheduledSamples;
@property(nonatomic, assign) NSUInteger playedSamples;
// Engine/run state.
@property(nonatomic, assign) BOOL playing;
// Serial queue serializing all segment/counter mutations.
@property(nonatomic, strong) dispatch_queue_t playerQueue;
// 30 Hz UI progress timer, scheduled on the main run loop.
// NOTE(review): a target/selector repeating timer retains its target, which
// keeps this player alive until the timer is invalidated — verify teardown.
@property(nonatomic, strong) NSTimer *progressTimer;
@end
@implementation AudioStreamPlayer

/// Designated initializer: creates the engine and player node, the segment
/// bookkeeping containers, the serial player queue, and the fixed playback
/// format (16 kHz, mono, non-interleaved Float32).
- (instancetype)init {
  if ((self = [super init])) {
    _audioEngine = [[AVAudioEngine alloc] init];
    _playerNode = [[AVAudioPlayerNode alloc] init];
    _playerQueue = dispatch_queue_create("com.keyboard.aitalk.streamplayer",
                                         DISPATCH_QUEUE_SERIAL);
    _segmentDurations = [NSMutableDictionary dictionary];
    _segmentStartTimes = [NSMutableDictionary dictionary];
    // Output graph format: 16 kHz, Mono, Float32.
    _playbackFormat =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:16000
                                           channels:1
                                        interleaved:NO];
  }
  return self;
}
/// Synchronous teardown. The previous implementation called [self stop],
/// which dispatch_async'd a block strongly capturing self onto playerQueue —
/// capturing (retaining) an object that is already deallocating is undefined
/// behavior under ARC and can crash. Clean up inline with direct ivar access
/// instead; no async work may outlive this object.
- (void)dealloc {
  [_progressTimer invalidate];
  _progressTimer = nil;
  [_playerNode stop];
  [_audioEngine stop];
}
#pragma mark - Public Methods

/// Builds the engine graph (first call only) and starts playback.
/// @param error Out-parameter, written only when the engine fails to start.
/// @return YES if the engine and player node are running.
- (BOOL)start:(NSError **)error {
  if (self.playing) {
    return YES;
  }
  // Attach/connect only once. The original re-attached and re-connected the
  // node on every start/stop cycle; the node stays attached across -stop,
  // so rebuilding the connection each time is redundant.
  if (self.playerNode.engine == nil) {
    [self.audioEngine attachNode:self.playerNode];
    [self.audioEngine connect:self.playerNode
                           to:self.audioEngine.mainMixerNode
                       format:self.playbackFormat];
  }
  NSError *startError = nil;
  [self.audioEngine prepare];
  if (![self.audioEngine startAndReturnError:&startError]) {
    if (error) {
      *error = startError;
    }
    NSLog(@"[AudioStreamPlayer] Failed to start engine: %@",
          startError.localizedDescription);
    return NO;
  }
  [self.playerNode play];
  self.playing = YES;
  // Begin emitting time updates to the delegate.
  [self startProgressTimer];
  NSLog(@"[AudioStreamPlayer] Started");
  return YES;
}
/// Stops playback asynchronously on the serial player queue and resets all
/// per-segment bookkeeping. The block intentionally captures self strongly
/// so the object stays alive until teardown completes; do NOT call this from
/// dealloc for that reason. Order matters: the player node is stopped before
/// the engine that hosts it.
- (void)stop {
  dispatch_async(self.playerQueue, ^{
    // Timer invalidation hops to the main run loop inside this helper.
    [self stopProgressTimer];
    [self.playerNode stop];
    [self.audioEngine stop];
    // Reset run state and counters.
    self.playing = NO;
    self.currentSegmentId = nil;
    self.scheduledSamples = 0;
    self.playedSamples = 0;
    // Drop all segment history.
    [self.segmentDurations removeAllObjects];
    [self.segmentStartTimes removeAllObjects];
    NSLog(@"[AudioStreamPlayer] Stopped");
  });
}
/// Converts an interleaved Int16 PCM chunk to planar Float32 and schedules it
/// on the player node.
/// @param pcmData Interleaved signed 16-bit samples (all channels).
/// @param sampleRate Sample rate of the chunk in Hz; must be > 0.
/// @param channels Channel count of the chunk; must be >= 1.
/// @param segmentId Logical segment this chunk belongs to.
/// NOTE(review): the node is connected using playbackFormat (16 kHz mono);
/// scheduling buffers whose sampleRate/channels differ from the connection
/// format may raise — confirm upstream always sends 16 kHz mono.
- (void)enqueuePCMChunk:(NSData *)pcmData
             sampleRate:(double)sampleRate
               channels:(int)channels
              segmentId:(NSString *)segmentId {
  // Reject empty data and nonsensical format parameters up front.
  if (!pcmData || pcmData.length == 0 || sampleRate <= 0 || channels <= 0) {
    return;
  }
  dispatch_async(self.playerQueue, ^{
    // Segment transition: reset counters and notify the delegate on main.
    BOOL isNewSegment = ![segmentId isEqualToString:self.currentSegmentId];
    if (isNewSegment) {
      self.currentSegmentId = segmentId;
      self.scheduledSamples = 0;
      self.segmentStartTimes[segmentId] = @(CACurrentMediaTime());
      dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector
                           (audioStreamPlayerDidStartSegment:)]) {
          [self.delegate audioStreamPlayerDidStartSegment:segmentId];
        }
      });
    }
    // `sampleCount` is the TOTAL int16 sample count across channels; one
    // frame holds one sample per channel. The original used sampleCount as
    // the frame count, which is wrong for any channels > 1.
    NSUInteger sampleCount = pcmData.length / sizeof(int16_t);
    NSUInteger frameCount = sampleCount / (NSUInteger)channels;
    if (frameCount == 0) {
      return;
    }
    const int16_t *int16Samples = (const int16_t *)pcmData.bytes;
    AVAudioFormat *format =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:sampleRate
                                           channels:channels
                                        interleaved:NO];
    if (!format) {
      NSLog(@"[AudioStreamPlayer] Failed to create audio format");
      return;
    }
    AVAudioPCMBuffer *buffer = [[AVAudioPCMBuffer alloc]
        initWithPCMFormat:format
            frameCapacity:(AVAudioFrameCount)frameCount];
    if (!buffer) {
      NSLog(@"[AudioStreamPlayer] Failed to create PCM buffer");
      return;
    }
    buffer.frameLength = (AVAudioFrameCount)frameCount;
    // De-interleave into the planar Float32 channels, scaling int16
    // full-scale (32768) into [-1.0, 1.0). The original filled channel 0 only.
    for (int ch = 0; ch < channels; ch++) {
      float *floatChannel = buffer.floatChannelData[ch];
      for (NSUInteger i = 0; i < frameCount; i++) {
        floatChannel[i] =
            (float)int16Samples[i * (NSUInteger)channels + ch] / 32768.0f;
      }
    }
    // Completion fires after the buffer is consumed; advance the played
    // counter on the serial queue that owns it.
    __weak typeof(self) weakSelf = self;
    [self.playerNode scheduleBuffer:buffer
                  completionHandler:^{
                    __strong typeof(weakSelf) strongSelf = weakSelf;
                    if (!strongSelf)
                      return;
                    dispatch_async(strongSelf.playerQueue, ^{
                      strongSelf.playedSamples += frameCount;
                    });
                  }];
    self.scheduledSamples += frameCount;
    // Accumulate the segment's total scheduled duration in seconds.
    NSTimeInterval chunkDuration = (double)frameCount / sampleRate;
    NSNumber *currentDuration = self.segmentDurations[segmentId];
    self.segmentDurations[segmentId] =
        @(currentDuration.doubleValue + chunkDuration);
  });
}
/// Seconds of audio already played for `segmentId`; 0 for any segment that
/// is not the currently playing one.
/// NOTE(review): playedSamples is accumulated from enqueued chunks, whose
/// sampleRate parameter may differ from playbackFormat's 16 kHz — if it ever
/// does, this conversion is off. Confirm upstream always sends 16 kHz audio.
- (NSTimeInterval)playbackTimeForSegment:(NSString *)segmentId {
  if (![segmentId isEqualToString:self.currentSegmentId]) {
    return 0;
  }
  // Convert the played-sample counter to seconds at the engine's rate.
  return (double)self.playedSamples / self.playbackFormat.sampleRate;
}
/// Total scheduled duration (seconds) accumulated for `segmentId`,
/// or 0 when the segment is unknown.
- (NSTimeInterval)durationForSegment:(NSString *)segmentId {
  // Messaging nil returns 0.0, which matches the "unknown segment" contract.
  return [self.segmentDurations[segmentId] doubleValue];
}
#pragma mark - Progress Timer

/// Starts the 30 Hz progress timer on the main run loop.
/// Uses the block-based NSTimer API with a weak self: the original
/// target/selector repeating timer retained its target, creating a retain
/// cycle (timer -> self -> timer) that kept the player alive and prevented
/// dealloc until the timer was invalidated.
- (void)startProgressTimer {
  __weak typeof(self) weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf)
      return;
    // Replace any timer left over from a previous start.
    [strongSelf.progressTimer invalidate];
    strongSelf.progressTimer = [NSTimer
        scheduledTimerWithTimeInterval:1.0 / 30.0
                               repeats:YES
                                 block:^(NSTimer *timer) {
                                   [weakSelf updateProgress];
                                 }];
  });
}
/// Invalidates the progress timer on the main run loop — the run loop it was
/// scheduled on, as NSTimer requires.
- (void)stopProgressTimer {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSTimer *timer = self.progressTimer;
    self.progressTimer = nil;
    [timer invalidate];
  });
}
/// Timer callback (main thread): reports the current playback time to the
/// delegate and detects the end of the current segment.
- (void)updateProgress {
  if (!self.playing || !self.currentSegmentId) {
    return;
  }
  NSString *segmentId = self.currentSegmentId;
  NSTimeInterval currentTime = [self playbackTimeForSegment:segmentId];
  if ([self.delegate respondsToSelector:@selector
                     (audioStreamPlayerDidUpdateTime:segmentId:)]) {
    [self.delegate audioStreamPlayerDidUpdateTime:currentTime
                                        segmentId:segmentId];
  }
  // Finish detection: within 0.1 s of the scheduled duration counts as done.
  NSTimeInterval duration = [self durationForSegment:segmentId];
  if (duration > 0 && currentTime >= duration - 0.1) {
    // We are already on the main thread (NSTimer callback), so notify the
    // delegate directly; the original hopped to playerQueue merely to check
    // respondsToSelector: and then hopped back to main, adding nothing but
    // latency.
    if ([self.delegate respondsToSelector:@selector
                       (audioStreamPlayerDidFinishSegment:)]) {
      [self.delegate audioStreamPlayerDidFinishSegment:segmentId];
    }
    // TODO(review): with no "finished" flag, this fires again on every tick
    // until the segment changes or -stop is called — if a single callback is
    // intended, record the finished segmentId and skip repeats.
  }
}
@end