//
//  LLMStreamClient.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "LLMStreamClient.h"

@interface LLMStreamClient () <NSURLSessionDataDelegate>

@property(nonatomic, strong) NSURLSession *urlSession;
@property(nonatomic, strong) NSURLSessionDataTask *dataTask;
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL generating;
@property(nonatomic, strong) NSMutableString *buffer; // SSE data buffer

@end

@implementation LLMStreamClient

- (instancetype)init {
    self = [super init];
    if (self) {
        _networkQueue = dispatch_queue_create("com.keyboard.aitalk.llm.network", DISPATCH_QUEUE_SERIAL);
        _buffer = [[NSMutableString alloc] init];
        // TODO: Replace with the actual LLM server address
        _serverURL = @"https://your-llm-server.com/api/chat/stream";
    }
    return self;
}

- (void)dealloc {
    [self cancel];
}

#pragma mark - Public Methods

- (void)sendUserText:(NSString *)text conversationId:(NSString *)conversationId {
    dispatch_async(self.networkQueue, ^{
        [self cancelInternal];
        self.generating = YES;
        [self.buffer setString:@""];

        // Build the request
        NSURL *url = [NSURL URLWithString:self.serverURL];
        NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
        request.HTTPMethod = @"POST";
        [request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
        [request setValue:@"text/event-stream" forHTTPHeaderField:@"Accept"];
        if (self.apiKey) {
            [request setValue:[NSString stringWithFormat:@"Bearer %@", self.apiKey]
                forHTTPHeaderField:@"Authorization"];
        }

        // Request body
        NSDictionary *body = @{
            @"message" : text,
            @"conversationId" : conversationId,
            @"stream" : @YES
        };
        NSError *jsonError = nil;
        NSData *jsonData = [NSJSONSerialization dataWithJSONObject:body options:0 error:&jsonError];
        if (jsonError) {
            [self reportError:jsonError];
            return;
        }
        request.HTTPBody = jsonData;

        // Create the session
        NSURLSessionConfiguration *config = [NSURLSessionConfiguration defaultSessionConfiguration];
        config.timeoutIntervalForRequest = 60;
        config.timeoutIntervalForResource = 300;
        self.urlSession = [NSURLSession sessionWithConfiguration:config delegate:self delegateQueue:nil];

        self.dataTask = [self.urlSession dataTaskWithRequest:request];
        [self.dataTask resume];

        NSLog(@"[LLMStreamClient] Started request for conversation: %@", conversationId);
    });
}

- (void)cancel {
    dispatch_async(self.networkQueue, ^{
        [self cancelInternal];
    });
}

#pragma mark - Private Methods

- (void)cancelInternal {
    self.generating = NO;
    if (self.dataTask) {
        [self.dataTask cancel];
        self.dataTask = nil;
    }
    if (self.urlSession) {
        [self.urlSession invalidateAndCancel];
        self.urlSession = nil;
    }
    [self.buffer setString:@""];
}

- (void)reportError:(NSError *)error {
    self.generating = NO;
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector(llmClientDidFail:)]) {
            [self.delegate llmClientDidFail:error];
        }
    });
}

- (void)reportComplete {
    self.generating = NO;
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector(llmClientDidComplete)]) {
            [self.delegate llmClientDidComplete];
        }
    });
}

- (void)reportToken:(NSString *)token {
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector(llmClientDidReceiveToken:)]) {
            [self.delegate llmClientDidReceiveToken:token];
        }
    });
}

#pragma mark - SSE Parsing

- (void)parseSSEData:(NSData *)data {
    NSString *string = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
    if (!string) return;

    [self.buffer appendString:string];

    // SSE format: events are separated by \n\n
    NSArray *events = [self.buffer componentsSeparatedByString:@"\n\n"];

    // Keep the last, possibly incomplete event in the buffer
    if (events.count > 1) {
        [self.buffer setString:events.lastObject];
        for (NSUInteger i = 0; i < events.count - 1; i++) {
            [self handleSSEEvent:events[i]];
        }
    }
}

- (void)handleSSEEvent:(NSString *)event {
    if (event.length == 0) return;

    // Parse the SSE event
    // Format: data: {...}
    NSArray *lines = [event componentsSeparatedByString:@"\n"];
    for (NSString *line in lines) {
        if ([line hasPrefix:@"data: "]) {
            NSString *dataString = [line substringFromIndex:6];

            // Check for the end-of-stream marker
            if ([dataString isEqualToString:@"[DONE]"]) {
                [self reportComplete];
                return;
            }

            // Parse the JSON payload
            NSData *jsonData = [dataString dataUsingEncoding:NSUTF8StringEncoding];
            NSError *jsonError = nil;
            NSDictionary *json = [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&jsonError];
            if (jsonError) {
                NSLog(@"[LLMStreamClient] Failed to parse SSE data: %@", dataString);
                continue;
            }

            // Extract the token (adjust to the actual API format)
            // Common formats: {"token": "..."} or {"choices": [{"delta": {"content": "..."}}]}
            NSString *token = json[@"token"];
            if (!token) {
                // OpenAI-style format
                NSArray *choices = json[@"choices"];
                if (choices.count > 0) {
                    NSDictionary *delta = choices[0][@"delta"];
                    token = delta[@"content"];
                }
            }

            if (token && token.length > 0) {
                [self reportToken:token];
            }
        }
    }
}

#pragma mark - NSURLSessionDataDelegate

- (void)URLSession:(NSURLSession *)session
          dataTask:(NSURLSessionDataTask *)dataTask
    didReceiveData:(NSData *)data {
    [self parseSSEData:data];
}

- (void)URLSession:(NSURLSession *)session
                    task:(NSURLSessionTask *)task
    didCompleteWithError:(NSError *)error {
    if (error) {
        if (error.code != NSURLErrorCancelled) {
            [self reportError:error];
        }
    } else {
        // Flush any remaining data left in the buffer
        if (self.buffer.length > 0) {
            [self handleSSEEvent:self.buffer];
            [self.buffer setString:@""];
        }
        [self reportComplete];
    }
}

@end
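
// Usage sketch (illustrative only). It assumes the companion LLMStreamClient.h
// declares the serverURL, apiKey, and delegate properties referenced above, and
// a delegate protocol (named LLMStreamClientDelegate here as an assumption) with
// the optional llmClientDidReceiveToken:, llmClientDidComplete, and
// llmClientDidFail: callbacks that this file checks via respondsToSelector:.
//
//   LLMStreamClient *client = [[LLMStreamClient alloc] init];
//   client.serverURL = @"https://your-llm-server.com/api/chat/stream";
//   client.apiKey = @"<your API key>";   // optional; sent as a Bearer token
//   client.delegate = self;              // callbacks are delivered on the main queue
//   [client sendUserText:@"Hello"
//         conversationId:[[NSUUID UUID] UUIDString]];
//   // ...later, to stop an in-flight stream and tear down the session:
//   [client cancel];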