Compare commits

...

19 Commits

Author SHA1 Message Date
dfccc03df8 fix(entity): add the secUid field and its mapping to the ServerBigBrother entity 2025-12-15 20:13:09 +08:00
cf15298968 feat(mq): add WebAI message queue support
Add the web.ai.headers.exchange exchange and a matching send method to support the WebAI login scenario;
add a Redis prefix check and the webAISend call in HostInfoServiceImpl.
2025-12-05 13:57:34 +08:00
9f062492c4 refactor(controller): remove the unused List import and tidy up the code formatting 2025-11-20 17:42:59 +08:00
4d20448b82 refactor(mq): split the exchanges and add the brotherSend method
- add two HeadersExchange beans, AI_CHAT_EXCHANGE_NAME and BIG_BROTHER_EXCHANGE_NAME
- add a brotherSend method to MQSender that routes to the big-brother queue by tenantId
- in ServerBigBrotherServiceImpl, send a message asynchronously after save/update when the Redis flag exists
- remove redundant blank lines in HostInfoServiceImpl to keep the code tidy
2025-11-10 20:43:43 +08:00
b82dbacfee 1. Add an English country-name field 2025-10-13 17:56:03 +08:00
a4020b9176 1. Send crawled host data to the AI message queue 2025-09-25 21:52:43 +08:00
ca86db8f66 1. Add a fan-club field to the big-brother entity 2025-09-23 15:57:06 +08:00
417c2186c2 1. Adjust the check for whether a user is logged in to AI 2025-08-27 21:40:38 +08:00
85d62459a6 1. Adjust the logic that decides when to send to the AI message queue 2025-08-27 21:27:16 +08:00
704482ae0e 1. Send AI messages via RabbitMQ 2025-08-27 19:28:48 +08:00
a7a17667e4 1. Send a Start event before every SSE message push 2025-07-28 19:20:11 +08:00
097145dfc6 1. Do not send a message when the SSE connection does not exist 2025-07-25 20:38:41 +08:00
0fbb59b18e 1. Remove unused configuration
2. Add an SSE heartbeat mechanism
2025-07-18 19:04:33 +08:00
4cd6ea0c3f 1. Add an SSE endpoint for pushing data 2025-07-18 15:38:20 +08:00
2912a29884 1. Modifications 2025-07-09 18:42:43 +08:00
51de8a63e1 1. Delay message consumption to keep threads from being saturated 2025-07-09 15:57:23 +08:00
7aceb760a2 1. Adjust the thread-pool configuration to fix threads not being reused 2025-07-08 15:19:59 +08:00
12f4059ba0 1. Update the logback-spring.xml logging configuration 2025-07-07 16:22:41 +08:00
9e72d84b7e 1. Update the logback-spring.xml logging configuration 2025-07-03 15:26:00 +08:00
24 changed files with 548 additions and 136 deletions

.gitignore vendored
View File

@@ -146,3 +146,4 @@ fabric.properties
 .idea/caches/build_file_checksums.ser
 !/.xcodemap/
+/tk-data-save.log

View File

@@ -88,7 +88,7 @@
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>r07</version>
+            <version>30.1-jre</version>
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>

View File

@@ -42,10 +42,10 @@ public class LogInterceptor {
         Object[] args = point.getArgs();
         String reqParam = "[" + StringUtils.join(args, ", ") + "]";
         // log the incoming request
-        // log.info("request startid: {}, path: {}, ip: {}, params: {}", requestId, url,
-        //         httpServletRequest.getRemoteHost(), reqParam);
-        log.info("request startid: {}, path: {}, ip: {}", requestId, url,
-                httpServletRequest.getRemoteHost());
+        log.info("request startid: {}, path: {}, ip: {}, params: {}", requestId, url,
+                httpServletRequest.getRemoteHost(), reqParam);
+        // log.info("request startid: {}, path: {}, ip: {}", requestId, url,
+        //         httpServletRequest.getRemoteHost());
         // proceed with the original method
         Object result = point.proceed();
         // log the response

View File

@@ -0,0 +1,27 @@
package com.yupi.springbootinit.common;

import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

@Component
@Slf4j
public class DelayedRabbitMQStarter implements ApplicationRunner {

    @Resource
    private RabbitListenerEndpointRegistry registry; // listener registry provided by Spring

    @Override
    public void run(ApplicationArguments args) throws Exception {
        // start the listeners after a 5-second delay
        log.info("延迟5秒启动 rabbitMQ 监听");
        Thread.sleep(5000);
        registry.getListenerContainer("hosts").start();
        registry.getListenerContainer("bigbrother").start();
    }
}

View File

@@ -0,0 +1,23 @@
package com.yupi.springbootinit.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.util.concurrent.Executor;

@Configuration
@EnableAsync
public class AsyncConfig {

    @Bean
    public Executor taskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(10); // keep in sync with the yaml settings
        executor.setMaxPoolSize(20);
        executor.setQueueCapacity(200);
        executor.setThreadNamePrefix("save-task-");
        executor.initialize();
        return executor;
    }
}
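
The pool above is exposed under the bean name taskExecutor (taken from the @Bean method name), which is how the @Async("taskExecutor") annotations later in this diff pick it up. A minimal illustrative sketch, not from the repository, of a service method opting into this pool:

import java.util.List;
import java.util.concurrent.CompletableFuture;

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import lombok.extern.slf4j.Slf4j;

@Service
@Slf4j
public class ExampleAsyncService {

    // Runs on a "save-task-" thread from the pool declared in AsyncConfig.
    @Async("taskExecutor")
    public CompletableFuture<Void> doWork(List<String> items) {
        log.info("processing {} items on {}", items.size(), Thread.currentThread().getName());
        return CompletableFuture.completedFuture(null);
    }
}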

View File

@@ -5,9 +5,11 @@ import com.fasterxml.jackson.annotation.PropertyAccessor;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
-import com.rabbitmq.client.ConnectionFactory;
+import org.springframework.amqp.core.ExchangeBuilder;
+import org.springframework.amqp.core.HeadersExchange;
 import org.springframework.amqp.core.Queue;
-import org.springframework.amqp.rabbit.core.RabbitTemplate;
+import org.springframework.amqp.rabbit.connection.ConnectionFactory;
+import org.springframework.amqp.rabbit.core.RabbitAdmin;
 import org.springframework.amqp.support.converter.Jackson2JsonMessageConverter;
 import org.springframework.amqp.support.converter.MessageConverter;
 import org.springframework.context.annotation.Bean;
@@ -15,8 +17,17 @@ import org.springframework.context.annotation.Configuration;
 @Configuration
 public class RabbitMQConfig {
     private static final String QUEUE = "HOST_INFO_QUEUE";
+    public static final String EXCHANGE_NAME = "user.headers.exchange";
+    public static final String AI_CHAT_EXCHANGE_NAME = "ai.chat.headers.exchange";
+    public static final String BIG_BROTHER_EXCHANGE_NAME = "big.brother.headers.exchange";
+    public static final String WEB_AI_EXCHANGE_NAME = "web.ai.headers.exchange";
     // create the queue
     // true: durable
     // by default the queue lives in memory and is lost when RabbitMQ restarts; declare it durable if it should survive a restart
@@ -32,6 +43,43 @@ public class RabbitMQConfig {
     //    return new Jackson2JsonMessageConverter();
     // }
+    @Bean
+    public HeadersExchange userHeadersExchange() {
+        return ExchangeBuilder.headersExchange(EXCHANGE_NAME)
+                .durable(true)
+                .build();
+    }
+    @Bean
+    public HeadersExchange aiChatHeadersExchange() {
+        return ExchangeBuilder.headersExchange(AI_CHAT_EXCHANGE_NAME)
+                .durable(true)
+                .build();
+    }
+    @Bean
+    public HeadersExchange bigBrotherHeadersExchange() {
+        return ExchangeBuilder.headersExchange(BIG_BROTHER_EXCHANGE_NAME)
+                .durable(true)
+                .build();
+    }
+    @Bean
+    public HeadersExchange webAiHeadersExchange() {
+        return ExchangeBuilder.headersExchange(WEB_AI_EXCHANGE_NAME)
+                .durable(true)
+                .build();
+    }
+    @Bean
+    public RabbitAdmin rabbitAdmin(ConnectionFactory cf) {
+        return new RabbitAdmin(cf);
+    }
     @Bean
     public MessageConverter messageConverter() {
         ObjectMapper om = new ObjectMapper();
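
The hunk above declares the headers exchanges, but the queue bindings are not visible in this compare view. A minimal sketch, assuming a hypothetical per-tenant queue name and tenant id, of how a consumer-side queue could be bound so that only messages carrying a matching tenantId header (as set by MQSender further down) are routed to it:

import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.HeadersExchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class TenantQueueBindingConfig {

    // Hypothetical per-tenant queue; the real queue names are not shown in this diff.
    @Bean
    public Queue tenantAiChatQueue() {
        return QueueBuilder.durable("AI_CHAT_QUEUE_42").build();
    }

    // Route only messages whose "tenantId" header equals 42 to this queue.
    @Bean
    public Binding tenantAiChatBinding(Queue tenantAiChatQueue, HeadersExchange aiChatHeadersExchange) {
        return BindingBuilder.bind(tenantAiChatQueue)
                .to(aiChatHeadersExchange)
                .where("tenantId").matches(42L);
    }
}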

View File

@@ -6,6 +6,7 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory; import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate; import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializer; import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer; import org.springframework.data.redis.serializer.StringRedisSerializer;
@@ -13,25 +14,24 @@ import org.springframework.data.redis.serializer.StringRedisSerializer;
 public class RedisConfig {
-    @Bean(name="redisTemplate")
-    public RedisTemplate<String, String> redisTemplate(RedisConnectionFactory factory) {
-        RedisTemplate<String, String> template = new RedisTemplate<>();
-        RedisSerializer<String> redisSerializer = new StringRedisSerializer();
+    @Bean
+    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
+        RedisTemplate<String, Object> template = new RedisTemplate<>();
         template.setConnectionFactory(factory);
-        // key serializer
-        template.setKeySerializer(redisSerializer);
-        // value serializer
-        template.setValueSerializer(redisSerializer);
-        // hash value serializer
-        template.setHashValueSerializer(redisSerializer);
-        // hash key serializer
-        template.setHashKeySerializer(redisSerializer);
-        //
-        template.afterPropertiesSet();
+        // use a JSON serializer for values
+        GenericJackson2JsonRedisSerializer jsonSerializer = new GenericJackson2JsonRedisSerializer();
+        template.setKeySerializer(new StringRedisSerializer());
+        template.setValueSerializer(jsonSerializer);
+        template.setHashKeySerializer(new StringRedisSerializer());
+        template.setHashValueSerializer(jsonSerializer);
         return template;
     }
 }

View File

@@ -2,15 +2,12 @@ package com.yupi.springbootinit.controller;
 import com.yupi.springbootinit.common.BaseResponse;
 import com.yupi.springbootinit.common.ResultUtils;
-import com.yupi.springbootinit.model.entity.NewHosts;
 import com.yupi.springbootinit.model.entity.ServerBigBrother;
 import com.yupi.springbootinit.rabbitMQ.MQSender;
-import com.yupi.springbootinit.service.HostInfoService;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.web.bind.annotation.*;
 import javax.annotation.Resource;
-import java.util.List;
 /*
  * @author: ziin

View File

@@ -0,0 +1,42 @@
package com.yupi.springbootinit.controller;

import cn.hutool.json.JSONUtil;
import com.yupi.springbootinit.model.entity.NewHosts;
import com.yupi.springbootinit.utils.SseEmitterUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

@Slf4j
@RestController
@RequestMapping("/sse")
public class SseController {

    /**
     * Create a connection.
     */
    @GetMapping("/connect/{tenantID}/{userId}")
    public SseEmitter connect(@PathVariable String tenantID, @PathVariable String userId) {
        return SseEmitterUtil.connect(tenantID + "-" + userId);
    }

    /**
     * Close a connection.
     */
    @GetMapping("/close/{tenantID}/{userId}")
    public void close(@PathVariable String tenantID, @PathVariable String userId) {
        SseEmitterUtil.removeUser(tenantID + "-" + userId);
    }

    @GetMapping("/sse")
    public void sse(String connectId, String message) {
        // build the message body to push
        NewHosts newHosts = new NewHosts();
        newHosts.setId(123123213L);
        newHosts.setHostsId(message);
        SseEmitterUtil.sendMessage(connectId, JSONUtil.toJsonStr(newHosts));
    }
}

View File

@@ -86,4 +86,6 @@ public class HostInfoDTO {
     @ApiModelProperty(value = "入库人", example = "1001")
     private Integer creator;
 }

View File

@@ -75,6 +75,9 @@ public class NewHosts {
     @ApiModelProperty(value = "主播国家", example = "中国")
     private String country;
+    @ApiModelProperty(value = "主播国家英文", example = "China")
+    private String countryEng;
     /**
      * Live-stream type: entertainment or game
      */

View File

@@ -13,6 +13,7 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 import com.fasterxml.jackson.datatype.jsr310.deser.LocalDateDeserializer;
 import com.fasterxml.jackson.datatype.jsr310.ser.LocalDateTimeSerializer;
+import io.swagger.models.auth.In;
 import lombok.Data;
 import org.springframework.format.annotation.DateTimeFormat;
@@ -99,8 +100,8 @@ public class ServerBigBrother {
     /**
      * id of the account this record belongs to
      */
-    @TableField(value = "owner_id")
-    private String ownerId;
+    @TableField(value = "user_id")
+    private Long userId;
     /**
      * tenant Id
@@ -109,7 +110,11 @@ public class ServerBigBrother {
     private Long tenantId;
     @TableField(value = "create_time")
     private LocalDateTime createTime;
+    @TableField(value = "fans_level")
+    private Integer fansLevel;
+    @TableField(value = "secUid")
+    private String secUid;
 }

View File

@@ -12,6 +12,7 @@ import com.yupi.springbootinit.service.ServerBigBrotherService;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.amqp.core.Message;
 import org.springframework.amqp.rabbit.annotation.RabbitListener;
+import org.springframework.context.annotation.Lazy;
 import org.springframework.scheduling.annotation.Async;
 import org.springframework.stereotype.Service;
@@ -44,8 +45,8 @@ public class MQReceiver {
     //    }
     // }
-    @RabbitListener(queues = "HOST_INFO_QUEUE")
-    @Async
+    @RabbitListener(queues = "HOST_INFO_QUEUE", id = "hosts", autoStartup = "false")
+    @Async("taskExecutor")
     public void receive(List<NewHosts> hosts, Channel channel, Message message) throws IOException {
         long deliveryTag = message.getMessageProperties().getDeliveryTag();
         try {
@@ -61,8 +62,8 @@ public class MQReceiver {
         }
     }
-    @RabbitListener(queues = "BIG_BROTHER_QUEUE")
-    @Async
+    @RabbitListener(queues = "BIG_BROTHER_QUEUE", id = "bigbrother", autoStartup = "false")
+    @Async("taskExecutor")
     public void bigBrotherReceive(ServerBigBrother bigBrotherList, Channel channel, Message message) throws IOException {
         long deliveryTag = message.getMessageProperties().getDeliveryTag();
         try {
@@ -74,6 +75,7 @@ public class MQReceiver {
         } catch (Exception e) {
             channel.basicNack(deliveryTag, false, false);
             log.error("消息消费失败");
+            log.error(e.getMessage());
             throw new BusinessException(ErrorCode.QUEUE_CONSUMPTION_FAILURE);
         }
     }

View File

@@ -2,13 +2,19 @@ package com.yupi.springbootinit.rabbitMQ;
 import cn.hutool.core.date.DateTime;
 import com.yupi.springbootinit.common.ErrorCode;
+import com.yupi.springbootinit.config.RabbitMQConfig;
 import com.yupi.springbootinit.exception.BusinessException;
 import com.yupi.springbootinit.model.entity.NewHosts;
 import com.yupi.springbootinit.model.entity.ServerBigBrother;
+import io.swagger.models.auth.In;
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.springframework.amqp.core.HeadersExchange;
+import org.springframework.amqp.rabbit.core.RabbitAdmin;
 import org.springframework.amqp.rabbit.core.RabbitTemplate;
 import org.springframework.amqp.support.converter.Jackson2JsonMessageConverter;
 import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Component;
 import org.springframework.stereotype.Service;
 import javax.annotation.Resource;
@@ -16,7 +22,8 @@ import java.time.LocalDateTime;
 import java.util.List;
 @Slf4j
-@Service
+@Component
+@RequiredArgsConstructor
 public class MQSender {
     @Resource
@@ -33,6 +40,7 @@ public class MQSender {
             throw new BusinessException(ErrorCode.QUEUE_ERROR);
         }
     }
     // method: send a message
     public void bigBrotherSend(ServerBigBrother bigBrothers) {
         try {
@@ -45,4 +53,31 @@ public class MQSender {
             throw new BusinessException(ErrorCode.QUEUE_ERROR);
         }
     }
+    public void send(Long tenantId, Object payload) {
+        // send the message with the tenantId in a header
+        rabbitTemplate.convertAndSend(RabbitMQConfig.AI_CHAT_EXCHANGE_NAME, "", payload, m -> {
+            m.getMessageProperties().getHeaders().put("tenantId", tenantId);
+            return m;
+        });
+    }
+    public void brotherSend(Long tenantId, Object payload) {
+        // send the message with the tenantId in a header
+        rabbitTemplate.convertAndSend(RabbitMQConfig.BIG_BROTHER_EXCHANGE_NAME, "", payload, m -> {
+            m.getMessageProperties().getHeaders().put("tenantId", tenantId);
+            return m;
+        });
+    }
+    public void webAISend(Long tenantId, Object payload) {
+        // send the message with the tenantId in a header
+        rabbitTemplate.convertAndSend(RabbitMQConfig.WEB_AI_EXCHANGE_NAME, "", payload, m -> {
+            m.getMessageProperties().getHeaders().put("tenantId", tenantId);
+            return m;
+        });
+    }
 }

View File

@@ -1,27 +1,29 @@
 package com.yupi.springbootinit.service.impl;
 import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
-import com.google.common.collect.Lists;
 import com.yupi.springbootinit.mapper.CountryInfoMapper;
 import com.yupi.springbootinit.mapper.NewHostsMapper;
 import com.yupi.springbootinit.model.dto.host.QueryCountDTO;
 import com.yupi.springbootinit.model.entity.NewHosts;
 import com.yupi.springbootinit.model.vo.country.CountryInfoVO;
+import com.yupi.springbootinit.rabbitMQ.MQSender;
 import com.yupi.springbootinit.service.HostInfoService;
+import com.yupi.springbootinit.utils.JsonUtils;
+import com.yupi.springbootinit.utils.RedisUtils;
+import com.yupi.springbootinit.utils.SseEmitterUtil;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.redis.core.RedisTemplate;
-import org.springframework.data.redis.core.StringRedisTemplate;
+import org.springframework.scheduling.annotation.Async;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.StopWatch;
+import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
 import javax.annotation.Resource;
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 /*
  * @author: ziin
  * @date: 2025/6/10 19:04
@@ -39,6 +41,13 @@ public class HostInfoServiceImpl extends ServiceImpl<NewHostsMapper, NewHosts> i
     private RedisTemplate redisTemplate;
+    @Resource
+    private RedisUtils redisUtils;
+    @Resource
+    private MQSender mqSender;
     //
     //    private final RedisTemplate<String,Object> redisTemplate;
     //
@@ -48,18 +57,47 @@ public class HostInfoServiceImpl extends ServiceImpl<NewHostsMapper, NewHosts> i
     @Override
+    @Async("taskExecutor")
     public CompletableFuture<Void> saveHostInfo(List<NewHosts> newHosts) {
         try {
             StopWatch stopWatch = new StopWatch();
             stopWatch.start();
             saveBatch(newHosts);
+            NewHosts sseConnectConfirmInfo = newHosts.get(0);
+            SseEmitter sseEmitter = SseEmitterUtil.getSseEmitter(sseConnectConfirmInfo.getTenantId().toString() + "-" + sseConnectConfirmInfo.getUserId().toString());
+            if (sseEmitter != null) {
+                NewHosts forChannelInfo = newHosts.get(0);
+                SseEmitterUtil.sendMessage(forChannelInfo.getTenantId().toString() + "-" + forChannelInfo.getUserId().toString(), "start");
+                newHosts.forEach(newHost -> {
+                    SseEmitterUtil.sendMessage(newHost.getTenantId().toString() + "-" + newHost.getUserId().toString(),
+                            JsonUtils.toJsonString(newHost));
+                });
+            }
+            // Boolean o = (Boolean) redisTemplate.opsForValue().get("ai_login:" + newHosts.get(0).getTenantId());
+            if (redisUtils.hasKeyByPrefix("ai_login:" + newHosts.get(0).getTenantId())) {
+                newHosts.forEach(newHost -> {
+                    mqSender.send(newHost.getTenantId(), newHost);
+                });
+                log.info("发送消息到队列{}, 消息数量: {}", newHosts.get(0).getTenantId(), newHosts.size());
+            }
+            if (redisUtils.hasKeyByPrefix("webAI_login:" + newHosts.get(0).getTenantId())) {
+                newHosts.forEach(newHost -> {
+                    mqSender.webAISend(newHost.getTenantId(), newHost);
+                });
+                log.info("发送消息到队列WebAI{}, 消息数量: {}", newHosts.get(0).getTenantId(), newHosts.size());
+            }
             stopWatch.stop();
             long totalTimeMillis = stopWatch.getTotalTimeMillis();
             log.info("当前存储数据量大小 {}, 存储花费: {}ms", newHosts.size(), totalTimeMillis);
             return CompletableFuture.completedFuture(null);
         } catch (Exception e) {
             // wrap the exception into the Future so the caller can handle it
+            log.error(e.getMessage());
             return CompletableFuture.failedFuture(e);
         }
     }
@@ -110,7 +148,6 @@ public class HostInfoServiceImpl extends ServiceImpl<NewHostsMapper, NewHosts> i
     @Override
     public void queryCount(QueryCountDTO queryCountDTO) {
         redisTemplate.opsForValue().increment("tkaccount:" + queryCountDTO.getTkAccount(), 1);
     }

View File

@@ -1,7 +1,10 @@
 package com.yupi.springbootinit.service.impl;
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.core.toolkit.Wrappers;
 import com.baomidou.mybatisplus.extension.conditions.query.LambdaQueryChainWrapper;
+import com.yupi.springbootinit.rabbitMQ.MQSender;
+import com.yupi.springbootinit.utils.RedisUtils;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.scheduling.annotation.Async;
 import org.springframework.stereotype.Service;
@@ -13,6 +16,8 @@ import com.yupi.springbootinit.model.entity.ServerBigBrother;
 import com.yupi.springbootinit.service.ServerBigBrotherService;
 import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.StopWatch;
+import javax.annotation.Resource;
 /*
  * @author: ziin
  * @date: 2025/6/24 16:19
@@ -22,17 +27,27 @@ import org.springframework.util.StopWatch;
 @Slf4j
 public class ServerBigBrotherServiceImpl extends ServiceImpl<ServerBigBrotherMapper, ServerBigBrother> implements ServerBigBrotherService{
+    @Resource
+    private RedisUtils redisUtils;
+    @Resource
+    private MQSender mqSender;
     @Override
     @Transactional(rollbackFor = Exception.class)
+    @Async("taskExecutor")
     public void saveData(ServerBigBrother bigBrother) {
         StopWatch stopWatch = new StopWatch();
         stopWatch.start();
-        ServerBigBrother serverBigBrother = baseMapper.selectOne(Wrappers.<ServerBigBrother>lambdaQuery()
-                .eq(ServerBigBrother::getDisplayId, bigBrother.getDisplayId())
-                .eq(ServerBigBrother::getTenantId, bigBrother.getTenantId()));
+        QueryWrapper<ServerBigBrother> queryWrapper = new QueryWrapper<>();
+        queryWrapper.eq("display_id", bigBrother.getDisplayId())
+                .eq("tenant_id", bigBrother.getTenantId());
+        ServerBigBrother serverBigBrother = baseMapper.selectOne(queryWrapper);
         if(serverBigBrother == null){
             save(bigBrother);
+            if (redisUtils.hasKeyByPrefix("bigbrother_login:" + bigBrother.getTenantId())) {
+                mqSender.brotherSend(bigBrother.getTenantId(), bigBrother);
+                log.info("发送消息到队列{}, 大哥 Id: {}", bigBrother.getTenantId(), bigBrother.getDisplayId());
+            }
             stopWatch.stop();
             long totalTimeMillis = stopWatch.getTotalTimeMillis();
             log.info("当前存储花费: {}ms", totalTimeMillis);
@@ -43,6 +58,10 @@ public class ServerBigBrotherServiceImpl extends ServiceImpl<ServerBigBrotherMap
         }
         bigBrother.setTotalGiftCoins(bigBrother.getHistoricHighCoins() + serverBigBrother.getTotalGiftCoins());
         updateById(bigBrother);
+        if (redisUtils.hasKeyByPrefix("bigbrother_login:" + bigBrother.getTenantId())) {
+            mqSender.brotherSend(bigBrother.getTenantId(), bigBrother);
+            log.info("发送消息到队列{}, 大哥 Id: {}", bigBrother.getTenantId(), bigBrother.getDisplayId());
+        }
         stopWatch.stop();
         long totalTimeMillis = stopWatch.getTotalTimeMillis();
         log.info("当前更新花费: {}ms", totalTimeMillis);

View File

@@ -0,0 +1,38 @@
package com.yupi.springbootinit.utils;

/*
 * @author: ziin
 * @date: 2025/8/27 20:35
 */
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Set;

@Component
public class RedisUtils {

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    public boolean hasKeyByPrefix(String prefix) {
        return Boolean.TRUE.equals(
                redisTemplate.execute((RedisCallback<Boolean>) conn -> {
                    try (Cursor<byte[]> cursor = conn.scan(
                            ScanOptions.scanOptions()
                                    .match(prefix + ":*")
                                    .count(100) // keys fetched per SCAN iteration; tunable
                                    .build())) {
                        return cursor.hasNext(); // true as soon as one matching key exists
                    }
                })
        );
    }
}
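
For orientation, a small usage sketch of hasKeyByPrefix as it is called from the service layer above: the prefix is expanded to prefix + ":*", so the method reports true only when at least one key with a further suffix exists. The concrete key names below are assumptions for illustration, not taken from this diff:

import javax.annotation.Resource;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

@Component
public class RedisUtilsUsageSketch {

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    @Resource
    private RedisUtils redisUtils;

    public void demo() {
        // Assume the login side stored a key such as "ai_login:42:1001" (key layout is an assumption).
        redisTemplate.opsForValue().set("ai_login:42:1001", Boolean.TRUE);

        boolean a = redisUtils.hasKeyByPrefix("ai_login:42"); // true: "ai_login:42:*" matches the key above
        boolean b = redisUtils.hasKeyByPrefix("ai_login:7");  // false: no "ai_login:7:*" key exists
    }
}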

View File

@@ -4,9 +4,6 @@ import org.apache.commons.lang3.StringUtils;
 /**
  * SQL utilities
- *
- * @author <a href="https://github.com/liyupi">程序员鱼皮</a>
- * @from <a href="https://yupi.icu">编程导航知识星球</a>
  */
 public class SqlUtils {

View File

@@ -0,0 +1,155 @@
package com.yupi.springbootinit.utils;

import lombok.extern.slf4j.Slf4j;
import org.springframework.http.MediaType;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.function.Consumer;

/**
 * Utility class for long-lived SSE connections.
 */
@Slf4j
public class SseEmitterUtil {

    private static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);

    /**
     * Keyed by userId so the matching SseEmitter can be looked up (could also be kept in Redis).
     */
    private final static Map<String, SseEmitter> sseEmitterMap = new ConcurrentHashMap<>();

    public static SseEmitter connect(String userId) {
        // A timeout of 0 means the connection never expires. The default is 30s;
        // a request still unfinished after the timeout throws AsyncRequestTimeoutException.
        SseEmitter sseEmitter = new SseEmitter(0L);
        // register callbacks
        sseEmitter.onCompletion(completionCallBack(userId));
        sseEmitter.onError(errorCallBack(userId));
        sseEmitter.onTimeout(timeoutCallBack(userId));
        sseEmitterMap.put(userId, sseEmitter);
        log.info("创建新的 SSE 连接,当前用户 {}, 连接总数 {}", userId, sseEmitterMap.size());

        // schedule a periodic heartbeat task
        ScheduledFuture<?> heartbeat = scheduler.scheduleAtFixedRate(() -> {
            try {
                // send a heartbeat comment so proxies and browsers keep the connection open
                sseEmitter.send(SseEmitter.event().comment("heartbeat"));
            } catch (IOException e) {
                sseEmitter.completeWithError(e);
            }
        }, 0, 10, TimeUnit.SECONDS); // every 10 seconds
        return sseEmitter;
    }

    /**
     * Send a message to a specific user.
     *
     * @param connectId  target connection id
     * @param sseMessage message body
     */
    public static void sendMessage(String connectId, String sseMessage) {
        if (sseEmitterMap.containsKey(connectId)) {
            try {
                sseEmitterMap.get(connectId).send(sseMessage);
                log.info("用户 {} 推送消息 {}", connectId, sseMessage);
            } catch (IOException e) {
                log.error("用户 {} 推送消息异常", connectId, e);
                removeUser(connectId);
            }
        } else {
            log.error("消息推送 用户 {} 不存在,链接总数 {}", connectId, sseEmitterMap.size());
        }
    }

    /**
     * Send a message to a list of users.
     */
    public static void batchSendMessage(String message, List<String> ids) {
        ids.forEach(userId -> sendMessage(userId, message));
    }

    /**
     * Broadcast a message to everyone.
     */
    public static void batchSendMessage(String message) {
        sseEmitterMap.forEach((k, v) -> {
            try {
                v.send(message, MediaType.APPLICATION_JSON);
            } catch (IOException e) {
                log.error("用户 {} 推送异常", k, e);
                removeUser(k);
            }
        });
    }

    /**
     * Remove a user connection.
     *
     * @param userId user ID
     */
    public static void removeUser(String userId) {
        if (sseEmitterMap.containsKey(userId)) {
            sseEmitterMap.get(userId).complete();
            sseEmitterMap.remove(userId);
            log.info("移除用户 {}, 剩余连接 {}", userId, sseEmitterMap.size());
        } else {
            log.error("消息推送 用户 {} 已被移除,剩余连接 {}", userId, sseEmitterMap.size());
        }
    }

    /**
     * List the current connections.
     *
     * @return ids of all connected users
     */
    public static List<String> getIds() {
        return new ArrayList<>(sseEmitterMap.keySet());
    }

    /**
     * Get the current connection count.
     *
     * @return number of active connections
     */
    public static int getUserCount() {
        return sseEmitterMap.size();
    }

    public static SseEmitter getSseEmitter(String userId) {
        return sseEmitterMap.get(userId);
    }

    private static Runnable completionCallBack(String userId) {
        return () -> {
            log.info("用户 {} 结束连接", userId);
        };
    }

    private static Runnable timeoutCallBack(String userId) {
        return () -> {
            log.error("用户 {} 连接超时", userId);
            removeUser(userId);
        };
    }

    private static Consumer<Throwable> errorCallBack(String userId) {
        return throwable -> {
            log.error("用户 {} 连接异常", userId);
            removeUser(userId);
        };
    }
}
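
For reference, a minimal sketch of a client consuming the stream exposed by SseController; the address, tenantID, and userId values are placeholders, and the JDK 11 HttpClient is used purely for illustration:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SseClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Assumed local address; tenantID=42 and userId=1001 are placeholders.
        HttpRequest request = HttpRequest.newBuilder(
                        URI.create("http://localhost:8080/sse/connect/42/1001"))
                .GET()
                .build();
        // Each SSE frame arrives as lines such as "data:{...}" followed by a blank line.
        client.send(request, HttpResponse.BodyHandlers.ofLines())
                .body()
                .forEach(System.out::println);
    }
}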

View File

@@ -27,10 +27,7 @@ spring:
     listener:
       simple:
         acknowledge-mode: manual
-mybatis-plus:
-  configuration:
-    # disable SQL logging in production
-    log-impl: ''
 # API documentation configuration
 knife4j:
   basic:

View File

@@ -60,7 +60,7 @@ server:
 mybatis-plus:
   configuration:
     map-underscore-to-camel-case: false
-    log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl
+    log-impl: org.apache.ibatis.logging.slf4j.Slf4jImpl
     default-executor-type: batch
   global-config:
     banner: false
@@ -83,24 +83,6 @@
     api-rule-resources:
       - com.yupi.springbootinit.controller
-############## Sa-Token configuration (docs: https://sa-token.cc) ##############
-sa-token:
-  # token name (also the cookie name)
-  token-name: vvtoken
-  # token time-to-live in seconds; default 30 days, -1 means it never expires
-  timeout: 2592000
-  # minimum token activity interval in seconds; a token idle longer than this is frozen; default -1 means never frozen
-  active-timeout: -1
-  # whether the same account may be logged in from multiple places at once (true: allow; false: a new login kicks out the old one)
-  is-concurrent: false
-  # when several logins share one account, whether they share a single token (true: one shared token; false: a new token per login)
-  is-share: false
-  # token style; options: uuid, simple-uuid, random-32, random-64, random-128, tik
-  token-style: random-128
-  # whether to log operations
-  is-log: true
 logging:
   level:
     org.mybatis: off

View File

@@ -1,31 +1,23 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <configuration scan="true" scanPeriod="60 seconds">
-    <!-- logging context name -->
     <contextName>TK-DataSave</contextName>
-    <!-- log output patterns -->
     <property name="PATTERN_DEFAULT" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | %highlight(${LOG_LEVEL_PATTERN:-%5p} ${PID:- }) | %boldYellow(%thread) %boldGreen(%-40.40logger{39}) | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%ex}"/>
     <property name="FILE_PATTERN" value="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} | ${LOG_LEVEL_PATTERN:-%5p} ${PID:- } | %thread %-40.40logger{39} | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%ex}"/>
-    <!-- <property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n"/>-->
-    <!-- log file path -->
-    <property name="LOG_FILE_PATH" value="${LOG_FILE:-${user.dir}}/tk-data-save"/>
-    <!-- console output -->
+    <property name="LOG_FILE_PATH" value="${LOG_FILE:-${user.dir}/tk-data-save}"/>
     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
         <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
             <pattern>${PATTERN_DEFAULT}</pattern>
         </encoder>
     </appender>
-    <!-- file output (rolling policy: by time) -->
     <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <file>${LOG_FILE_PATH}.log</file>
-        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${LOG_FILE_PATH}-%d{yyyy-MM-dd}.log</fileNamePattern>
-            <maxHistory>7</maxHistory>
-            <totalSizeCap>10MB</totalSizeCap>
+        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <fileNamePattern>${LOGBACK_ROLLINGPOLICY_FILE_NAME_PATTERN:-${LOG_FILE_PATH}.%d{yyyy-MM-dd}.%i.gz}</fileNamePattern>
+            <maxHistory>${LOGBACK_ROLLINGPOLICY_MAX_HISTORY:-7}</maxHistory>
+            <maxFileSize>${LOGBACK_ROLLINGPOLICY_MAX_FILE_SIZE:-10MB}</maxFileSize>
         </rollingPolicy>
         <encoder>
             <pattern>${FILE_PATTERN}</pattern>
@@ -33,36 +25,35 @@
         </encoder>
     </appender>
-    <!-- dev profile -->
     <springProfile name="dev">
         <root level="INFO">
             <appender-ref ref="STDOUT"/>
             <appender-ref ref="FILE"/>
         </root>
-        <!-- project package log level -->
-        <logger name="com.yupi.springbootinit" level="DEBUG" additivity="false">
+        <logger name="com.yupi.springbootinit" level="INFO" additivity="false">
             <appender-ref ref="STDOUT"/>
         </logger>
-        <!-- MyBatis log level -->
-        <logger name="com.baomidou.mybatisplus" level="INFO"/>
+        <logger name="com.baomidou.mybatisplus" level="INFO">
+            <appender-ref ref="FILE"/>
+            <appender-ref ref="STDOUT"/>
+        </logger>
+        <logger name="org.springframework" level="INFO">
+        </logger>
+        <logger name="org.redisson" level="INFO"/>
     </springProfile>
-    <!-- prod profile -->
     <springProfile name="prod">
         <root level="INFO">
-            <appender-ref ref="CONSOLE"/>
+            <appender-ref ref="STDOUT"/>
             <appender-ref ref="FILE"/>
         </root>
-        <!-- project package log level -->
         <logger name="com.yupi.springbootinit" level="INFO" additivity="false">
-            <appender-ref ref="CONSOLE"/>
             <appender-ref ref="FILE"/>
         </logger>
-        <!-- third-party framework log levels -->
-        <logger name="org.springframework" level="WARN"/>
-        <logger name="com.baomidou.mybatisplus" level="WARN"/>
-        <logger name="org.redisson" level="WARN"/>
+        <logger name="org.springframework" level="INFO">
+        </logger>
+        <logger name="com.baomidou.mybatisplus" level="INFO">
+        </logger>
+        <logger name="org.redisson" level="INFO"/>
     </springProfile>
 </configuration>

View File

@@ -15,15 +15,24 @@
<result column="yesterday_coins" jdbcType="INTEGER" property="yesterdayCoins" /> <result column="yesterday_coins" jdbcType="INTEGER" property="yesterdayCoins" />
<result column="country" jdbcType="VARCHAR" property="country" /> <result column="country" jdbcType="VARCHAR" property="country" />
<result column="hosts_kind" jdbcType="VARCHAR" property="hostsKind" /> <result column="hosts_kind" jdbcType="VARCHAR" property="hostsKind" />
<result column="is_assigned" jdbcType="TINYINT" property="isAssigned" />
<result column="tenant_id" jdbcType="BIGINT" property="tenantId" /> <result column="tenant_id" jdbcType="BIGINT" property="tenantId" />
<result column="creator" jdbcType="BIGINT" property="creator" /> <result column="creator" jdbcType="BIGINT" property="creator" />
<result column="create_time" jdbcType="TIMESTAMP" property="createTime" /> <result column="create_time" jdbcType="TIMESTAMP" property="createTime" />
<result column="uid" jdbcType="TIMESTAMP" property="uid" /> <result column="updater" jdbcType="VARCHAR" property="updater" />
<result column="update_time" jdbcType="TIMESTAMP" property="updateTime" />
<result column="user_id" jdbcType="BIGINT" property="userId" />
<result column="deleted" jdbcType="TINYINT" property="deleted" />
<result column="uid" jdbcType="VARCHAR" property="uid" />
<result column="ai_operation" jdbcType="TINYINT" property="aiOperation" />
<result column="operation_status" jdbcType="TINYINT" property="operationStatus" />
<result column="country_eng" jdbcType="VARCHAR" property="countryEng" />
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
<!--@mbg.generated--> <!--@mbg.generated-->
id, hosts_id, hosts_level, hosts_coins, Invitation_type, online_fans,fans, fllowernum, yesterday_coins, id, hosts_id, hosts_level, hosts_coins, Invitation_type, online_fans, fans, fllowernum,
country, hosts_kind, tenant_id, creator, create_time, updater, update_time,uid yesterday_coins, country, hosts_kind, is_assigned, tenant_id, creator, create_time,
updater, update_time, user_id, deleted, `uid`, ai_operation, operation_status,country_eng
</sql> </sql>
<select id="selectByPrimaryKey" parameterType="java.lang.Long" resultMap="BaseResultMap"> <select id="selectByPrimaryKey" parameterType="java.lang.Long" resultMap="BaseResultMap">
<!--@mbg.generated--> <!--@mbg.generated-->
@@ -42,12 +51,12 @@
insert into server_new_hosts (hosts_id, hosts_level, hosts_coins, insert into server_new_hosts (hosts_id, hosts_level, hosts_coins,
Invitation_type, online_fans,fans, fllowernum, Invitation_type, online_fans,fans, fllowernum,
yesterday_coins, country, hosts_kind, yesterday_coins, country, hosts_kind,
tenant_id, creator,create_time,uid tenant_id, creator,create_time,uid,country_eng
) )
values (#{hostsId,jdbcType=VARCHAR}, #{hostsLevel,jdbcType=VARCHAR}, #{hostsCoins,jdbcType=INTEGER}, values (#{hostsId,jdbcType=VARCHAR}, #{hostsLevel,jdbcType=VARCHAR}, #{hostsCoins,jdbcType=INTEGER},
#{invitationType,jdbcType=INTEGER}, #{onlineFans,jdbcType=INTEGER},#{fans,jdbcType=INTEGER}, #{fllowernum,jdbcType=INTEGER}, #{invitationType,jdbcType=INTEGER}, #{onlineFans,jdbcType=INTEGER},#{fans,jdbcType=INTEGER}, #{fllowernum,jdbcType=INTEGER},
#{yesterdayCoins,jdbcType=INTEGER}, #{country,jdbcType=VARCHAR}, #{hostsKind,jdbcType=VARCHAR}, #{yesterdayCoins,jdbcType=INTEGER}, #{country,jdbcType=VARCHAR}, #{hostsKind,jdbcType=VARCHAR},
#{tenantId,jdbcType=BIGINT}, #{creator,jdbcType=BIGINT},#{createTime,jdbcType=TIMESTAMP},#{uid,jdbcType=VARCHAR}) #{tenantId,jdbcType=BIGINT}, #{creator,jdbcType=BIGINT},#{createTime,jdbcType=TIMESTAMP},#{uid,jdbcType=VARCHAR},#{countryEng,jdbcType=VARCHAR})
</insert> </insert>
<insert id="insertSelective" keyColumn="id" keyProperty="id" parameterType="com.yupi.springbootinit.model.entity.NewHosts" useGeneratedKeys="true"> <insert id="insertSelective" keyColumn="id" keyProperty="id" parameterType="com.yupi.springbootinit.model.entity.NewHosts" useGeneratedKeys="true">
<!--@mbg.generated--> <!--@mbg.generated-->
@@ -193,14 +202,14 @@
<!--@mbg.generated--> <!--@mbg.generated-->
insert into server_new_hosts insert into server_new_hosts
(hosts_id, hosts_level, hosts_coins, Invitation_type, online_fans,fans, fllowernum, yesterday_coins, (hosts_id, hosts_level, hosts_coins, Invitation_type, online_fans,fans, fllowernum, yesterday_coins,
country, hosts_kind, tenant_id, creator, user_id,create_time,uid) country, hosts_kind, tenant_id, creator, user_id,create_time,uid,country_eng)
values values
<foreach collection="list" item="item" separator=","> <foreach collection="list" item="item" separator=",">
(#{item.hostsId,jdbcType=VARCHAR}, #{item.hostsLevel,jdbcType=VARCHAR}, #{item.hostsCoins,jdbcType=INTEGER}, (#{item.hostsId,jdbcType=VARCHAR}, #{item.hostsLevel,jdbcType=VARCHAR}, #{item.hostsCoins,jdbcType=INTEGER},
#{item.invitationType,jdbcType=INTEGER}, #{item.onlineFans,jdbcType=INTEGER},#{item.fans,jdbcType=INTEGER}, #{item.invitationType,jdbcType=INTEGER}, #{item.onlineFans,jdbcType=INTEGER},#{item.fans,jdbcType=INTEGER},
#{item.fllowernum,jdbcType=INTEGER}, #{item.yesterdayCoins,jdbcType=INTEGER}, #{item.country,jdbcType=VARCHAR}, #{item.fllowernum,jdbcType=INTEGER}, #{item.yesterdayCoins,jdbcType=INTEGER}, #{item.country,jdbcType=VARCHAR},
#{item.hostsKind,jdbcType=VARCHAR}, #{item.tenantId,jdbcType=BIGINT}, #{item.creator,jdbcType=INTEGER},#{item.userId,jdbcType=BIGINT}, #{item.hostsKind,jdbcType=VARCHAR}, #{item.tenantId,jdbcType=BIGINT}, #{item.creator,jdbcType=INTEGER},#{item.userId,jdbcType=BIGINT},
#{item.createTime,jdbcType=TIMESTAMP},#{item.uid,jdbcType=VARCHAR}) #{item.createTime,jdbcType=TIMESTAMP},#{item.uid,jdbcType=VARCHAR},#{item.countryEng,jdbcType=VARCHAR})
</foreach> </foreach>
</insert> </insert>
</mapper> </mapper>

View File

@@ -19,11 +19,13 @@
<result column="owner_id" jdbcType="VARCHAR" property="ownerId" /> <result column="owner_id" jdbcType="VARCHAR" property="ownerId" />
<result column="create_time" jdbcType="TIMESTAMP" property="createTime" /> <result column="create_time" jdbcType="TIMESTAMP" property="createTime" />
<result column="tenant_id" jdbcType="BIGINT" property="tenantId" /> <result column="tenant_id" jdbcType="BIGINT" property="tenantId" />
<result column="fans_level" jdbcType="INTEGER" property="fansLevel" />
<result column="secUid" jdbcType="VARCHAR" property="secUid" />
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
<!--@mbg.generated--> <!--@mbg.generated-->
id, display_id, user_id_str, nickname, `level`, hostcoins, follower_count, following_count, id, display_id, user_id_str, nickname, `level`, hostcoins, follower_count, following_count,
region, historic_high_coins, total_gift_coins, host_display_id, owner_id, create_time, region, historic_high_coins, total_gift_coins, host_display_id, owner_id, create_time,
update_time, creator, updater, deleted, tenant_id update_time, creator, updater, deleted, tenant_id,fans_level,secUid
</sql> </sql>
</mapper> </mapper>