Merge remote-tracking branch 'origin/master'

master
wyt 6 months ago
commit 225a56c1ad

@ -1,23 +1,37 @@
package com.aurora.config;
/// 主要负责应用的配置与初始化工作,它不直接处理业务逻辑,而是为整个应用提供运行所需的基础设施、组件行为、安全策略、数据访问方式等配置
//配置 Spring 的异步任务执行器(线程池),使得应用能够支持 @Async注解实现方法的异步调用
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@EnableAsync // enables Spring's @Async method-execution support
@Configuration // marks this class as a source of Spring bean definitions
public class AsyncConfig {

    /**
     * Configures the thread pool backing {@code @Async} method execution.
     *
     * @return a ThreadPoolTaskExecutor with bounded pool and queue sizes
     */
    @Bean
    public TaskExecutor taskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        // keep 10 threads alive at all times
        executor.setCorePoolSize(10);
        // grow up to 20 threads once the queue is full
        executor.setMaxPoolSize(20);
        // buffer up to 20 pending tasks before growing past the core size
        executor.setQueueCapacity(20);
        // reclaim idle non-core threads after 60 seconds
        executor.setKeepAliveSeconds(60);
        // name threads for easier identification in dumps/logs
        executor.setThreadNamePrefix("async-task-thread-");
        return executor;
    }
}

@ -1,18 +1,28 @@
package com.aurora.config;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
import java.util.TimeZone;
import static com.aurora.enums.ZoneEnum.SHANGHAI;
/**
 * Sets the JVM-wide default time zone for the whole application at startup.
 */
@Configuration
public class GlobalZoneConfig {

    /**
     * Runs once after this Spring bean is constructed and switches the
     * process default time zone to the configured Shanghai zone.
     */
    @PostConstruct
    public void setGlobalZone() {
        TimeZone shanghai = TimeZone.getTimeZone(SHANGHAI.getZone());
        TimeZone.setDefault(shanghai);
    }
}

@ -8,53 +8,93 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static com.aurora.constant.RabbitMQConstant.*;
//用于实现消息的发布与订阅
/**
* RabbitMQ
*
*/
@Configuration
public class RabbitMQConfig {
/**
* Bean
* @return Queue
*/
@Bean
public Queue articleQueue() {
return new Queue(MAXWELL_QUEUE, true);
}
/**
* MaxWellBean
* @return FanoutExchange Fanout
*/
@Bean
public FanoutExchange maxWellExchange() {
return new FanoutExchange(MAXWELL_EXCHANGE, true, false);
}
/**
* MaxWell
* @return Binding MaxWell
*/
@Bean
public Binding bindingArticleDirect() {
return BindingBuilder.bind(articleQueue()).to(maxWellExchange());
}
/**
* Bean
* @return Queue
*/
@Bean
public Queue emailQueue() {
return new Queue(EMAIL_QUEUE, true);
}
/**
* Bean
* @return FanoutExchange Fanout
*/
@Bean
public FanoutExchange emailExchange() {
return new FanoutExchange(EMAIL_EXCHANGE, true, false);
}
/**
*
* @return Binding
*/
@Bean
public Binding bindingEmailDirect() {
return BindingBuilder.bind(emailQueue()).to(emailExchange());
}
/**
* Bean
* @return Queue
*/
@Bean
public Queue subscribeQueue() {
return new Queue(SUBSCRIBE_QUEUE, true);
}
/**
* Bean
* @return FanoutExchange Fanout
*/
@Bean
public FanoutExchange subscribeExchange() {
return new FanoutExchange(SUBSCRIBE_EXCHANGE, true, false);
}
/**
*
* @return Binding
*/
@Bean
public Binding bindingSubscribeDirect() {
return BindingBuilder.bind(subscribeQueue()).to(subscribeExchange());
}
}
}

@ -1,5 +1,7 @@
package com.aurora.config;
//配置 Spring Data Redis 的 RedisTemplate用于以更加灵活和可读的方式操作 Redis尤其是对存储对象进行序列化
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator;
@ -10,24 +12,42 @@ import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
 * Redis configuration exposing a {@link RedisTemplate} with string-serialized
 * keys and JSON-serialized values.
 */
@Configuration
public class RedisConfig {

    /**
     * Builds the application-wide RedisTemplate.
     *
     * @param factory the Redis connection factory supplied by Spring
     * @return a RedisTemplate with String keys and JSON-serialized Object values
     */
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        // JSON serializer for values; embeds type info so objects round-trip
        Jackson2JsonRedisSerializer<Object> valueSerializer = new Jackson2JsonRedisSerializer<>(Object.class);
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance, ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
        valueSerializer.setObjectMapper(objectMapper);

        // plain string serializer for all keys (top-level and hash keys)
        StringRedisSerializer keySerializer = new StringRedisSerializer();

        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        template.setKeySerializer(keySerializer);
        template.setHashKeySerializer(keySerializer);
        template.setValueSerializer(valueSerializer);
        template.setHashValueSerializer(valueSerializer);
        template.afterPropertiesSet();
        return template;
    }
}

@ -4,18 +4,37 @@ import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
 * Configuration properties for MinIO object storage, bound from the
 * {@code upload.minio} prefix of the application configuration.
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "upload.minio")
public class MinioProperties {

    /**
     * Public base URL used to build access links for uploaded files.
     */
    private String url;

    /**
     * MinIO server endpoint address.
     */
    private String endpoint;

    /**
     * Access key used to authenticate with MinIO.
     */
    private String accessKey;

    /**
     * Secret key used to authenticate with MinIO.
     */
    private String secretKey;

    /**
     * Name of the bucket where uploaded files are stored.
     */
    private String bucketName;
}

@ -17,50 +17,98 @@ import java.util.Date;
import static com.aurora.constant.CommonConstant.ONE;
import static com.aurora.constant.CommonConstant.ZERO;
/**
 * Base Quartz job that wraps task execution with timing and persistent
 * execution logging.
 * <p>
 * Subclasses implement {@link #doExecute} with the concrete task logic;
 * this class records start/end time and inserts a {@code JobLog} row for
 * every run, successful or failed.
 */
public abstract class AbstractQuartzJob implements org.quartz.Job {

    private static final Logger log = LoggerFactory.getLogger(AbstractQuartzJob.class);

    /**
     * Holds the per-thread task start time between before() and after().
     */
    private static final ThreadLocal<Date> THREAD_LOCAL = new ThreadLocal<>();

    /**
     * Quartz entry point: copies the task properties from the merged job
     * data map into a {@code Job} entity, then runs the
     * before / doExecute / after life cycle, routing any exception to the
     * failure branch of {@link #after}.
     *
     * @param context the Quartz execution context
     * @throws JobExecutionException declared for the Quartz Job contract
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // extract the task properties from the context into a Job entity
        Job job = new Job();
        BeanUtils.copyProperties(context.getMergedJobDataMap().get(ScheduleConstant.TASK_PROPERTIES), job);
        try {
            before(context, job);      // record the start time
            doExecute(context, job);   // concrete task logic (subclass)
            after(context, job, null); // persist a success log entry
        } catch (Exception e) {
            log.error("任务执行异常:", e);
            after(context, job, e);    // persist a failure log entry
        }
    }

    /**
     * Pre-execution hook: remembers the start time in a ThreadLocal so it
     * survives until {@link #after} on the same worker thread.
     *
     * @param context the Quartz execution context
     * @param job     the task being executed
     */
    protected void before(JobExecutionContext context, Job job) {
        THREAD_LOCAL.set(new Date());
    }

    /**
     * Post-execution hook: builds a {@code JobLog} with identity, timing,
     * status and (on failure) the exception trace, then inserts it.
     *
     * @param context the Quartz execution context
     * @param job     the task that was executed
     * @param e       the failure, or {@code null} on success
     */
    protected void after(JobExecutionContext context, Job job, Exception e) {
        Date startTime = THREAD_LOCAL.get();
        THREAD_LOCAL.remove(); // avoid ThreadLocal leaks on pooled scheduler threads
        final JobLog jobLog = new JobLog();
        jobLog.setJobId(job.getId());
        jobLog.setJobName(job.getJobName());
        jobLog.setJobGroup(job.getJobGroup());
        jobLog.setInvokeTarget(job.getInvokeTarget());
        jobLog.setStartTime(startTime);
        jobLog.setEndTime(new Date());
        long runMs = jobLog.getEndTime().getTime() - jobLog.getStartTime().getTime();
        jobLog.setJobMessage(jobLog.getJobName() + " 总共耗时:" + runMs + "毫秒");
        if (e != null) {
            jobLog.setStatus(ZERO); // failed
            jobLog.setExceptionInfo(ExceptionUtil.getTrace(e)); // full stack trace
        } else {
            jobLog.setStatus(ONE); // succeeded
        }
        // persist the log row
        SpringUtil.getBean(JobLogMapper.class).insert(jobLog);
    }

    /**
     * Concrete task logic, supplied by subclasses.
     *
     * @param context the Quartz execution context
     * @param job     the task definition to execute
     * @throws Exception if the task fails
     */
    protected abstract void doExecute(JobExecutionContext context, Job job) throws Exception;
}

@ -29,11 +29,15 @@ import java.util.stream.Collectors;
import static com.aurora.constant.CommonConstant.UNKNOWN;
import static com.aurora.constant.RedisConstant.*;
/**
*
* 访SEOSwaggerElasticsearch
*/
@Slf4j
@Component("auroraQuartz")
public class AuroraQuartz {
@Autowired
@Autowired//注解,用于装配依赖注入,无需手动创建对象
private RedisService redisService;
@Autowired
@ -64,8 +68,15 @@ public class AuroraQuartz {
@Value("${website.url}")
private String websiteUrl;
/**
* Redis访
*
*/
public void saveUniqueView() {
// 获取当前Redis中的唯一访客数量
Long count = redisService.sSize(UNIQUE_VISITOR);
// 构造并插入新的唯一访问记录(时间为昨天)
UniqueView uniqueView = UniqueView.builder()
.createTime(LocalDateTimeUtil.offset(LocalDateTime.now(), -1, ChronoUnit.DAYS))
.viewsCount(Optional.of(count.intValue()).orElse(0))
@ -73,12 +84,21 @@ public class AuroraQuartz {
uniqueViewMapper.insert(uniqueView);
}
/**
 * Clears the unique-visitor set and visitor-area statistics from Redis,
 * resetting the counters for a new statistics period.
 */
public void clear() {
// remove the unique-visitor set
redisService.del(UNIQUE_VISITOR);
// remove the visitor-area statistics
redisService.del(VISITOR_AREA);
}
/**
* Redis使
* 使IP"UNKNOWN"
*/
public void statisticalUserArea() {
// 查询所有用户认证信息中的IP来源字段
Map<String, Long> userAreaMap = userAuthMapper.selectList(new LambdaQueryWrapper<UserAuth>().select(UserAuth::getIpSource))
.stream()
.map(item -> {
@ -88,22 +108,34 @@ public class AuroraQuartz {
return UNKNOWN;
})
.collect(Collectors.groupingBy(item -> item, Collectors.counting()));
// 转换为DTO列表格式并存入Redis
List<UserAreaDTO> userAreaList = userAreaMap.entrySet().stream()
.map(item -> UserAreaDTO.builder()
.name(item.getKey())
.value(item.getValue())
.build())
.collect(Collectors.toList());
redisService.set(USER_AREA, JSON.toJSONString(userAreaList));
}
/**
*
* IDURLHTTP POST
*/
public void baiduSeo() {
// 提取所有文章ID构造完整URL路径
List<Integer> ids = articleService.list().stream().map(Article::getId).collect(Collectors.toList());
// 设置请求头模拟curl行为
HttpHeaders headers = new HttpHeaders();
headers.add("Host", "data.zz.baidu.com");
headers.add("User-Agent", "curl/7.12.1");
headers.add("Content-Length", "83");
headers.add("Content-Type", "text/plain");
// 对每篇文章发起POST请求通知百度爬虫更新索引
ids.forEach(item -> {
String url = websiteUrl + "/articles/" + item;
HttpEntity<String> entity = new HttpEntity<>(url, headers);
@ -111,13 +143,26 @@ public class AuroraQuartz {
});
}
/**
 * Purges historical job execution logs by delegating to JobLogService.
 */
public void clearJobLogs() {
jobLogService.cleanJobLogs();
}
/**
* SwaggerAPI
*
*/
public void importSwagger() {
// 执行Swagger资源导入动作
resourceService.importSwagger();
// 获取刚导入的所有资源ID
List<Integer> resourceIds = resourceService.list().stream().map(Resource::getId).collect(Collectors.toList());
// 构建管理员角色与这些资源之间的关联关系对象列表
List<RoleResource> roleResources = new ArrayList<>();
for (Integer resourceId : resourceIds) {
roleResources.add(RoleResource.builder()
@ -125,12 +170,23 @@ public class AuroraQuartz {
.resourceId(resourceId)
.build());
}
// 批量保存角色-资源映射关系
roleResourceService.saveBatch(roleResources);
}
/**
* MySQLElasticsearch
*
*/
public void importDataIntoES() {
// 删除旧有全文检索索引数据
elasticsearchMapper.deleteAll();
// 加载最新文章列表
List<Article> articles = articleService.list();
// 复制转换为搜索专用DTO结构体并逐条写入ES
for (Article article : articles) {
elasticsearchMapper.save(BeanCopyUtil.copyObject(article, ArticleSearchDTO.class));
}

@ -4,9 +4,24 @@ import com.aurora.entity.Job;
import com.aurora.util.JobInvokeUtil;
import org.quartz.JobExecutionContext;
/**
 * Quartz job implementation extending {@code AbstractQuartzJob} that
 * delegates execution to {@code JobInvokeUtil}.
 * <p>
 * NOTE(review): despite the class name, no {@code @DisallowConcurrentExecution}
 * annotation is visible in this view — confirm it is applied where intended.
 */
public class QuartzDisallowConcurrentExecution extends AbstractQuartzJob {
/**
 * Executes the scheduled task by invoking the job's configured target
 * method through the invoke utility.
 *
 * @param context the Quartz execution context
 * @param job     the task definition to invoke
 * @throws Exception if the target invocation fails
 */
@Override
protected void doExecute(JobExecutionContext context, Job job) throws Exception {
// delegate to the utility that resolves and calls the configured target
JobInvokeUtil.invokeMethod(job);
}
}

@ -3,11 +3,13 @@ package com.aurora.quartz;
import com.aurora.entity.Job;
import com.aurora.util.JobInvokeUtil;
import org.quartz.JobExecutionContext;
/**
*
*/
public class QuartzJobExecution extends AbstractQuartzJob {
@Override
protected void doExecute(JobExecutionContext context, Job job) throws Exception {
JobInvokeUtil.invokeMethod(job);
}
}
}
Loading…
Cancel
Save