package com.ruoyi.system.service;

import com.ruoyi.system.domain.HospitalTokenizerTask;
import com.ruoyi.system.domain.TbHospData;
import com.ruoyi.system.mapper.TbHospDataMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * 医院分词异步任务服务
 *
 * @author ruoyi
 * @date 2026-01-20
 */
|
@Service
|
public class HospitalTokenizerAsyncService {
|
|
private static final Logger logger = LoggerFactory.getLogger(HospitalTokenizerAsyncService.class);
|
|
@Autowired
|
private ITbHospDataService tbHospDataService;
|
|
@Autowired
|
private TbHospDataMapper tbHospDataMapper;
|
|
/**
|
* 任务状态缓存 (生产环境建议使用 Redis)
|
*/
|
private static final Map<String, HospitalTokenizerTask> taskCache = new ConcurrentHashMap<>();
|
|
/**
|
* 异步执行医院分词任务
|
*
|
* @param taskId 任务ID
|
*/
|
@Async("taskExecutor")
|
public void executeTokenizerTask(String taskId) {
|
HospitalTokenizerTask task = new HospitalTokenizerTask(taskId);
|
taskCache.put(taskId, task);
|
|
logger.info("开始执行医院分词异步任务: taskId={}", taskId);
|
|
try {
|
// 查询所有正常状态的医院
|
TbHospData query = new TbHospData();
|
query.setStatus("0");
|
List<TbHospData> hospitalList = tbHospDataMapper.selectTbHospDataList(query);
|
|
task.setTotalCount(hospitalList.size());
|
logger.info("查询到 {} 个医院需要生成分词", hospitalList.size());
|
|
int successCount = 0;
|
int failedCount = 0;
|
|
// 遍历处理每个医院
|
for (int i = 0; i < hospitalList.size(); i++) {
|
TbHospData hospital = hospitalList.get(i);
|
|
try {
|
// 生成分词
|
String keywords = tbHospDataService.generateKeywordsForHospital(hospital);
|
hospital.setHospKeywords(keywords);
|
|
// 更新数据库
|
int result = tbHospDataMapper.updateTbHospData(hospital);
|
if (result > 0) {
|
successCount++;
|
} else {
|
failedCount++;
|
}
|
|
} catch (Exception e) {
|
failedCount++;
|
logger.error("生成医院分词失败: hospId={}, hospName={}",
|
hospital.getHospId(), hospital.getHospName(), e);
|
}
|
|
// 更新任务进度
|
task.setProcessedCount(i + 1);
|
task.setSuccessCount(successCount);
|
task.setFailedCount(failedCount);
|
|
// 每处理100条输出一次日志
|
if ((i + 1) % 100 == 0) {
|
logger.info("医院分词进度: {}/{}, 成功: {}, 失败: {}",
|
i + 1, hospitalList.size(), successCount, failedCount);
|
}
|
}
|
|
// 任务完成
|
task.setStatus("SUCCESS");
|
task.setEndTime(new Date());
|
|
logger.info("医院分词任务完成: taskId={}, 总数: {}, 成功: {}, 失败: {}",
|
taskId, hospitalList.size(), successCount, failedCount);
|
|
} catch (Exception e) {
|
// 任务失败
|
task.setStatus("FAILED");
|
task.setEndTime(new Date());
|
task.setErrorMessage(e.getMessage());
|
|
logger.error("医院分词任务执行失败: taskId={}", taskId, e);
|
}
|
}
|
|
/**
|
* 获取任务状态
|
*
|
* @param taskId 任务ID
|
* @return 任务状态
|
*/
|
public HospitalTokenizerTask getTaskStatus(String taskId) {
|
return taskCache.get(taskId);
|
}
|
|
/**
|
* 清理任务缓存
|
*
|
* @param taskId 任务ID
|
*/
|
public void clearTask(String taskId) {
|
taskCache.remove(taskId);
|
}
|
|
/**
|
* 清理所有已完成的任务 (超过1小时)
|
*/
|
public void clearExpiredTasks() {
|
long oneHourAgo = System.currentTimeMillis() - 3600000;
|
|
taskCache.entrySet().removeIf(entry -> {
|
HospitalTokenizerTask task = entry.getValue();
|
if (task.getEndTime() != null && task.getEndTime().getTime() < oneHourAgo) {
|
logger.info("清理过期任务: taskId={}", entry.getKey());
|
return true;
|
}
|
return false;
|
});
|
}
|
}
|