Commit c04b0b82 by 阳浪

Configuration information

parent 374bd859
package com.yizhi.application.job.domain;
import lombok.Data;
/**
 * Entity class for creating a Job
 * @author yinyuyan 2018/06/06
 *
 */
@Data
public class VmyJob {
    /**
     * Task group
     */
    private String group;
    /**
     * Task name
     */
    private String jobName;
    /**
     * Fully qualified class name of the job class
     */
    private String jobClassName;
    /**
     * Second
     */
    private String second="0";
    /**
     * Minute
     */
    private String minute="*";
    /**
     * Hour
     */
    private String hour="*";
    /**
     * Day of month
     */
    private String day="*";
    /**
     * Month
     */
    private String month="*";
    /**
     * Day of week
     */
    private String week="?";
    /**
     * Year
     */
    private String year="2099";
}
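
The seven String fields above line up with Quartz's standard cron layout: second, minute, hour, day-of-month, month, day-of-week, year. A minimal sketch (not part of this commit; the helper class name is assumed) of how the consuming job service might join and validate them:

```java
import org.quartz.CronExpression;
import com.yizhi.application.job.domain.VmyJob;

// Illustrative helper: joins the VmyJob fields in standard Quartz cron order.
public class VmyJobCronSketch {

    public static String toCronExpression(VmyJob job) {
        return String.join(" ",
                job.getSecond(), job.getMinute(), job.getHour(),
                job.getDay(), job.getMonth(), job.getWeek(), job.getYear());
    }

    public static void main(String[] args) {
        VmyJob job = new VmyJob();               // defaults -> "0 * * * * ? 2099"
        String cron = toCronExpression(job);
        System.out.println(cron + " valid=" + CronExpression.isValidExpression(cron));
    }
}
```

With the defaults this yields `0 * * * * ? 2099`: a trigger that fires every minute, but only during the year 2099, so callers are presumably expected to override `year` (and the other fields) when registering a real schedule.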
package com.yizhi.application.job.remote;
import com.yizhi.application.job.domain.VmyJob;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
@FeignClient(value = "job", contextId = "JobClient")
public interface JobClient {
    /**
     * Add a task
     *
     * @param job
     * @throws Exception
     */
    @RequestMapping(value = "/remote/job/insert", method = RequestMethod.POST)
    void insert(@RequestBody VmyJob job) throws Exception;
    /**
     * Update a task
     *
     * @param job
     * @throws Exception
     */
    @RequestMapping(value = "/remote/job/update", method = RequestMethod.POST)
    void update(@RequestBody VmyJob job) throws Exception;
    /**
     * Delete a task
     *
     * @param jobKey
     * @throws Exception
     */
    @RequestMapping(value = "/remote/job/delete", method = RequestMethod.POST)
    void delete(@RequestParam("jobKey") String jobKey) throws Exception;
}
D:\深蓝云校\cloud-job\cloud-job-api\src\main\java\com\fulan\application\job\remote\JobClient.java
D:\深蓝云校\cloud-job\cloud-job-api\src\main\java\com\fulan\application\job\domain\VmyJob.java
D:\work\hqzhdj\cloud-job\cloud-job-api\src\main\java\com\yizhi\application\job\domain\VmyJob.java
D:\work\hqzhdj\cloud-job\cloud-job-api\src\main\java\com\yizhi\application\job\remote\JobClient.java
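
For context, a hedged usage sketch of the new Feign client from another service. `ReportScheduleService` is hypothetical (it does not exist in this commit); the `jobClassName` simply points at one of the Quartz job classes touched later in this diff:

```java
import com.yizhi.application.job.domain.VmyJob;
import com.yizhi.application.job.remote.JobClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

// Hypothetical caller, for illustration only.
@Service
public class ReportScheduleService {

    @Autowired
    private JobClient jobClient;

    /** Registers a job that fires at 02:00 every day. */
    public void scheduleNightlyReport() throws Exception {
        VmyJob job = new VmyJob();
        job.setGroup("report");
        job.setJobName("nightlyCourseReport");
        job.setJobClassName("com.yizhi.application.handle.quartz.job.CourseIntoTableJob");
        job.setMinute("0");
        job.setHour("2");
        job.setYear("*");   // override the 2099 default so the trigger actually fires
        jobClient.insert(job);
    }
}
```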
......@@ -18,7 +18,8 @@
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-common-service</artifactId>
<artifactId>cloud-common-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
......@@ -43,64 +44,75 @@
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-site-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Album API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-album-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Local dependencies -->
<!-- Course API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-course-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Reporting -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-statistics-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Training project API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-training-project-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Sign-in API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-sign-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Study log API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-course-studyLog-api</artifactId>
<artifactId>cloud-course-study-log-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Points API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-point-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Exam API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-exam-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- Survey API dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-research-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- WeChat service dependency -->
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-wechat-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
......@@ -108,20 +120,24 @@
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>library-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-system-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-newMessage-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-aliyun-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
......@@ -160,6 +176,12 @@
<artifactId>wsdl4j</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>com.yizhi</groupId>
<artifactId>cloud-course</artifactId>
<version>1.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
......
......@@ -2,10 +2,11 @@ package com.yizhi.application;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
@SpringBootApplication
@SpringBootApplication(exclude = {RabbitAutoConfiguration.class})
@EnableDiscoveryClient
@EnableFeignClients(basePackages = {"com.yizhi"})
public class JobApplication {
......
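
The body of `JobApplication` is collapsed in the hunk above. Purely as orientation, a typical entry point carrying these annotations would look roughly like the sketch below; the `main` body is assumed, not taken from the commit:

```java
package com.yizhi.application;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;

// Sketch only: RabbitMQ auto-configuration is excluded, the service registers with
// the discovery server, and Feign clients are scanned across the whole com.yizhi tree.
@SpringBootApplication(exclude = {RabbitAutoConfiguration.class})
@EnableDiscoveryClient
@EnableFeignClients(basePackages = {"com.yizhi"})
public class JobApplication {

    public static void main(String[] args) {
        SpringApplication.run(JobApplication.class, args);
    }
}
```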
......@@ -16,6 +16,11 @@ import java.util.UUID;
import javax.annotation.Resource;
import com.yizhi.util.application.constant.ReturnCode;
import com.yizhi.util.application.domain.Response;
import com.yizhi.util.application.date.DateUtil;
import com.yizhi.util.application.str.StringUtil;
import org.apache.commons.lang3.StringUtils;
import org.quartz.CronExpression;
import org.quartz.JobDataMap;
......@@ -41,10 +46,6 @@ import org.springframework.web.bind.annotation.RestController;
import com.alibaba.fastjson.JSON;
import com.yizhi.application.constant.JobError;
import com.yizhi.application.handle.quartz.QuartzConfigration;
import com.yizhi.application.util.constant.ReturnCode;
import com.yizhi.application.util.date.DateUtil;
import com.yizhi.application.util.domain.Response;
import com.yizhi.application.util.str.StringUtil;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
......
......@@ -19,6 +19,8 @@ import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.yizhi.util.application.date.DateUtil;
import com.yizhi.util.application.str.StringUtil;
import org.apache.commons.lang3.StringUtils;
import org.quartz.CronExpression;
import org.quartz.JobDataMap;
......@@ -44,8 +46,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import com.yizhi.application.handle.quartz.QuartzConfigration;
import com.yizhi.application.util.date.DateUtil;
import com.yizhi.application.util.str.StringUtil;
/**
* Schedule task class
......
......@@ -9,7 +9,10 @@ import java.io.IOException;
import java.util.List;
import javax.annotation.Resource;
import javax.xml.ws.Response;
import com.yizhi.core.application.exception.BizException;
import com.yizhi.util.application.constant.ReturnCode;
import org.quartz.CronExpression;
import org.quartz.JobDetail;
import org.quartz.JobKey;
......@@ -26,11 +29,8 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.yizhi.application.constant.JobError;
import com.yizhi.application.exception.BizException;
import com.yizhi.application.handle.quartz.QuartzConfigration;
import com.yizhi.application.job.domain.VmyJob;
import com.yizhi.application.util.constant.ReturnCode;
import com.yizhi.application.util.domain.Response;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
......
......@@ -7,6 +7,7 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.yizhi.util.application.file.FileUtils;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.TriggerKey;
......@@ -21,7 +22,6 @@ import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import com.yizhi.application.util.file.FileUtils;
/**
* Quartz configuration class
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.AliyunClient;
import com.yizhi.aliyun.application.feign.AliyunClient;
import com.yizhi.application.handle.quartz.BaseJob;
import lombok.extern.log4j.Log4j2;
import org.quartz.InterruptableJob;
......
......@@ -3,6 +3,7 @@ package com.yizhi.application.handle.quartz.job;
import java.text.SimpleDateFormat;
import java.util.Date;
import com.yizhi.album.application.feign.AlbumJobHandleClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -11,7 +12,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.application.feign.AlbumJobHandleClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class AlbumUnLockJob implements BaseJob, InterruptableJob {
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.BannerManageFeignClients;
import com.yizhi.application.handle.quartz.BaseJob;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
......@@ -9,7 +8,6 @@ import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
......@@ -21,11 +19,6 @@ public class BannerUpJob implements BaseJob, InterruptableJob {
private final Logger LOG = LoggerFactory.getLogger(BannerUpJob.class);
// @Autowired
// SystemMessageClient messageClient;
@Autowired
BannerManageFeignClients bannerManageFeignClient;
@Override
......@@ -34,7 +27,6 @@ public class BannerUpJob implements BaseJob, InterruptableJob {
LOG.info("执行banner上架的操作:"+id);
try {
bannerManageFeignClient.upBanner(id);
} catch (Exception e) {
LOG.error("=====================",e);
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.course.application.feign.CourseReportClient;
import com.yizhi.library.application.feign.StatisticStudentCaseClient;
import com.yizhi.statistics.application.feign.StatisticsCourseClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -8,9 +11,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.api.feign.StatisticStudentCaseClient;
import com.yizhi.application.feign.CourseReportClient;
import com.yizhi.application.feign.StatisticsCourseClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class CourseIntoTableJob implements BaseJob, InterruptableJob {
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.CourseClient;
import com.yizhi.application.feign.InformationManageFeignClients;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.course.application.feign.CourseClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.CourseStudyHourRankingClient;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.system.remote.SiteClient;
import com.yizhi.course.application.feign.CourseStudyHourRankingClient;
import com.yizhi.system.application.system.remote.SiteClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......
package com.yizhi.application.handle.quartz.job;
import cn.hutool.core.date.DateUtil;
import com.yizhi.application.feign.RemoteDaIndicatorCourseClient;
import com.yizhi.application.feign.RemoteDaIndicatorTpClient;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.system.remote.RemoteDaIndicatorSystemClient;
import com.yizhi.application.system.remote.SiteClient;
import com.yizhi.application.vo.RemoteDaIndicatorVo;
import com.yizhi.course.application.feign.RemoteDaIndicatorCourseClient;
import com.yizhi.system.application.system.remote.RemoteDaIndicatorSystemClient;
import com.yizhi.system.application.system.remote.SiteClient;
import com.yizhi.system.application.vo.RemoteDaIndicatorVo;
import com.yizhi.training.application.feign.RemoteDaIndicatorTpClient;
import org.apache.commons.collections.CollectionUtils;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
......@@ -69,7 +69,8 @@ public class DashboardJob implements BaseJob, InterruptableJob {
try {
log.info("-------------------- dashboard 课程完成数 开始处理 site_id: {} ----------------------", siteId);
courseClient.courseFinish(new RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
courseClient.courseFinish(new
com.yizhi.course.application.vo.RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
log.info("-------------------- dashboard 课程完成数 处理完成 site_id: {} ----------------------", siteId);
} catch (Exception e) {
e.printStackTrace();
......@@ -77,7 +78,8 @@ public class DashboardJob implements BaseJob, InterruptableJob {
try {
log.info("-------------------- dashboard 课程学习时长 开始处理 site_id: {} ----------------------", siteId);
courseClient.courseTime(new RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
courseClient.courseTime(new
com.yizhi.course.application.vo.RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
log.info("-------------------- dashboard 课程学习时长 处理完成 site_id: {} ----------------------", siteId);
} catch (Exception e) {
e.printStackTrace();
......@@ -85,7 +87,8 @@ public class DashboardJob implements BaseJob, InterruptableJob {
try {
log.info("-------------------- dashboard 项目通过率 开始处理 site_id: {} ----------------------", siteId);
tpClient.tpFinish(new RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
tpClient.tpFinish(new
com.yizhi.training.application.vo.RemoteDaIndicatorVo(siteId, startOfWeek, endOfWeek, null, now));
log.info("-------------------- dashboard 项目通过率 处理完成 site_id: {} ----------------------", siteId);
} catch (Exception e) {
e.printStackTrace();
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.exam.application.feign.StatisticsExamMetadataClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -7,7 +8,6 @@ import org.quartz.UnableToInterruptJobException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.api.feign.StatisticsExamMetadataClient;
import com.yizhi.application.handle.quartz.BaseJob;
......
......@@ -9,6 +9,8 @@
*/
package com.yizhi.application.handle.quartz.job;
import com.yizhi.exam.application.feign.ExamQuestionApiClient;
import com.yizhi.exam.application.vo.exam.ExamAnswerVO;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -17,8 +19,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.api.feign.ExamQuestionApiClient;
import com.yizhi.api.vo.exam.ExamAnswerVO;
import com.yizhi.application.handle.quartz.BaseJob;
/**
......
......@@ -9,17 +9,24 @@
*/
package com.yizhi.application.handle.quartz.job;
import cn.hutool.core.bean.BeanUtil;
import com.alibaba.fastjson.JSON;
import com.yizhi.application.cache.RedisCache;
import com.yizhi.application.cache.distributedlock.impl.RedisDistributedLock;
import com.yizhi.application.domain.*;
import com.yizhi.application.feign.CourseClient;
import com.yizhi.application.feign.CourseStudyClient;
import com.yizhi.application.feign.RecordeClient;
import com.yizhi.application.feign.StudyLogClient;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.util.date.DateUtil;
import com.yizhi.application.vo.HeartBeatVo;
import com.yizhi.core.application.cache.RedisCache;
import com.yizhi.core.application.cache.distributedlock.impl.RedisDistributedLock;
import com.yizhi.course.application.domain.Recorde;
import com.yizhi.course.application.feign.CourseClient;
import com.yizhi.course.application.feign.CourseStudyClient;
import com.yizhi.course.application.feign.RecordeClient;
import com.yizhi.course.application.vo.HeartBeatVo;
import com.yizhi.course.application.vo.StudyLog;
import com.yizhi.course.application.vo.domain.ChapterEntityVo;
import com.yizhi.course.application.vo.domain.CourseEntityVo;
import com.yizhi.course.application.vo.domain.MaterialEntityVo;
import com.yizhi.course.application.vo.domain.RecordeEntityVo;
import com.yizhi.studylog.application.feign.StudyLogClient;
import com.yizhi.studylog.application.vo.StudyLogVo;
import com.yizhi.util.application.date.DateUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.quartz.InterruptableJob;
......@@ -94,7 +101,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
return;
}*/
try {
Map<String, List<StudyLog>> errorData = new HashMap<>();
Map<String, List<StudyLogVo>> errorData = new HashMap<>();
// 获取最大的id
Object maxIdObj = redisCache.get(REDIS_KEY_MAX);
......@@ -112,7 +119,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}
LOG.info(uuid+"批次,redis重试次数={}",retryMaxRedisObj);
List<StudyLog> studyLogs = studyLogClient.list("default");
List<StudyLogVo> studyLogs = studyLogClient.list("default");
//LOG.info("原始数据={}",JSON.toJSONString(studyLogs));
if (CollectionUtils.isNotEmpty(studyLogs)) {
......@@ -140,18 +147,20 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
HeartBeatVo heartBeatVo = new HeartBeatVo();// todo 心跳参数设置,目前为vo固定,获取根据接口获取
// 查询scorm课程的心跳记录
List<StudyLog> scormStudyLogs = studyLogClient.list("scorm");
List<StudyLogVo> scormStudyLogs = studyLogClient.list("scorm");
if(CollectionUtils.isNotEmpty(scormStudyLogs)){
Map<String, List<StudyLog> > scormStudyData = scormStudyLogs.parallelStream()
Map<String, List<StudyLogVo> > scormStudyData = scormStudyLogs.parallelStream()
.collect(Collectors.groupingBy(scormObjKey->scormObjKey.getAccountId()+"#"+scormObjKey.getCourseId()));
for(Map.Entry<String,List<StudyLog>> scormItem:scormStudyData.entrySet()){
for(Map.Entry<String,List<StudyLogVo>> scormItem:scormStudyData.entrySet()){
//Optional<StudyLog> validData = scormItem.getValue().parallelStream().filter(scormItemFilter -> scormItemFilter.getChapterId()>0).findAny();
List<StudyLog> dataSort = scormItem.getValue().stream().sorted(Comparator.comparing(StudyLog::getNginxTime)).collect(Collectors.toList());
List<StudyLogVo> dataSort = scormItem.getValue().stream().sorted(Comparator.comparing(StudyLogVo::getNginxTime)).collect(Collectors.toList());
int size = dataSort.size();
StudyLog studyLogScorm = dataSort.get(size-1);
StudyLogVo studyLogScorm = dataSort.get(size-1);
studyLogScorm.setTerminalDevice((size*heartBeatVo.getCount())+"");
studyLogScorm.setCreateTime(dataSort.get(0).getNginxTime());
courseClient.saveScormHeartBeat(studyLogScorm);
StudyLog studyLog = new StudyLog();
BeanUtil.copyProperties(studyLogScorm,studyLog);
courseClient.saveScormHeartBeat(studyLog);
/*if(validData.isPresent()){
// 传输数据进行更新
StudyLog studyLogScorm = validData.get();
......@@ -175,7 +184,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
LOG.info("查询到studyLog数据{}条",studyLogs.size());
//studyLogs.stream().parallel().collect(Collectors.groupingBy(obj->obj.getSessionId(),HashMap::new,Collectors.toCollection(LinkedList::new)));
Map<String, List<StudyLog>> data = studyLogs.stream().parallel().collect(Collectors.groupingBy(
Map<String, List<StudyLogVo>> data = studyLogs.stream().parallel().collect(Collectors.groupingBy(
//obj -> obj.getSessionId() + SPILT + obj.getCourseId() + SPILT + obj.getChapterId()));//,Collectors.counting()
obj -> obj.getSessionId() ));
LOG.info("分组SessionId以后的数据为{}条",data.size());
......@@ -185,16 +194,16 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
Set<Long> courseIds = new HashSet<>();
List<Long> chapterIds = new ArrayList<>();
//for (String keyVal : keySets) {
for (Map.Entry<String, List<StudyLog>> entryObj : data.entrySet()) {
for (Map.Entry<String, List<StudyLogVo>> entryObj : data.entrySet()) {
//String[] valTemp = keyVal.split(SPILT);
sessionIds.add(entryObj.getKey());
List<StudyLog> studyLogsIn = entryObj.getValue();
StudyLog studyLogIn = studyLogsIn.get(0);
List<StudyLogVo> studyLogsIn = entryObj.getValue();
StudyLogVo studyLogIn = studyLogsIn.get(0);
Long courseIdIn = studyLogIn.getCourseId();
Long chapterId = studyLogIn.getChapterId();
if(Objects.isNull(courseIdIn) || courseIdIn.longValue()<=0){
StudyLog studyLogIn1 = studyLogsIn.stream().filter(obj->(Objects.nonNull(obj.getChapterId()) && obj.getChapterId().longValue()>0))
StudyLogVo studyLogIn1 = studyLogsIn.stream().filter(obj->(Objects.nonNull(obj.getChapterId()) && obj.getChapterId().longValue()>0))
.findFirst().get();
courseIdIn = studyLogIn1.getCourseId();
chapterId = studyLogIn1.getChapterId();
......@@ -205,10 +214,10 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}
// 查询已经存在的学习记录
// LOG.info("sessionIds={}", sessionIds);
List<Recorde> recordes = recordeClient.getExistRecorde(sessionIds);
List<RecordeEntityVo> recordes = recordeClient.getExistRecorde(sessionIds);
//LOG.info("根据sessionIds={},查询结果={}",JSON.toJSONString(sessionIds),JSON.toJSONString(recordes));
Map<String, Recorde> recordeMap = null; // 存在的学习记录
Map<String, RecordeEntityVo> recordeMap = null; // 存在的学习记录
boolean existFlag = false;
if (CollectionUtils.isNotEmpty(recordes)) {
existFlag = true;
......@@ -217,19 +226,19 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
} catch (Exception e) {
LOG.info("存在重复的学习课程,进行特殊处理,数据={}", JSON.toJSONString(recordes));
recordeMap = new HashMap<>();
Map<String, List<Recorde>> recordesMap = recordes.stream().collect(Collectors.groupingBy(key -> key.getSessionId()));
for (Map.Entry<String, List<Recorde>> item : recordesMap.entrySet()) {
List<Recorde> recordeListT = item.getValue();
Map<String, List<RecordeEntityVo>> recordesMap = recordes.stream().collect(Collectors.groupingBy(key -> key.getSessionId()));
for (Map.Entry<String, List<RecordeEntityVo>> item : recordesMap.entrySet()) {
List<RecordeEntityVo> recordeListT = item.getValue();
int legt = recordeListT.size();
if (legt == 1) {
recordeMap.put(item.getKey(), recordeListT.get(0));
} else {
recordeListT.sort(new Comparator<Recorde>() {
/* recordeListT.sort(new Comparator<RecordeEntityVo>() {
@Override
public int compare(Recorde o1, Recorde o2) {
return o2.getEndTime().compareTo(o1.getEndTime());
public int compare(CourseEntityVo o1, CourseEntityVo o2) {
return o2.getUpdateTime().compareTo(o1.getUpdateTime());
}
});
});*/
recordeMap.put(item.getKey(), recordeListT.get(legt - 1));
}
}
......@@ -237,11 +246,11 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}
//LOG.info("根据sessionIds={},查询结果转成map={}",JSON.toJSONString(sessionIds),JSON.toJSONString(recordeMap));
// 查询课程信息
List<Course> courseList = null;
List<CourseEntityVo> courseList = null;
try {
if(recordeMap != null){
for(Recorde recordeItems : recordeMap.values()){
for(RecordeEntityVo recordeItems : recordeMap.values()){
courseIds.add(recordeItems.getCourseId());
chapterIds.add(recordeItems.getChapterId());
}
......@@ -250,16 +259,16 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
} catch (Exception e) {
return;
}
Map<Long, Course> courseMap = courseList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
Map<Long, CourseEntityVo> courseMap = courseList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
// 查询章节信息
List<Chapter> chapterList = recordeClient.selectExistChapter(chapterIds);
Map<Long, Chapter> chapterMap = chapterList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
List<ChapterEntityVo> chapterList = recordeClient.selectExistChapter(chapterIds);
Map<Long, ChapterEntityVo> chapterMap = chapterList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
// 查询素材信息
List<Long> materials = chapterList.stream().map(obj -> obj.getMaterialId()).collect(Collectors.toList());
List<Material> materialList = recordeClient.selectExistMaterial(materials);
Map<Long, Material> materialMap = materialList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
List<MaterialEntityVo> materialList = recordeClient.selectExistMaterial(materials);
Map<Long, MaterialEntityVo> materialMap = materialList.stream().collect(Collectors.toMap(key -> key.getId(), val -> val));
// 处理心跳数据
List<Recorde> newRecorde = new ArrayList<>();
......@@ -268,7 +277,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
List<Recorde> upRecorde = new ArrayList<>();
//LOG.info("对原始数据进行分组以后的数据={}",JSON.toJSONString(data));
for (Map.Entry<String, List<StudyLog>> item : data.entrySet()) {
for (Map.Entry<String, List<StudyLogVo>> item : data.entrySet()) {
excutIndex++;
LOG.info("分组SessionId以后的开始处理{}条数据",excutIndex);
String keyVal = item.getKey();
......@@ -280,7 +289,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
LOG.info("sessionId为空的数据={}", JSON.toJSONString(item));
continue;
}
List<StudyLog> valList = item.getValue();
List<StudyLogVo> valList = item.getValue();
// 每次处理的批次的数据条数
int studyLogSize = valList.size();
// 学习时间次数(对相同时间的同一个sessionId进行合并)
......@@ -294,12 +303,12 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
//return e1.getWight() - e2.getWight();
return e1.getNginxTime().compareTo(e2.getNginxTime());
});*/
valList = valList.stream().sorted(Comparator.comparing(StudyLog::getNginxTime)).collect(Collectors.toList());
valList = valList.stream().sorted(Comparator.comparing(StudyLogVo::getNginxTime)).collect(Collectors.toList());
//LOG.info("排序后的数据={}",JSON.toJSONString(valList));
// 合并心跳记录转换成学习记录
Recorde recorde = new Recorde();
StudyLog lastStudy = valList.get(studyLogSize - 1);
StudyLogVo lastStudy = valList.get(studyLogSize - 1);
// 获取最后的学习时间
//long lastStudyDateVal = lastStudy.getTimestamp();
//Date lastStudyDate = new Date(lastStudyDateVal);
......@@ -309,7 +318,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
Date lastStudyDate = lastStudy.getNginxTime();
//LOG.info("最后学习时间={}",lastStudyDate);
StudyLog startStudyLog = valList.get(0);
StudyLogVo startStudyLog = valList.get(0);
// 获取开始时间
//long startStudyDateVal = startStudyLog.getTimestamp();
......@@ -317,7 +326,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
// 获取课程的策略
Long courseIdBycurrStudy = startStudyLog.getCourseId();
Course course = courseMap.get(courseIdBycurrStudy);
CourseEntityVo course = courseMap.get(courseIdBycurrStudy);
if (course == null) {
if (existFlag && recordeMap.containsKey(sessionId)) {
courseIdBycurrStudy = recordeMap.get(sessionId).getCourseId();
......@@ -339,7 +348,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
extedText = "顺时间心跳完成";
overed = 1;
}else {
Optional<StudyLog> dataOpt = valList.stream().parallel().filter(itemStudy -> Integer.valueOf(1).equals(itemStudy.getMaterialCompleted())).findAny();
Optional<StudyLogVo> dataOpt = valList.stream().parallel().filter(itemStudy -> Integer.valueOf(1).equals(itemStudy.getMaterialCompleted())).findAny();
if(dataOpt.isPresent()){
//StudyLog headObj = dataOpt.get();
overed = 1;
......@@ -348,7 +357,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}
Recorde existRecordeItem = null; // 是否是有学习过的记录
RecordeEntityVo existRecordeItem = null; // 是否是有学习过的记录
if (existFlag && recordeMap.containsKey(sessionId)) {
// 获取存在的学习记录
existRecordeItem = recordeMap.get(sessionId);
......@@ -362,7 +371,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
terminalDevice+="-->"+startStudyLog.getTerminalDevice()+"["+DateUtil.toShortSeconds(new Date())+"]"+recorde.getOvered()+extedText;
recorde.setTerminalDevice(terminalDevice);
}else if(overed==1){
terminalDevice+="-->"+"["+DateUtil.toShortSeconds(new Date())+"]"+recorde.getOvered()+extedText;
terminalDevice+="-->"+"["+ DateUtil.toShortSeconds(new Date())+"]"+recorde.getOvered()+extedText;
recorde.setTerminalDevice(terminalDevice);
}
String startTerminalType = existRecordeItem.getTerminalType();
......@@ -390,11 +399,11 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
recorde.setCourseId(startStudyLog.getCourseId());
recorde.setChapterId(studyLogChapterId);
if(studyLogChapterId==null || studyLogChapterId.longValue()==0){
Optional<StudyLog> dataStudyLogChapterIdL = valList.stream().parallel().filter
Optional<StudyLogVo> dataStudyLogChapterIdL = valList.stream().parallel().filter
(studyLogChapterIdL->(studyLogChapterIdL.getChapterId()!=null
&& studyLogChapterIdL.getChapterId().longValue()>0)).findAny();
if(dataStudyLogChapterIdL.isPresent()){
StudyLog studyLogChapterIdLd = dataStudyLogChapterIdL.get();
StudyLogVo studyLogChapterIdLd = dataStudyLogChapterIdL.get();
recorde.setCourseId(studyLogChapterIdLd.getCourseId());
recorde.setChapterId(studyLogChapterIdLd.getChapterId());
}else {
......@@ -484,7 +493,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
chapterId = existRecordeItem.getChapterId();
}
}
Chapter chapter = chapterMap.get(chapterId);
ChapterEntityVo chapter = chapterMap.get(chapterId);
if (chapter == null) {
//LOG.info("{}该章节已经在课程里已被修改,重新查询", chapterId);
errorData.put(sessionId+"章节不存在", item.getValue());
......@@ -564,8 +573,10 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}while (!courseExecLockStatus && maxRetry<3);
try {
try {
RecordeEntityVo entityVo = new RecordeEntityVo();
BeanUtil.copyProperties(sendPoint,entityVo);
//LOG.info("发送处理的数据={}",JSON.toJSONString(sendPoint));
courseStudyClient.completionStatus(sendPoint);
courseStudyClient.completionStatus(entityVo);
} catch (Exception e) {
LOG.info("{}发放积分异常{}", JSON.toJSONString(sendPoint), e);
String errKey = sendPoint.getSessionId();
......@@ -595,9 +606,14 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
recordeU.getDuration().intValue()*1000)));
}*/
//LOG.info("更新数据={}",JSON.toJSONString(recordeU));
recordeClient.update(recordeU);
RecordeEntityVo entityVo = new RecordeEntityVo();
BeanUtil.copyProperties(recordeU,entityVo);
recordeClient.update(entityVo);
}else{
recordeClient.save(sendPoint);
RecordeEntityVo entityVo = new RecordeEntityVo();
BeanUtil.copyProperties(sendPoint,entityVo);
recordeClient.save(entityVo);
}
} finally {
if(courseExecLockStatus){
......@@ -643,7 +659,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
//Files.write(Paths.get("/app/logs/"+fileName), JSON.toJSONString(errorData).getBytes());
String lineStr = System.getProperty("line.separator");
StringBuilder sb = new StringBuilder();
for(Map.Entry<String, List<StudyLog>> errItem:errorData.entrySet()){
for(Map.Entry<String, List<StudyLogVo>> errItem:errorData.entrySet()){
sb.append(JSON.toJSONString(errItem)).append(lineStr);
}
Files.write(Paths.get("/app/logs/"+fileName), sb.toString().getBytes());
......@@ -667,8 +683,8 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
}
private int getOvered(Map<Long, Material> materialMap, int duration,
int overed, Chapter chapter, int complete, long accountId, long siteId) {
private int getOvered(Map<Long, MaterialEntityVo> materialMap, int duration,
int overed, ChapterEntityVo chapter, int complete, long accountId, long siteId) {
if(complete==0){
overed = 1;
}else{
......@@ -678,7 +694,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
if(Objects.nonNull(countDuration)){
duration += countDuration.longValue();
}
Material material = materialMap.get(chapter.getMaterialId());
MaterialEntityVo material = materialMap.get(chapter.getMaterialId());
if(material == null){
return -1;
}
......@@ -691,7 +707,7 @@ public class HttpSetjob implements BaseJob, InterruptableJob {
return overed;
}
private static int getTimeLong(Material material, int timeLong) {
private static int getTimeLong(MaterialEntityVo material, int timeLong) {
Integer textSecond = material.getTextSecond();
if(Objects.nonNull(textSecond)){
timeLong += textSecond.intValue();
......
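
A recurring pattern in the hunk above: the job keeps building its local `Recorde`/`StudyLog` domain objects and copies them into the renamed course-service VOs via hutool's `BeanUtil` before calling the Feign clients. A minimal sketch of that conversion (the mapper class name is assumed; the `copyProperties` call mirrors the ones added in the diff):

```java
import cn.hutool.core.bean.BeanUtil;
import com.yizhi.course.application.domain.Recorde;
import com.yizhi.course.application.vo.domain.RecordeEntityVo;

// Illustrative helper: properties are copied by matching name from the locally
// built Recorde into the RecordeEntityVo expected by recordeClient.save/update.
public final class RecordeVoMapper {

    private RecordeVoMapper() {
    }

    public static RecordeEntityVo toVo(Recorde recorde) {
        RecordeEntityVo vo = new RecordeEntityVo();
        BeanUtil.copyProperties(recorde, vo);
        return vo;
    }
}
```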
......@@ -9,18 +9,32 @@
*/
package com.yizhi.application.handle.quartz.job;
import cn.hutool.core.bean.BeanUtil;
import com.alibaba.fastjson.JSON;
import com.yizhi.application.cache.RedisCache;
import com.yizhi.application.domain.*;
import com.yizhi.application.event.EventWrapper;
import com.yizhi.application.feign.*;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.publish.CloudEventPublisher;
import com.yizhi.application.task.TaskExecutor;
import com.yizhi.application.util.constant.QueueConstant;
import com.yizhi.application.util.event.TrainingProjectEvent;
import com.yizhi.application.vo.PointParamVO;
import com.yizhi.application.vo.RecordeDurationVO;
import com.yizhi.core.application.cache.RedisCache;
import com.yizhi.core.application.event.EventWrapper;
import com.yizhi.core.application.publish.CloudEventPublisher;
import com.yizhi.core.application.task.TaskExecutor;
import com.yizhi.course.application.domain.Chapter;
import com.yizhi.course.application.domain.Course;
import com.yizhi.course.application.domain.Material;
import com.yizhi.course.application.domain.Recorde;
import com.yizhi.course.application.feign.CourseClient;
import com.yizhi.course.application.feign.CourseStudyClient;
import com.yizhi.course.application.feign.MaterialClient;
import com.yizhi.course.application.feign.RecordeClient;
import com.yizhi.course.application.vo.RecordeDurationVO;
import com.yizhi.course.application.vo.domain.ChapterEntityVo;
import com.yizhi.course.application.vo.domain.CourseEntityVo;
import com.yizhi.course.application.vo.domain.MaterialEntityVo;
import com.yizhi.course.application.vo.domain.RecordeEntityVo;
import com.yizhi.point.application.feign.PointRedisFeignClients;
import com.yizhi.point.application.vo.PointParamVO;
import com.yizhi.studylog.application.feign.StudyLogClient;
import com.yizhi.studylog.application.vo.StudyLogVo;
import com.yizhi.util.application.constant.QueueConstant;
import com.yizhi.util.application.event.TrainingProjectEvent;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
......@@ -28,7 +42,6 @@ import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
......@@ -78,10 +91,10 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
StudyLogClient studyLogClient;
@Autowired
PointRedisFeignClients pointRedisFeignClients;
/* @Autowired
AmqpTemplate amqpTemplate;*/
@Autowired
AmqpTemplate amqpTemplate;
@Autowired
CourseClient courseClient;
CourseClient courseClient;
@Autowired
MaterialClient materialClient;
@Autowired
......@@ -97,26 +110,34 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
* Initialize the course, chapter, material, and study-record lists
*/
private void init() {
List<Course> courseList = recordeClient.getCourseList();
List<CourseEntityVo> courseList = recordeClient.getCourseList();
LOG.info("心跳:setp1");
for (Course course : courseList) {
for (CourseEntityVo courseVo : courseList) {
Course course = new Course();
BeanUtil.copyProperties(courseVo,course);
this.allCoursesMap.put(course.getId(), course);
}
List<Chapter> chapterList = recordeClient.getChapterList();
List<ChapterEntityVo> chapterList = recordeClient.getChapterList();
LOG.info("心跳:setp2");
for (Chapter chapter : chapterList) {
for (ChapterEntityVo chapterVo : chapterList) {
Chapter chapter = new Chapter();
BeanUtil.copyProperties(chapterVo,chapter);
this.allChaptersMap.put(chapter.getId(), chapter);
}
List<Material> materialList = recordeClient.getMaterialList();
List<MaterialEntityVo> materialList = recordeClient.getMaterialList();
LOG.info("心跳:setp3");
for (Material material : materialList) {
for (MaterialEntityVo materialVo : materialList) {
Material material = new Material();
BeanUtil.copyProperties(materialVo,material);
this.allMaterialsMap.put(material.getId(), material);
}
List<Recorde> recordes = recordeClient.getAllRecordeList();
List<RecordeEntityVo> recordes = recordeClient.getAllRecordeList();
LOG.info("心跳:setp4");
for (Recorde recorde : recordes) {
for (RecordeEntityVo recordeVo : recordes) {
Recorde recorde = new Recorde();
BeanUtil.copyProperties(recordeVo,recorde);
String sessionkey = makeSessionKey(recorde);
// If the map already contains this record and the chapter has already been completed
......@@ -147,7 +168,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
try {
long start = System.currentTimeMillis();
LOG.info("<---------------------------------定时解析心跳---------------------------------------------------------->" + start);
List<StudyLog> studyLogs = studyLogClient.list("");
List<StudyLogVo> studyLogs = studyLogClient.list("");
if (studyLogs.isEmpty() & studyLogs.size() == 0) {
LOG.info("无学习记录");
......@@ -164,7 +185,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
// 每条 studyLog 的记录时间
Date newTime = null;
for (StudyLog studyLog : studyLogs) {
for (StudyLogVo studyLog : studyLogs) {
ids.add(studyLog.getId());
String sessionKey = makeSessionKey(studyLog);
// 如果当前 学习记录 map 中已经有过该章节的学习记录信息,更新开始时间(算最小的那条)、结束时间(算最大的那条)、是否完成(完成一次就算完成)
......@@ -223,7 +244,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
Date nowDate = new Date();
// 将要保存的新纪录
List<Recorde> newRecords = new ArrayList<>();
List<RecordeEntityVo> newRecords = new ArrayList<>();
// 循环本次新产生的学习记录 recordMap
for (Map.Entry<String, Recorde> entry : currentRecordeMap.entrySet()) {
......@@ -258,7 +279,9 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
oldRecord.setOvered(1);
}
}
newRecords.add(recorde);
RecordeEntityVo entityVo = new RecordeEntityVo();
BeanUtil.copyProperties(recorde,entityVo);
newRecords.add(entityVo);
}
LOG.info("心跳:setp7");
// 插入本次解析到的学习记录
......@@ -282,7 +305,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
}
LOG.info("心跳:setp8");
// 发放积分
for (Recorde recorde : newRecords) {
for (RecordeEntityVo recorde : newRecords) {
if (1 == recorde.getOvered()) {
//getPoint(recorde.getChapterId(), recorde.getAccountId());
try {
......@@ -318,15 +341,15 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
*
* @param newRecords
*/
private void sendCourseFinishedMsg(List<Recorde> newRecords) {
Map<String, List<Recorde>> records = new HashMap<>();
private void sendCourseFinishedMsg(List<RecordeEntityVo> newRecords) {
Map<String, List<RecordeEntityVo>> records = new HashMap<>();
String key = null;
for (Recorde r : newRecords) {
for (RecordeEntityVo r : newRecords) {
key = r.getCourseId() + "_" + r.getAccountId();
if (records.containsKey(key)) {
records.get(key).add(r);
} else {
List<Recorde> list = new ArrayList<>();
List<RecordeEntityVo> list = new ArrayList<>();
list.add(r);
records.put(key, list);
}
......@@ -335,19 +358,19 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
Long courseId = null;
Long accountId = null;
Long duration = null;
List<Recorde> accountRecord = null;
List<RecordeEntityVo> accountRecord = null;
Date date = new Date();
// 查询学员是否完成这些课程
Map<String, Boolean> map = courseClient.isCourseFinished(records.keySet(), true);
for (Map.Entry<String, List<Recorde>> entry : records.entrySet()) {
for (Map.Entry<String, List<RecordeEntityVo>> entry : records.entrySet()) {
duration = 0L;
courseId = Long.valueOf(entry.getKey().split("_")[0]);
accountId = Long.valueOf(entry.getKey().split("_")[1]);
accountRecord = entry.getValue();
if (!CollectionUtils.isEmpty(accountRecord)) {
for (Recorde recorde : accountRecord) {
for (RecordeEntityVo recorde : accountRecord) {
duration += recorde.getDuration() == null ? 0 : recorde.getDuration();
}
}
......@@ -539,7 +562,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
String sitePointId = pointRedisFeignClients.addPointRedis(pointParamVO);
if (null != sitePointId) {
//发送积分获取消息
amqpTemplate.convertAndSend("course", sitePointId);
// amqpTemplate.convertAndSend("course", sitePointId);
}
}
}
......@@ -598,7 +621,7 @@ public class HttpSetjobBack implements BaseJob, InterruptableJob {
* @param studyLog
* @return
*/
private String makeSessionKey(StudyLog studyLog) {
private String makeSessionKey(StudyLogVo studyLog) {
String sessionkey = "";
if (studyLog.getAccountId() == null) {
sessionkey = studyLog.getSessionId();
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.InformationManageFeignClients;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.site.application.feign.InformationManageFeignClients;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.system.remote.OrganizationClient;
import com.yizhi.system.application.system.remote.OrganizationClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.cache.CacheNamespace;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.system.remote.OrganizationClient;
import com.yizhi.core.application.cache.CacheNamespace;
import com.yizhi.system.application.system.remote.OrganizationClient;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.collections.CollectionUtils;
import org.quartz.InterruptableJob;
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.research.application.feign.ResearchReportClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -8,7 +9,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.application.feign.ResearchReportClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class ResearchIntoTable implements BaseJob, InterruptableJob {
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.application.feign.remote.RemoteJobHandleClient;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.message.application.feign.remote.RemoteJobHandleClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.sign.application.feign.ReportSignClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -8,7 +9,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.application.feign.ReportSignClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class SignInToTable implements BaseJob, InterruptableJob {
......
package com.yizhi.application.handle.quartz.job;
import com.yizhi.training.application.feign.TrainingProjectReportClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
......@@ -8,7 +9,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.application.feign.TrainingProjectReportClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class StatisticsTrainingProjectJob implements BaseJob, InterruptableJob {
......
......@@ -11,20 +11,19 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.yizhi.application.feign.SydPointAcctranClient;
import com.yizhi.application.handle.quartz.BaseJob;
public class SydPointAcctranJob implements BaseJob, InterruptableJob {
private final Logger LOG = LoggerFactory.getLogger(AlbumUnLockJob.class);
private SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
/*
@Autowired
private SydPointAcctranClient sydPointAcctranClient;
private SydPointAcctranClient sydPointAcctranClient;*/
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
LOG.info("定时任务开始处理江苏银行苏银豆积分,当前时间:" + format.format(new Date()));
try {
sydPointAcctranClient.AsynchronousPointAcctran(null, null, null, null);
//sydPointAcctranClient.AsynchronousPointAcctran(null, null, null, null);
} catch (Exception e) {
LOG.error("处理江苏银行苏银豆积分异常!!!",e);
}
......
package com.yizhi.application.handle.quartz.job;
import com.alibaba.fastjson.JSONArray;
import com.yizhi.application.cache.RedisCache;
import com.yizhi.application.domain.Chapter;
import com.yizhi.application.domain.Recorde;
import com.yizhi.application.domain.StudyLog;
import com.yizhi.application.event.EventWrapper;
import com.yizhi.application.feign.*;
import com.yizhi.application.handle.quartz.BaseJob;
import com.yizhi.application.publish.CloudEventPublisher;
import com.yizhi.application.task.AbstractTaskHandler;
import com.yizhi.application.task.TaskExecutor;
import com.yizhi.application.util.constant.QueueConstant;
import com.yizhi.application.util.event.TrainingProjectEvent;
import com.yizhi.application.vo.PointParamVO;
import com.yizhi.training.application.feign.TrainingProjectClient;
import com.yizhi.wechat.application.feign.WeiXinClient;
import org.quartz.InterruptableJob;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.UnableToInterruptJobException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;
......
......@@ -7,4 +7,4 @@ spring.cloud.nacos.config.shared-dataids=common-${spring.profiles.active}.proper
spring.cloud.nacos.config.namespace=${spring.profiles.active}
spring.cloud.nacos.config.prefix=${spring.application.name}
spring.cloud.nacos.config.file-extension=properties
spring.cloud.nacos.config.server-addr=192.168.0.203:8848
\ No newline at end of file
spring.cloud.nacos.config.server-addr=10.23.1.183:8848
\ No newline at end of file
......@@ -32,7 +32,7 @@ org.quartz.jobStore.misfireThreshold = 5000
#dataSource
org.quartz.dataSource.qzDS.driver = com.mysql.jdbc.Driver
org.quartz.dataSource.qzDS.driver = com.mysql.cj.jdbc.Driver
org.quartz.dataSource.qzDS.URL = ${spring.datasource.url}
org.quartz.dataSource.qzDS.user = ${spring.datasource.username}
org.quartz.dataSource.qzDS.password = ${spring.datasource.password}
......
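
The Quartz job store's JDBC driver above changes from the legacy `com.mysql.jdbc.Driver` to `com.mysql.cj.jdbc.Driver`, which assumes MySQL Connector/J 8.x is on the classpath; a trivial, purely illustrative check:

```java
// Illustrative only: fails with ClassNotFoundException if the Connector/J 8.x
// driver named by org.quartz.dataSource.qzDS.driver is not on the classpath.
public class MySqlDriverCheck {

    public static void main(String[] args) throws ClassNotFoundException {
        Class.forName("com.mysql.cj.jdbc.Driver");
        System.out.println("com.mysql.cj.jdbc.Driver is available");
    }
}
```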