Commit 3902d947 by liangkaiping

copy

parent a39db23c
<?xml version="1.0" encoding="UTF-8"?>
<!-- cloud-esearch-api: shared module holding the ES document pojos and search constants. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>cloud-esearch</artifactId>
        <groupId>com.yizhi</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <packaging>jar</packaging>
    <version>1.0-SNAPSHOT</version>
    <artifactId>cloud-esearch-api</artifactId>
    <dependencies>
        <!-- provided: consumers are expected to supply cloud-common-api at runtime -->
        <dependency>
            <groupId>com.yizhi</groupId>
            <artifactId>cloud-common-api</artifactId>
            <version>1.0-SNAPSHOT</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-elasticsearch</artifactId>
            <version>4.0.1.RELEASE</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- repackage goal is declared but skipped: this is a plain library jar,
                 not an executable Spring Boot fat jar -->
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
\ No newline at end of file
package com.yizhi.esearch.application.constant;

/**
 * Index names, common field names, status codes and visible-range codes
 * shared by the esearch module's pojos, controllers and query helpers.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 15:30
 */
public interface Constant {

    // NOTE(review): lowerCamel name breaks the INDEX_* convention of the other
    // constants, but renaming a public interface constant would break callers.
    String index_Assignment = "assignment";

    String INDEX_CASE_LIBRARY = "case_library";

    String INDEX_COURSE = "course";

    String INDEX_EXAM = "exam";

    String INDEX_LECTURER = "lecturer";

    // Marketing courses are stored in the same "course" index as regular courses.
    String INDEX_MARKETING_COURSE = "course";

    String INDEX_RESEARCH = "research";

    String INDEX_STUDENT_CASE = "student_case";

    String INDEX_TRAINING_PROJECT = "training_project";

    String INDEX_ACCOUNT = "account";

    String INDEX_ALBUM = "album";

    // Common filter-field names used when building ES queries.
    String FIELD_COMPANY_ID = "companyId";

    String FIELD_SITE_ID = "siteId";

    String FIELD_ORG_ID = "orgId";

    String FIELD_DELETED = "deleted";

    String FIELD_VISIBLE_RANGE = "visibleRange";

    // /**
    //  * index name per pojo entity
    //  */
    // Map<Class, String> POJO_INDEX_MAP = new HashMap<>();
    // /**
    //  * type name per pojo entity
    //  */
    // Map<Class, String> POJO_TYPE_MAP = new HashMap<>();
    //
    // static {
    //     POJO_INDEX_MAP.put(TrainingProject.class, "training_project");
    //     POJO_INDEX_MAP.put(Assignment.class, "assignment");
    //     POJO_INDEX_MAP.put(Course.class, "course");
    //     POJO_INDEX_MAP.put(Exam.class, "exam");
    //     POJO_INDEX_MAP.put(Research.class, "research");
    //
    //     POJO_TYPE_MAP.put(TrainingProject.class, "training_project");
    //     POJO_TYPE_MAP.put(Assignment.class, "assignment");
    //     POJO_TYPE_MAP.put(Course.class, "course");
    //     POJO_TYPE_MAP.put(Exam.class, "exam");
    //     POJO_TYPE_MAP.put(Research.class, "research");
    // }

    /**
     * Deleted flag: not deleted.
     */
    Integer DELETED_NO = 0;

    /**
     * Deleted flag: deleted.
     */
    Integer DELETED_YES = 1;

    /**
     * Status: published ("on shelf").
     */
    Integer STATUS_UP = 1;

    /**
     * Status: unpublished ("off shelf").
     */
    Integer STATUS_DOWN = 2;

    /**
     * Status: draft.
     */
    Integer STATUS_DRAFT = 0;

    /**
     * Visible range: visible to the whole site.
     */
    Integer VISIBLE_RANGE_PUBLIC = 1;

    /**
     * Visible range: visible only to an appointed scope.
     */
    Integer VISIBLE_RANGE_APPOINTED = 2;

    /**
     * Appointed-scope target type: organization (department).
     */
    Integer VISIBLE_RANGE_APPOINTED_ORGANIZATION = 1;

    /**
     * Appointed-scope target type: individual account.
     */
    Integer VISIBLE_RANGE_APPOINTED_ACCOUNT = 2;

    /**
     * ---------------------------------------- visible-range query bizType values ---------------------------------------------------
     */
    String BIZ_TYPE_VISIBLE_ASSIGNMENT = "assignment_auth";

    String BIZ_TYPE_VISIBLE_CASE_LIBRARY = "case_library_auth";

    String BIZ_TYPE_VISIBLE_COURSE = "course_auth";

    String BIZ_TYPE_VISIBLE_EXAM = "exam_auth";

    String BIZ_TYPE_VISIBLE_STUDENT_CASE = "student_case_auth";

    String BIZ_TYPE_VISIBLE_TRAINING_PROJECT = "training_project_auth";
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for album search (the "album" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "album")
public class EsAlbum {

    @Id
    private String id;

    private String name;

    private String keyword;

    private String code;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer visibleRange;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    private LocalDateTime createTime;

    private LocalDateTime updateTime;

    private Long updateTimeLong;

    /**
     * Stores the update time and keeps the cached epoch-millisecond mirror
     * (UTC+8) in sync so the index can sort on a numeric field.
     *
     * Fix: the original dereferenced {@code updateTime} unconditionally and
     * threw a NullPointerException for a null argument; every sibling pojo
     * (EsAssignment, EsCourse, ...) null-guards this conversion.
     */
    public void setUpdateTime(LocalDateTime updateTime) {
        this.updateTime = updateTime;
        this.updateTimeLong = updateTime == null
                ? null
                : updateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Derives the epoch-millisecond update time (UTC+8) from {@link #updateTime};
     * returns null when no update time is set.
     */
    public Long getUpdateTimeLong() {
        if (updateTime == null) {
            return null;
        }
        return updateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
\ No newline at end of file
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for assignment search (the "assignment" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "assignment")
public class EsAssignment {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    @ApiModelProperty("编码")
    private String code;

    @ApiModelProperty("是否支持补交(0:否,1:是")
    private Integer amendable;

    @ApiModelProperty("结束时间,如果是 null 值,则是长期有效")
    private LocalDateTime endTime;

    @ApiModelProperty("结束时间戳")
    private Long endTimeLong;

    private Integer status;

    private Integer visibleRange;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    /**
     * Stores the end time; when the value is non-null its epoch-millisecond
     * form (UTC+8) is cached in {@link #endTimeLong} for numeric sorting.
     */
    public void setEndTime(LocalDateTime endTime) {
        this.endTime = endTime;
        if (endTime != null) {
            this.endTimeLong = endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Derives the epoch-millisecond end time (UTC+8) from {@link #endTime};
     * null means the assignment is valid indefinitely.
     */
    public Long getEndTimeLong() {
        return endTime == null ? null : endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;

/**
 * Elasticsearch document mapping a recommendation keyword to a business
 * entity (the "biz_keyword" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "biz_keyword")
public class EsBizKeyword {

    /**
     * Composite document id: recommendType_siteId_{keyword}
     */
    @Id
    private String id;

    private String recommendType;

    /**
     * Business entity id the keyword points at.
     */
    @Field
    private String bizId;

    private String value;

    // NOTE(review): presumably a keyword-analyzed copy of `value` — confirm
    // against the index mapping.
    private String kw_value;

    /**
     * @see Constant
     */
    @Field
    private Integer status;

    @Field
    private Integer deleted;

    @Field
    private String companyId;

    @Field
    private String siteId;
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

import java.time.LocalDateTime;

/**
 * Elasticsearch document recording which organization/account may see a
 * business entity (the "biz_visible_range" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "biz_visible_range")
public class EsBizVisibleRange {

    /**
     * Composite id, e.g. concat(id, '_training_project_auth') as id,
     */
    @Id
    private String id;

    // One of the Constant.BIZ_TYPE_VISIBLE_* values.
    @Field(type = FieldType.Keyword)
    private String bizType;

    /**
     * Business entity id this visibility rule applies to.
     */
    @Field(type = FieldType.Keyword)
    private String bizId;

    /**
     * @see Constant
     */
    private Integer type;

    // Id of the appointed organization or account (depending on `type`).
    @Field(type = FieldType.Keyword)
    private String relationId;

    /**
     * Display name of the related organization/account.
     */
    private String name;

    @Field
    private Integer deleted;

    @Field(type = FieldType.Keyword)
    private String siteId;

    // Timestamp of when this row was (re)indexed.
    private LocalDateTime indexTime;
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * Elasticsearch document for case-library search (the "case_library" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "case_library")
public class EsCaseLibrary {

    @Id
    private String id;

    /**
     * title
     */
    private String name;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer visibleRange;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;
}
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for course search (the "course" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 15:24
 */
@Data
@Document(indexName = "course")
public class EsCourse {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    @ApiModelProperty("上架时间")
    private LocalDateTime releaseTime;

    @ApiModelProperty("上架时间戳")
    private Long releaseTimeLong;

    private Integer visibleRange;

    private Integer status;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    /**
     * Stores the release time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8) used for numeric sorting.
     */
    public void setReleaseTime(LocalDateTime releaseTime) {
        this.releaseTime = releaseTime;
        if (releaseTime != null) {
            this.releaseTimeLong = releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Derives the epoch-millisecond release time (UTC+8) from
     * {@link #releaseTime}; returns null when it is unset.
     */
    public Long getReleaseTimeLong() {
        return releaseTime == null
                ? null
                : releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * Elasticsearch document for recommended content (the "doc_recommend" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 15:24
 */
@Data
@Document(indexName = "doc_recommend")
public class EsDocRecommend {

    @Id
    private String id;

    // Id of the recommended business entity.
    private String recommendId;

    // Kind of entity being recommended.
    private String recommendType;

    private String name;

    private String code;

    private String keyword;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;
}
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for exam search (the "exam" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 20:42
 */
@Data
@Document(indexName = "exam")
public class EsExam {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("编码")
    private String code;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    private String description;

    @ApiModelProperty("开始时间")
    private LocalDateTime startTime;

    @ApiModelProperty("开始时间戳")
    private Long startTimeLong;

    @ApiModelProperty("结束时间")
    private LocalDateTime endTime;

    @ApiModelProperty("结束时间戳")
    private Long endTimeLong;

    private Integer status;

    private Integer visibleRange;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    /**
     * Stores the start time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setStartTime(LocalDateTime startTime) {
        this.startTime = startTime;
        if (startTime != null) {
            this.startTimeLong = startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the end time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setEndTime(LocalDateTime endTime) {
        this.endTime = endTime;
        if (endTime != null) {
            this.endTimeLong = endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Epoch-millisecond start time (UTC+8), or null when unset.
     */
    public Long getStartTimeLong() {
        return startTime == null
                ? null
                : startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond end time (UTC+8), or null when unset.
     */
    public Long getEndTimeLong() {
        return endTime == null
                ? null
                : endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * Elasticsearch document for lecturer search (the "lecturer" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "lecturer")
public class EsLecturer {

    @Id
    private String id;

    private String username;

    /**
     * Platform account id of the lecturer.
     */
    private String accountId;

    private String lecturerName;

    private String keyword;

    // NOTE(review): appears to hold multiple related ids in one string —
    // confirm the delimiter/format against the indexer.
    private String relationIds;

    private String classifyId;

    private String source;

    private String avatar;

    private String image;

    private String title;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * @ClassName Live
 * @Description Elasticsearch document for live-stream search (the "live" index).
 * @Author shengchenglong
 * @DATE 2019-12-26 15:39
 * @Version 1.0
 */
@Data
@Document(indexName = "live")
public class EsLive {

    @Id
    private String id;

    /**
     * title
     */
    private String name;

    /**
     * @see Constant
     */
    private Integer status;

    private String code;

    private String keyword;

    /**
     * @see Constant
     */
    private Integer visibleRange;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;
}
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for marketing-course search; stored in the shared
 * "course" index alongside {@code EsCourse}.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 15:24
 */
@Data
@Document(indexName = "course")
public class EsMarketingCourse {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    @ApiModelProperty("上架时间")
    private LocalDateTime releaseTime;

    @ApiModelProperty("上架时间戳")
    private Long releaseTimeLong;

    private Integer visibleRange;

    private Integer status;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    /**
     * Stores the release time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8) used for numeric sorting.
     */
    public void setReleaseTime(LocalDateTime releaseTime) {
        this.releaseTime = releaseTime;
        if (releaseTime != null) {
            this.releaseTimeLong = releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Epoch-millisecond release time (UTC+8) derived from
     * {@link #releaseTime}, or null when unset.
     */
    public Long getReleaseTimeLong() {
        return releaseTime == null
                ? null
                : releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * @ClassName OfflineCourse
 * @Description Elasticsearch document for offline-course search (the "offline_course" index).
 * @Author shengchenglong
 * @DATE 2019-05-21 15:55
 * @Version 1.0
 */
@Data
@Document(indexName = "offline_course")
public class EsOfflineCourse {

    @Id
    private String id;

    private String name;

    private String keyword;

    private String code;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;
}
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for research/survey search (the "research" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:46
 */
@Data
@Document(indexName = "research")
public class EsResearch {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("编码")
    private String code;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    @ApiModelProperty("开始时间")
    private LocalDateTime startTime;

    @ApiModelProperty("开始时间戳")
    private Long startTimeLong;

    @ApiModelProperty("结束时间")
    private LocalDateTime endTime;

    @ApiModelProperty("结束时间戳")
    private Long endTimeLong;

    private Integer status;

    private Integer visibleRange;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    /**
     * Stores the start time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setStartTime(LocalDateTime startTime) {
        this.startTime = startTime;
        if (startTime != null) {
            this.startTimeLong = startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the end time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setEndTime(LocalDateTime endTime) {
        this.endTime = endTime;
        if (endTime != null) {
            this.endTimeLong = endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Epoch-millisecond start time (UTC+8), or null when unset.
     */
    public Long getStartTimeLong() {
        return startTime == null
                ? null
                : startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond end time (UTC+8), or null when unset.
     */
    public Long getEndTimeLong() {
        return endTime == null
                ? null
                : endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * Elasticsearch document for student-case search (the "student_case" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 21:09
 */
@Data
@Document(indexName = "student_case")
public class EsStudentCase {

    @Id
    private String id;

    /**
     * title
     */
    private String name;

    /**
     * @see Constant
     */
    private Integer status;

    /**
     * @see Constant
     */
    private Integer visibleRange;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;
}
package com.yizhi.esearch.application.pojo.biz;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for training-project search
 * (the "training_project" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 16:18
 */
@Data
@Document(indexName = "training_project")
public class EsTrainingProject {

    @ApiModelProperty("id")
    @Id
    private String id;

    @ApiModelProperty("名称")
    private String name;

    private String keyword;

    @ApiModelProperty("开始时间")
    private LocalDateTime startTime;

    @ApiModelProperty("开始时间戳")
    private Long startTimeLong;

    @ApiModelProperty("结束时间")
    private LocalDateTime endTime;

    @ApiModelProperty("结束时间戳")
    private Long endTimeLong;

    @ApiModelProperty("上架时间")
    private LocalDateTime releaseTime;

    @ApiModelProperty("上架时间戳")
    private Long releaseTimeLong;

    private Integer status;

    private Integer enableEnroll;

    /**
     * 项目是否0:指定学员可见,1平台用户可见(创建人管理权限范围)
     */
    private Integer visibleRange;

    private Integer deleted;

    private String companyId;

    private String siteId;

    private String orgId;

    private String image;

    /**
     * Stores the start time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setStartTime(LocalDateTime startTime) {
        this.startTime = startTime;
        if (startTime != null) {
            this.startTimeLong = startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the end time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setEndTime(LocalDateTime endTime) {
        this.endTime = endTime;
        if (endTime != null) {
            this.endTimeLong = endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the release time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setReleaseTime(LocalDateTime releaseTime) {
        this.releaseTime = releaseTime;
        if (releaseTime != null) {
            this.releaseTimeLong = releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Epoch-millisecond start time (UTC+8), or null when unset.
     */
    public Long getStartTimeLong() {
        return startTime == null
                ? null
                : startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond end time (UTC+8), or null when unset.
     */
    public Long getEndTimeLong() {
        return endTime == null
                ? null
                : endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond release time (UTC+8), or null when unset.
     */
    public Long getReleaseTimeLong() {
        return releaseTime == null
                ? null
                : releaseTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.biz;

import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

import java.time.LocalDateTime;
import java.time.ZoneOffset;

/**
 * Elasticsearch document for a learner's enrollment in a training project
 * (the "training_project_enroll_record" index).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 16:18
 */
@Data
@Document(indexName = "training_project_enroll_record")
public class EsTrainingProjectEnrollRecord {

    @Id
    private String id;

    @Field(type = FieldType.Keyword)
    private String trainingProjectId;

    @Field(type = FieldType.Keyword)
    private String siteId;

    @Field(type = FieldType.Keyword)
    private String enrollId;

    @Field(type = FieldType.Keyword)
    private String accountId;

    private LocalDateTime startTime;

    private Long startTimeLong;

    private LocalDateTime endTime;

    private Long endTimeLong;

    private LocalDateTime joinTime;

    private Long joinTimeLong;

    /**
     * Stores the start time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setStartTime(LocalDateTime startTime) {
        this.startTime = startTime;
        if (startTime != null) {
            this.startTimeLong = startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the end time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setEndTime(LocalDateTime endTime) {
        this.endTime = endTime;
        if (endTime != null) {
            this.endTimeLong = endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Stores the join time; a non-null value also refreshes the cached
     * epoch-millisecond mirror (UTC+8).
     */
    public void setJoinTime(LocalDateTime joinTime) {
        this.joinTime = joinTime;
        if (joinTime != null) {
            this.joinTimeLong = joinTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
        }
    }

    /**
     * Epoch-millisecond start time (UTC+8), or null when unset.
     */
    public Long getStartTimeLong() {
        return startTime == null
                ? null
                : startTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond end time (UTC+8), or null when unset.
     */
    public Long getEndTimeLong() {
        return endTime == null
                ? null
                : endTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }

    /**
     * Epoch-millisecond join time (UTC+8), or null when unset.
     */
    public Long getJoinTimeLong() {
        return joinTime == null
                ? null
                : joinTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
package com.yizhi.esearch.application.pojo.system;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * @ClassName Account
 * @Description Platform user document for the "account" index.
 * @Author chengchenglong
 * @DATE 2019-04-17 14:57
 * @Version 1.0
 */
@Data
@Document(indexName = "account")
@ApiModel(value = "用户")
public class EsAccount {

    // NOTE(review): this is the only @Document class here without an @Id
    // annotation on `id` — confirm whether the framework's by-name detection
    // is being relied on intentionally.
    @ApiModelProperty(value = "id")
    private String id;

    @ApiModelProperty(value = "账号名称")
    private String name;

    @ApiModelProperty(value = "性别(‘M’:男,'F':女)")
    private String sex;

    @ApiModelProperty(value = "账号描述")
    private String description;

    @ApiModelProperty(value = "全名")
    private String fullName;

    @ApiModelProperty(value = "工号")
    private String workNum;

    @ApiModelProperty(value = "座机")
    private String telephone;

    @ApiModelProperty(value = "手机号")
    private String mobile;

    @ApiModelProperty(value = "邮箱地址")
    private String email;

    @ApiModelProperty(value = "微信")
    private String wechat;

    @ApiModelProperty(value = "职位")
    private String position;

    @ApiModelProperty(value = "是否开通")
    private Integer enabled;

    @ApiModelProperty(value = "是否锁定")
    private Integer locked;

    @ApiModelProperty(value = "部门id")
    private Long orgId;

    @ApiModelProperty(value = "部门名称")
    private String orgName;

    // Nested-set bounds of the owning organization (see EsOrganization).
    private Integer orgLeftIndex;

    private Integer orgRightIndex;

    @ApiModelProperty(value = "租户(公司)id")
    private String companyId;

    @ApiModelProperty(value = "公司名称")
    private String companyName;
}
\ No newline at end of file
package com.yizhi.esearch.application.pojo.system;

import com.yizhi.esearch.application.constant.Constant;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

/**
 * @ClassName Organization
 * @Description Department/organization document for the "organization" index.
 * @Author chengchenglong
 * @DATE 2019-04-17 20:18
 * @Version 1.0
 */
@Data
@Document(indexName = "organization")
public class EsOrganization {

    @Id
    private String id;

    private String name;

    private String parentId;

    // Depth of this node in the organization tree.
    private Integer layer;

    /**
     * @see Constant
     */
    private Integer deleted;

    private String companyId;

    // Nested-set (left/right) bounds used for subtree queries.
    private Integer leftIndex;

    private Integer rightIndex;
}
package com.yizhi.esearch.application.query;

/**
 * @ClassName CIndexQuery
 * @Description Empty placeholder — no query logic has been implemented yet.
 * @Author shengchenglong
 * @DATE 2019-12-30 19:26
 * @Version 1.0
 */
public class CIndexQuery {
}
package com.yizhi.esearch.application.vo;

import com.yizhi.esearch.application.pojo.biz.*;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.util.List;

/**
 * Aggregated result container returned by the global search endpoint: one
 * list per searchable content type.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/27 10:32
 */
@Data
@ApiModel("全局搜索返回 vo")
public class GlobalSearchVo {

    @ApiModelProperty("课程")
    private List<EsCourse> courses;

    @ApiModelProperty("营销课程")
    private List<EsMarketingCourse> marketingCourses;

    @ApiModelProperty("培训项目")
    private List<EsTrainingProject> trainingProjects;

    @ApiModelProperty("考试")
    private List<EsExam> exams;

    @ApiModelProperty("作业")
    private List<EsAssignment> assignments;

    @ApiModelProperty("调研")
    private List<EsResearch> research;

    @ApiModelProperty("讲师")
    private List<EsLecturer> lecturers;

    @ApiModelProperty("案例库")
    private List<EsStudentCase> studentCases;

    // Fix: label was a copy-paste duplicate of the studentCases one ("案例库");
    // this field holds albums, so it is documented as 专辑.
    @ApiModelProperty("专辑")
    private List<EsAlbum> albums;
}
<?xml version="1.0" encoding="UTF-8"?>
<!-- cloud-esearch-service: the runnable search service depending on cloud-esearch-api. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>cloud-esearch</artifactId>
        <groupId>com.yizhi</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>cloud-esearch-service</artifactId>
    <dependencies>
        <dependency>
            <groupId>com.yizhi</groupId>
            <artifactId>cloud-core</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-elasticsearch</artifactId>
            <version>4.0.1.RELEASE</version>
        </dependency>
        <!-- <dependency>-->
        <!-- <groupId>com.yizhi</groupId>-->
        <!-- <artifactId>cloud-orm</artifactId>-->
        <!-- <version>1.0-SNAPSHOT</version>-->
        <!-- -->
        <!-- </dependency>-->
        <dependency>
            <groupId>com.yizhi</groupId>
            <artifactId>cloud-util</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.yizhi</groupId>
            <artifactId>cloud-esearch-api</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/cglib/cglib -->
        <!-- version is expected to come from parent dependencyManagement -->
        <dependency>
            <groupId>cglib</groupId>
            <artifactId>cglib</artifactId>
        </dependency>
        <dependency>
            <groupId>com.yizhi</groupId>
            <artifactId>cloud-drools-api</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- deploy 时跳过该模块 -->
            <!-- skip this module during `mvn deploy` -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-deploy-plugin</artifactId>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
\ No newline at end of file
package com.yizhi.esearch.application;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories;
import org.springframework.scheduling.annotation.EnableScheduling;

/**
 * Boot entry point for the esearch service: registers with service
 * discovery, enables scheduling, scans all com.yizhi beans, and wires
 * Spring Data Elasticsearch repositories from the repository package.
 */
@EnableDiscoveryClient
@SpringBootApplication
@EnableElasticsearchRepositories(basePackages = {"com.yizhi.esearch.application.repository"})
@EnableScheduling
@ComponentScan({"com.yizhi"})
public class CloudEsearchApplication {

    public static void main(String[] args) {
        SpringApplication.run(CloudEsearchApplication.class, args);
    }
}
//package com.fulan.esearch.application.config;
//
//import cn.hutool.core.date.DateTime;
//import cn.hutool.core.date.DateUtil;
//import org.springframework.beans.factory.InitializingBean;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.core.convert.TypeDescriptor;
//import org.springframework.core.convert.converter.GenericConverter;
//import org.springframework.core.convert.support.GenericConversionService;
//import org.springframework.stereotype.Component;
//
//import java.util.Collections;
//import java.util.Date;
//import java.util.Set;
//
///**
// * @ClassName DateGenericConverter
// * @Description TODO
// * @Author shengchenglong
// * @DATE 2020/7/19 4:31 下午
// * @Version 1.0
// */
//@Component
//public class DateGenericConverter implements GenericConverter, InitializingBean {
//
// private static final String PARTTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX";
//
// @Autowired
// private GenericConversionService genericConversionService;
//
// @Override
// public Set<ConvertiblePair> getConvertibleTypes() {
// return Collections.singleton(new ConvertiblePair(String.class, Date.class));
// }
//
// @Override
// public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
// DateTime dateTime = DateUtil.parse(String.valueOf(source), PARTTERN);
// return dateTime.toJdkDate();
// }
//
//
// @Override
// public void afterPropertiesSet() throws Exception {
// genericConversionService.addConverter(this);
// }
//}
package com.yizhi.esearch.application.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

/**
 * Springfox (Swagger 2) configuration exposing the search controllers.
 *
 * @Author: shengchenglong
 * @Date: 2018/3/6 14:30
 */
@Configuration
@EnableSwagger2
public class SwaggerConfig {

    /**
     * Builds the Docket that scans this service's controllers.
     *
     * Fix: the base package was "com.fulan.application" (copied from another
     * project) while every class in this service lives under
     * com.yizhi.esearch.application — with the old value no endpoint was
     * ever picked up by swagger.
     */
    @Bean
    public Docket createRestApi() {
        return new Docket(DocumentationType.SWAGGER_2)
                .groupName("搜索")
                .apiInfo(apiInfo())
                .select()
                .apis(RequestHandlerSelectors.basePackage("com.yizhi.esearch.application"))
                .paths(PathSelectors.any())
                .build();
    }

    /**
     * Static API metadata (title and version) shown in the swagger UI.
     */
    private ApiInfo apiInfo() {
        return new ApiInfoBuilder()
                .title("搜索")
                // version
                .version("1.0")
                .build();
    }
}
package com.yizhi.esearch.application.controller.api;

import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsAlbum;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

/**
 * Paged keyword search over the "album" index.
 *
 * Fix: the swagger metadata was copy-pasted from AssignmentController — the
 * tag/operation said 作业搜索 (assignment search) and the response model was
 * EsAssignment, which mislabelled this album endpoint in the API docs; the
 * now-unused EsAssignment import is dropped.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:54
 */
@Api(tags = "专辑搜索")
@RestController
@RequestMapping("/search/albums")
public class AlbumController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Searches albums by keyword, sorted by indexTime descending, paged.
     *
     * @param keyword  search keyword
     * @param pageNo   page number
     * @param pageSize page size
     * @return one page of matching albums
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "专辑搜索", response = EsAlbum.class)
    public Response<List<EsAlbum>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        Query searchQuery = QueryHelper.commonPageSearchQueryForKeyword(keyword, EsAlbum.class, pageNo, pageSize, "indexTime", false, elasticsearchRestTemplate);
        Page<EsAlbum> page = elasticsearchRestTemplate.queryForPage(searchQuery, EsAlbum.class, IndexCoordinates.of(Constant.INDEX_ALBUM));
        return ResponseHelper.ok(page);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsAssignment;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for assignments.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:54
 */
@Api(tags = "作业搜索")
@RestController
@RequestMapping("/search/assignment")
public class AssignmentController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the assignment index, sorted on
     * {@code endTime}.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsAssignment} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "作业搜索", response = EsAssignment.class)
    public Response<List<EsAssignment>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        // Shared helper assembles the context filter, paging and sort in one query.
        Query query = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsAssignment.class, pageNo, pageSize, "endTime", false, elasticsearchRestTemplate);
        Page<EsAssignment> result = elasticsearchRestTemplate.queryForPage(
                query, EsAssignment.class, IndexCoordinates.of(Constant.index_Assignment));
        return ResponseHelper.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsCourse;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for courses.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:06
 */
@Api(tags = "课程搜索")
@RestController
@RequestMapping("/search/course")
public class CourseController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the course index, sorted on
     * {@code releaseTime}.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsCourse} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "课程搜索", response = EsCourse.class)
    public Response<List<EsCourse>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        // Shared helper assembles the context filter, paging and sort in one query.
        Query query = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsCourse.class, pageNo, pageSize, "releaseTime", false, elasticsearchRestTemplate);
        Page<EsCourse> result = elasticsearchRestTemplate.queryForPage(
                query, EsCourse.class, IndexCoordinates.of(Constant.INDEX_COURSE));
        return ResponseHelper.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsExam;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for exams.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/27 10:00
 */
@Api(tags = "考试搜索")
@RestController
@RequestMapping("/search/exam")
public class ExamController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the exam index, sorted on {@code endTime}.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsExam} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "考试搜索", response = EsExam.class)
    public Response<List<EsExam>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        // Shared helper assembles the context filter, paging and sort in one query.
        Query query = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsExam.class, pageNo, pageSize, "endTime", false, elasticsearchRestTemplate);
        Page<EsExam> result = elasticsearchRestTemplate.queryForPage(
                query, EsExam.class, IndexCoordinates.of(Constant.INDEX_EXAM));
        return ResponseHelper.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.pojo.biz.*;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.vo.GlobalSearchVo;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Aggregated search across every business index; collects the per-type
 * results into a single {@link GlobalSearchVo}.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/27 10:08
 */
@Api(tags = "全局搜索")
@RestController
@RequestMapping("/search/global")
public class GlobalSearchController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;
    @Autowired
    private LecturerController lecturerController;

    /**
     * Runs the keyword through each index via {@link QueryHelper#globalSearch}
     * and additionally pulls the first two lecturer hits through
     * {@link LecturerController#name}.
     *
     * @param keyword search keyword
     * @return populated {@link GlobalSearchVo}
     */
    @GetMapping("/{keyword}")
    @ApiOperation(value = "全局搜索", response = GlobalSearchVo.class)
    public Response<GlobalSearchVo> globalSearch(
            @ApiParam(name = "keyword", value = "搜索关键字") @PathVariable("keyword") String keyword) {
        GlobalSearchVo result = new GlobalSearchVo();
        result.setAssignments(QueryHelper.globalSearch(keyword, EsAssignment.class, elasticsearchRestTemplate));
        result.setCourses(QueryHelper.globalSearch(keyword, EsCourse.class, elasticsearchRestTemplate));
        result.setMarketingCourses(QueryHelper.globalSearch(keyword, EsMarketingCourse.class, elasticsearchRestTemplate));
        result.setExams(QueryHelper.globalSearch(keyword, EsExam.class, elasticsearchRestTemplate));
        result.setResearch(QueryHelper.globalSearch(keyword, EsResearch.class, elasticsearchRestTemplate));
        result.setTrainingProjects(QueryHelper.globalSearch(keyword, EsTrainingProject.class, elasticsearchRestTemplate));
        result.setStudentCases(QueryHelper.globalSearch(keyword, EsStudentCase.class, elasticsearchRestTemplate));
        result.setAlbums(QueryHelper.globalSearch(keyword, EsAlbum.class, elasticsearchRestTemplate));
        // Lecturers go through the dedicated controller (page 1, 2 hits);
        // the payload is a Page, hence the raw cast.
        Page lecturerPage = (Page) lecturerController.name(keyword, 1, 2).getData();
        if (lecturerPage != null) {
            result.setLecturers(lecturerPage.getContent());
        }
        return Response.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsCourse;
import com.yizhi.esearch.application.pojo.biz.EsLecturer;
import com.yizhi.esearch.application.pojo.biz.EsOfflineCourse;
import com.yizhi.esearch.application.pojo.biz.EsTrainingProject;
import com.yizhi.esearch.application.repository.CourseRepository;
import com.yizhi.esearch.application.repository.LecturerRepository;
import com.yizhi.esearch.application.repository.OfflineCourseRepository;
import com.yizhi.esearch.application.repository.TrainingProjectRepository;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.core.query.UpdateResponse;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * Lecturer search endpoints.
 *
 * <p>A keyword matches a lecturer either directly by name, or indirectly via
 * the id of a course / training project / offline course the lecturer is
 * related to (matched against the lecturer document's {@code relationIds}).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:54
 */
@Api(tags = "讲师搜索")
@RestController
@RequestMapping("/search/lecturer")
public class LecturerController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;
    @Autowired
    private CourseRepository courseRepository;
    @Autowired
    private TrainingProjectRepository trainingProjectRepository;
    @Autowired
    private OfflineCourseRepository offlineCourseRepository;
    @Autowired
    private LecturerRepository lecturerRepository;

    /**
     * Path-variable variant of {@link #list}; searches without a classify filter.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsLecturer} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "讲师搜索", response = EsLecturer.class)
    public Response<EsLecturer> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        return list(keyword, null, pageNo, pageSize);
    }

    /**
     * Lecturer search with optional keyword and optional comma-separated
     * classify-id filter.
     *
     * @param keyword    optional search keyword
     * @param classifyId optional comma-separated lecturer classify ids
     * @param pageNo     1-based page number
     * @param pageSize   page size
     * @return a page of matching {@link EsLecturer} documents
     */
    @GetMapping("/list")
    @ApiOperation(value = "讲师搜索", response = EsLecturer.class)
    public Response<EsLecturer> list(
            @ApiParam(name = "keyword", value = "搜索关键字") @RequestParam(name = "keyword", required = false) String keyword,
            @ApiParam(name = "classifyId", value = "讲师分类") @RequestParam(name = "classifyId", required = false) String classifyId,
            @ApiParam(name = "pageNo", value = "第几页") @RequestParam("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @RequestParam("pageSize") Integer pageSize) {
        String siteId = String.valueOf(ContextHolder.get().getSiteId());
        QueryBuilder qbContext = QueryHelper.getCommonQueryBuilderForContext();
        QueryBuilder qbSearch = null;
        if (!StringUtils.isEmpty(keyword)) {
            // Collect the ids of every published, non-deleted course, training
            // project and offline course whose name matches the keyword; a
            // lecturer related to any of them (via relationIds) is a hit.
            // StringBuilder instead of repeated String.concat to avoid O(n^2)
            // copying when many ids match.
            StringBuilder relationIds = new StringBuilder();
            // Matching courses.
            List<EsCourse> courses = courseRepository.findBySiteIdEqualsAndNameAndStatusAndDeleted(siteId, keyword, Constant.STATUS_UP, Constant.DELETED_NO);
            if (!CollectionUtils.isEmpty(courses)) {
                for (EsCourse c : courses) {
                    relationIds.append(c.getId()).append(' ');
                }
            }
            // Matching training projects.
            List<EsTrainingProject> trainingProjects = trainingProjectRepository.findBySiteIdEqualsAndNameAndStatusAndDeleted(siteId, keyword, Constant.STATUS_UP, Constant.DELETED_NO);
            if (!CollectionUtils.isEmpty(trainingProjects)) {
                for (EsTrainingProject tp : trainingProjects) {
                    relationIds.append(tp.getId()).append(' ');
                }
            }
            // Matching offline courses.
            List<EsOfflineCourse> offlineCourses = offlineCourseRepository.findBySiteIdEqualsAndNameAndStatusAndDeleted(siteId, keyword, Constant.STATUS_UP, Constant.DELETED_NO);
            if (!CollectionUtils.isEmpty(offlineCourses)) {
                for (EsOfflineCourse oc : offlineCourses) {
                    relationIds.append(oc.getId()).append(' ');
                }
            }
            QueryBuilder qbForRelationIds = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("relationIds", relationIds.toString()));
            // Fuzzy-style name match; a leading-* wildcard would hurt query performance.
            QueryBuilder qbForLecturerName = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("lecturerName", keyword));
            // Either a relation-id hit or a name hit qualifies.
            qbSearch = QueryBuilders.boolQuery().should(qbForRelationIds).should(qbForLecturerName);
        }
        if (!StringUtils.isEmpty(classifyId)) {
            // Any of the requested classify ids may match (OR semantics).
            String[] classifyIds = classifyId.split(",");
            BoolQueryBuilder qb = QueryBuilders.boolQuery();
            for (String id : classifyIds) {
                qb.should(QueryBuilders.matchQuery("classifyId", id));
            }
            // AND the classify filter with the keyword query when both are present.
            if (qbSearch != null) {
                qbSearch = QueryBuilders.boolQuery().must(qbSearch).must(qb);
            } else {
                qbSearch = QueryBuilders.boolQuery().must(qb);
            }
        }
        Query searchQuery = new NativeSearchQueryBuilder()
                // .addAggregation(getTimeAggreation())
                .withQuery(qbSearch)
                .withFilter(qbContext)
                // PageRequest is 0-based while the API is 1-based.
                .withPageable(PageRequest.of(pageNo - 1, pageSize))
                // notice: sort by relevance score first, business ordering second.
                .withSort(SortBuilders.scoreSort().order(SortOrder.DESC))
                .build();
        Page<EsLecturer> page = elasticsearchRestTemplate.queryForPage(searchQuery, EsLecturer.class, IndexCoordinates.of(Constant.INDEX_LECTURER));
        return Response.ok(page);
    }

    /**
     * Updates a lecturer's enabled/disabled status via a partial document update.
     *
     * @param map expects keys {@code id} (lecturer id) and {@code enabled} (boolean)
     * @return true when the update succeeded (a NOOP — value already set —
     *         also counts as success)
     */
    @PostMapping("/enableStatus/update")
    public boolean updateEnableStatus(@RequestBody Map<String, Object> map) {
        Long id = Long.valueOf(String.valueOf(map.get("id")));
        // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf.
        boolean enabled = Boolean.parseBoolean(String.valueOf(map.get("enabled")));
        Optional<EsLecturer> optional = lecturerRepository.findById(id);
        if (!optional.isPresent()) {
            return false;
        }
        EsLecturer lecturer = optional.get();
        HashMap<String, Object> params = new HashMap<>();
        params.put("status", enabled ? Constant.STATUS_UP : Constant.STATUS_DOWN);
        UpdateQuery updateQuery = UpdateQuery.builder(lecturer.getId())
                .withDocument(Document.from(params))
                .build();
        UpdateResponse updateResponse = elasticsearchRestTemplate.update(updateQuery, IndexCoordinates.of(Constant.INDEX_LECTURER));
        return UpdateResponse.Result.UPDATED.equals(updateResponse.getResult())
                // A NOOP (value unchanged) still counts as a successful update.
                || UpdateResponse.Result.NOOP.equals(updateResponse.getResult());
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsMarketingCourse;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for marketing courses.
 *
 * <p>Fix: the operation label previously said "课程搜索" (plain course search),
 * copied from CourseController; aligned with the class tag "营销课程搜索".
 *
 * <p>NOTE(review): {@code Constant.INDEX_MARKETING_COURSE} is defined as
 * "course" — the same index as {@code INDEX_COURSE}; marketing courses appear
 * to be distinguished only by the mapped type — TODO confirm.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:06
 */
@Api(tags = "营销课程搜索")
@RestController
@RequestMapping("/search/marketingCourse")
public class MarketingCourseController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the marketing-course index, sorted on
     * {@code releaseTime}.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsMarketingCourse} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "营销课程搜索", response = EsMarketingCourse.class)
    public Response<List<EsMarketingCourse>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        Query searchQuery = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsMarketingCourse.class, pageNo, pageSize, "releaseTime", false, elasticsearchRestTemplate);
        Page<EsMarketingCourse> page = elasticsearchRestTemplate.queryForPage(
                searchQuery, EsMarketingCourse.class, IndexCoordinates.of(Constant.INDEX_MARKETING_COURSE));
        return ResponseHelper.ok(page);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsResearch;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for research (surveys).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:06
 */
@Api(tags = "调研搜索")
@RestController
@RequestMapping("/search/research")
public class ResearchController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the research index, sorted on
     * {@code endTime}. Unlike the sibling controllers this one passes
     * {@code true} as the sort flag to the shared helper.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsResearch} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "调研搜索", response = EsResearch.class)
    public Response<List<EsResearch>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        Query query = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsResearch.class, pageNo, pageSize, "endTime", true, elasticsearchRestTemplate);
        Page<EsResearch> result = elasticsearchRestTemplate.queryForPage(
                query, EsResearch.class, IndexCoordinates.of(Constant.INDEX_RESEARCH));
        return ResponseHelper.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsStudentCase;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for student cases.
 *
 * <p>Fix: the Swagger operation previously declared
 * {@code response = EsExam.class} — copied from ExamController — although the
 * endpoint returns {@link EsStudentCase} documents; the stale {@code EsExam}
 * import is removed as well.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/27 10:00
 */
@Api(tags = "案例库搜索")
@RestController
@RequestMapping("/search/studentCase")
public class StudentCaseController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the student-case index.
     *
     * <p>NOTE(review): the sort field is {@code "status"}, unlike the time
     * fields used by the sibling controllers — looks intentional but worth
     * confirming.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsStudentCase} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "案例库搜索", response = EsStudentCase.class)
    public Response<List<EsStudentCase>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        Query searchQuery = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsStudentCase.class, pageNo, pageSize, "status", false, elasticsearchRestTemplate);
        Page<EsStudentCase> page = elasticsearchRestTemplate.queryForPage(
                searchQuery, EsStudentCase.class, IndexCoordinates.of(Constant.INDEX_STUDENT_CASE));
        return ResponseHelper.ok(page);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsTrainingProject;
import com.yizhi.esearch.application.util.QueryHelper;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;

/**
 * Keyword search endpoint for training projects.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 14:39
 */
@Api(tags = "培训项目搜索")
@RestController
@RequestMapping("/search/trainingProject")
public class TrainingProjectController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Paged keyword search against the training-project index, sorted on
     * {@code releaseTime}.
     *
     * @param keyword  search keyword
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @return a page of matching {@link EsTrainingProject} documents
     */
    @GetMapping("name/{keyword}/{pageNo}/{pageSize}")
    @ApiOperation(value = "培训项目搜索", response = EsTrainingProject.class)
    public Response<List<EsTrainingProject>> name(
            @ApiParam(name = "keyword", value = "搜索关键词") @PathVariable("keyword") String keyword,
            @ApiParam(name = "pageNo", value = "第几页") @PathVariable("pageNo") Integer pageNo,
            @ApiParam(name = "pageSize", value = "每页几条") @PathVariable("pageSize") Integer pageSize) {
        Query query = QueryHelper.commonPageSearchQueryForKeyword(
                keyword, EsTrainingProject.class, pageNo, pageSize, "releaseTime", false, elasticsearchRestTemplate);
        Page<EsTrainingProject> result = elasticsearchRestTemplate.queryForPage(
                query, EsTrainingProject.class, IndexCoordinates.of(Constant.INDEX_TRAINING_PROJECT));
        return ResponseHelper.ok(result);
    }
}
package com.yizhi.esearch.application.controller.api;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.core.application.context.RequestContext;
import com.yizhi.esearch.application.pojo.biz.WebErrorLog;
import com.yizhi.esearch.application.repository.WebErrorLogRepository;
import com.yizhi.esearch.application.util.ResponseHelper;
import com.yizhi.util.application.domain.Response;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Date;

/**
 * Persists front-end error logs, stamping each record with the caller's
 * request context (company/site/org/account) and the server time.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:54
 */
@Api(tags = "前端错误日志")
@RestController
@RequestMapping("/webErrorLog")
public class WebErrorLogController {

    @Autowired
    private WebErrorLogRepository webErrorLogRepository;

    /**
     * Enriches the submitted log with context fields and saves it.
     *
     * <p>NOTE(review): context ids are dereferenced with {@code toString()}
     * without null checks — assumes the request context is always fully
     * populated; TODO confirm. The failure branch also responds via
     * {@code ResponseHelper.ok} — preserved as-is.
     *
     * @param webErrorLog the error log payload from the front end
     * @return a success/failure message response
     */
    @PostMapping("/save")
    @ApiOperation(value = "保存前端错误日志", response = String.class)
    public Response saveWebErrorLog(@ApiParam(name = "webErrorLog", value = "保存错误日志")
                                    @RequestBody WebErrorLog webErrorLog) {
        RequestContext requestContext = ContextHolder.get();
        webErrorLog.setCompanyId(requestContext.getCompanyId().toString());
        webErrorLog.setSiteId(requestContext.getSiteId().toString());
        webErrorLog.setOrgId(requestContext.getOrgId().toString());
        webErrorLog.setCreateById(requestContext.getAccountId().toString());
        webErrorLog.setCreateByName(requestContext.getAccountName());
        webErrorLog.setCreateTime(new Date());
        WebErrorLog saved = webErrorLogRepository.save(webErrorLog);
        if (saved != null) {
            return ResponseHelper.ok("保存成功!");
        }
        return ResponseHelper.ok("保存失败!");
    }
}
package com.yizhi.esearch.application.controller.remote;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.core.application.context.RequestContext;
import com.yizhi.esearch.application.pojo.biz.EsBizKeyword;
import com.yizhi.esearch.application.repository.BizKeywordRepository;
import com.yizhi.esearch.application.util.QueryHelper;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Remote endpoint that pages through business keywords for the recommendation
 * engine.
 *
 * @ClassName RecommendController
 * @Description TODO
 * @Author shengchenglong
 * @DATE 2019-12-30 14:14
 * @Version 1.0
 */
@Slf4j
@RestController
@RequestMapping("/recommend/bizKeyword")
public class BizKeywordController {

    @Autowired
    private BizKeywordRepository bizKeywordRepository;

    /**
     * Pages keyword documents filtered by recommend type and, optionally, a
     * value match.
     *
     * <p>Request map keys: {@code pageNo}, {@code pageSize}, {@code context}
     * (serialized RequestContext), {@code taskType}, optional
     * {@code taskParamsValue}. Response map keys: {@code total} and, when
     * there are hits, {@code record} — a JSON array of
     * taskId / taskFieldValue / taskParamsType entries.
     *
     * @param map remote-call parameters as described above
     * @return result map with total count and serialized records
     */
    @PostMapping("keywordPage")
    public Map<String, String> keywordPage(@RequestBody Map<String, String> map) {
        int pageNo = Integer.parseInt(map.get("pageNo"));
        int pageSize = Integer.parseInt(map.get("pageSize"));
        // Rehydrate the caller's request context so QueryHelper can build
        // the tenant filter from it.
        RequestContext requestContext = JSON.parseObject(map.get("context"), RequestContext.class);
        ContextHolder.set(requestContext);
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("recommendType", map.get("taskType").toLowerCase()));
        if (StringUtils.isNotEmpty(map.get("taskParamsValue"))) {
            boolQuery.must(QueryBuilders.matchQuery("value", map.get("taskParamsValue")));
        }
        Query searchQuery = new NativeSearchQueryBuilder()
                .withQuery(boolQuery)
                .withFilter(QueryHelper.getCommonQueryBuilderForContext())
                .withPageable(PageRequest.of(pageNo - 1, pageSize))
                .build();
        Page hits = bizKeywordRepository.search(searchQuery);
        List<EsBizKeyword> keywords = hits.getContent();
        Map<String, String> result = new HashMap<>();
        result.put("total", String.valueOf(hits.getTotalElements()));
        if (!CollectionUtils.isEmpty(keywords)) {
            JSONArray records = new JSONArray();
            for (EsBizKeyword keyword : keywords) {
                JSONObject record = new JSONObject();
                record.put("taskId", keyword.getBizId());
                record.put("taskFieldValue", keyword.getValue());
                record.put("taskParamsType", "keyword");
                records.add(record);
            }
            result.put("record", records.toJSONString());
        }
        log.info(JSON.toJSONString(result));
        return result;
    }
}
package com.yizhi.esearch.application.controller.remote;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.system.EsOrganization;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.GetQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.List;
/**
 * Remote endpoint resolving an organization's ancestor chain by name.
 *
 * @ClassName OrganizationController
 * @Description TODO
 * @Author chengchenglong
 * @DATE 2019-04-18 16:47
 * @Version 1.0
 */
@RestController
@RequestMapping("/org")
public class OrganizationController {
@Autowired
private ElasticsearchRestTemplate elasticsearchTemplate;
/**
 * Returns the names of the given organization's ancestors (root first,
 * per the leftIndex ascending sort) with the organization's own name
 * appended last.
 *
 * <p>The org tree appears to be stored as a nested-set model: an ancestor
 * satisfies leftIndex &lt;= child.leftIndex and rightIndex &gt;=
 * child.rightIndex, so the range query below also matches the node itself
 * — presumably intended, since its name would then come from the query
 * result; TODO confirm whether the extra names.add(org.getName()) can
 * duplicate it.
 *
 * <p>NOTE(review): returns null both when the org id is unknown and when
 * the ancestor query yields nothing — callers may rely on null vs empty,
 * so this is left as-is.
 *
 * @param orgId id of the organization whose ancestor names are wanted
 * @return ancestor names plus own name, or null (see note above)
 */
@GetMapping("/parent/names")
public List<String> getParentNames(@RequestParam("orgId") Long orgId) {
// Point lookup of the organization document by id.
GetQuery getQuery = new GetQuery(String.valueOf(orgId));
EsOrganization org = elasticsearchTemplate.queryForObject(getQuery, EsOrganization.class);
if (org != null) {
int leftIndex = org.getLeftIndex();
int rightIndex = org.getRightIndex();
// Ancestors enclose this node's [leftIndex, rightIndex] interval.
QueryBuilder leftIndexQB = QueryBuilders.rangeQuery("leftIndex").lte(leftIndex);
QueryBuilder rightIndexQB = QueryBuilders.rangeQuery("rightIndex").gte(rightIndex);
// Scope to the same company and exclude soft-deleted orgs.
QueryBuilder queryBuilder = QueryBuilders.boolQuery()
.must(QueryBuilders.termQuery(Constant.FIELD_COMPANY_ID, org.getCompanyId()))
.must(QueryBuilders.termQuery(Constant.FIELD_DELETED, Constant.DELETED_NO))
.must(leftIndexQB)
.must(rightIndexQB);
Query searchQuery = new NativeSearchQueryBuilder()
.withQuery(queryBuilder)
// Ascending leftIndex orders the chain from root to leaf.
.withSort(SortBuilders.fieldSort("leftIndex").order(SortOrder.ASC))
// notice: restricting to "name" leaves all other fields null in the hits.
.withFields("name")
.build();
List<EsOrganization> parents = elasticsearchTemplate.queryForList(searchQuery, EsOrganization.class, IndexCoordinates.of("organization"));
if (!CollectionUtils.isEmpty(parents)) {
List<String> names = new ArrayList<>(parents.size() + 1);
parents.forEach(item -> names.add(item.getName()));
names.add(org.getName());
return names;
}
}
return null;
}
}
package com.yizhi.esearch.application.controller.remote;
import com.alibaba.fastjson.JSON;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.core.application.context.RequestContext;
import com.yizhi.drools.application.constant.DroolsConstants;
import com.yizhi.drools.application.vo.drools.*;
import com.yizhi.drools.application.vo.manage.RuleItemVo;
import com.yizhi.drools.application.vo.manage.RuleVo;
import com.yizhi.esearch.application.eum.RecommendAccountRelationEnum;
import com.yizhi.esearch.application.eum.RecommendBusinessRelationEnum;
import com.yizhi.esearch.application.eum.RuleAndOrQbBuilder;
import com.yizhi.esearch.application.eum.RuleItemAndOrQbBuilder;
import com.yizhi.esearch.application.pojo.biz.EsDocRecommend;
import com.yizhi.esearch.application.pojo.system.EsAccount;
import com.yizhi.esearch.application.util.QueryHelper;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.*;
/**
* @ClassName RecommendController
* @Description TODO
* @Author shengchenglong
* @DATE 2019-12-30 14:14
* @Version 1.0
*/
@Slf4j
@RestController
@RequestMapping("/recommend/")
public class RecommendController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    /**
     * Management-side recommendation search: builds per-business query
     * builders from the flat rule list and runs the doc_recommend query.
     *
     * @param param rules plus paging / keyword / request context
     * @return paged recommendation results (never null)
     */
    @PostMapping("/recommend4Manage")
    public BusinessVo4PortalResponse recommend4Manage(@RequestBody BusinessVo4PortalParam param) {
        log.info(" ------------------------- " + param);
        System.out.println(JSON.toJSONString(param));
        ContextHolder.set(param.getRequestContext());
        Map<String, QueryBuilder> business_qb_map = build_index_qb_map(param);
        return queryBusinessResult(param, business_qb_map);
    }

    /**
     * Portal-side recommendation search: rules are grouped per strategy and a
     * strategy only contributes when its user rule matches the current account.
     *
     * @param param per-strategy rules plus paging and request context
     * @return paged recommendation results; an empty response when there is no
     *         request context or no applicable strategy
     */
    @PostMapping("/recommend4Portal")
    public BusinessVo4PortalResponse recommend4Portal(@RequestBody BusinessVo4PortalParam param) {
        log.info("protal -- param: " + JSON.toJSONString(param));
        RequestContext context = param.getRequestContext();
        // Without a request context the current account cannot be resolved.
        if (context == null) {
            return new BusinessVo4PortalResponse();
        }
        ContextHolder.set(context);
        Map<String, QueryBuilder> business_qb_map = build_index_qb_map(param);
        // No applicable strategy.
        if (CollectionUtils.isEmpty(business_qb_map)) {
            return new BusinessVo4PortalResponse();
        }
        return queryBusinessResult(param, business_qb_map);
    }

    /**
     * Management-side preview of the accounts matched by a user rule.
     *
     * @param param user rules plus paging / keyword / request context
     * @return paged account list, or null when the rules contain no user rule
     */
    @PostMapping("/recommend4ManageAccount")
    public AccountVo4ManageResponse recommend4ManageAccount(@RequestBody AccountVo4ManageParam param) {
        BoolQueryBuilder queryBuilder = buildQueryBuilder4Account(param.getRuleVos());
        if (queryBuilder == null) {
            return null;
        }
        queryBuilder.must(QueryBuilders.termQuery("companyId", param.getRequestContext().getCompanyId()));
        // Optional fuzzy filter over name / full name / mobile.
        if (!StringUtils.isEmpty(param.getKeyword())) {
            queryBuilder.must(QueryBuilders.boolQuery()
                    .should(QueryBuilders.matchQuery("name", param.getKeyword()))
                    .should(QueryBuilders.matchQuery("fullName", param.getKeyword()))
                    .should(QueryBuilders.matchQuery("mobile", param.getKeyword())));
        }
        Query searchQuery = new NativeSearchQueryBuilder()
                .withPageable(PageRequest.of(param.getPageNo() - 1, param.getPageSize()))
                .withQuery(queryBuilder)
                .build();
        log.info("管理端 account - " + queryBuilder);
        AccountVo4ManageResponse response = new AccountVo4ManageResponse();
        Page<EsAccount> page = elasticsearchRestTemplate.queryForPage(searchQuery, EsAccount.class, IndexCoordinates.of("account"));
        List<AccountVo4Manage> accountVo4ManageList = new ArrayList<>();
        List<EsAccount> accountList = page.getContent();
        if (!CollectionUtils.isEmpty(accountList)) {
            accountVo4ManageList = new ArrayList<>(accountList.size());
            for (int i = 0; i < accountList.size(); i++) {
                EsAccount account = accountList.get(i);
                AccountVo4Manage accountVo4Manage = new AccountVo4Manage();
                accountVo4Manage.setFullName(account.getFullName());
                accountVo4Manage.setId(Long.valueOf(account.getId()));
                accountVo4Manage.setMobile(account.getMobile());
                accountVo4Manage.setName(account.getName());
                // 1-based row number within the current page.
                accountVo4Manage.setNum(i + 1);
                accountVo4Manage.setPosition(account.getPosition());
                accountVo4ManageList.add(accountVo4Manage);
            }
        }
        response.setList(accountVo4ManageList);
        response.setPageNo(param.getPageNo());
        response.setPageSize(param.getPageSize());
        response.setTotal(Math.toIntExact(page.getTotalElements()));
        return response;
    }

    /**
     * Builds the business-key → query-builder map. When the param carries a
     * per-strategy rule map the portal path is used, otherwise the flat rule
     * list (management path).
     *
     * @param param request payload
     * @return per-business query builders (possibly empty, never null)
     */
    private Map<String, QueryBuilder> build_index_qb_map(BusinessVo4PortalParam param) {
        List<RuleVo> ruleVos = param.getRuleVos();
        Map<Long, List<RuleVo>> map = param.getStrategyRuleVoMap();
        if (CollectionUtils.isEmpty(map)) {
            return buildQueryBuilderMap4Manage(param, ruleVos);
        }
        return buildQueryBuilderMap4Portal(param, map);
    }

    /**
     * Runs the doc_recommend search for the assembled per-business builders and
     * maps the hits to portal VOs.
     *
     * @param param           paging / keyword
     * @param business_qb_map business key → query builder
     * @return paged results; total stays 0 when nothing matches
     */
    private BusinessVo4PortalResponse queryBusinessResult(BusinessVo4PortalParam param, Map<String, QueryBuilder> business_qb_map) {
        // No strategy → empty response.
        if (CollectionUtils.isEmpty(business_qb_map)) {
            return new BusinessVo4PortalResponse();
        }
        // The user rule only gates WHO sees recommendations, never WHAT is recommended.
        business_qb_map.remove(DroolsConstants.BUSINESS_NAME_USER);
        BusinessVo4PortalResponse response = new BusinessVo4PortalResponse();
        response.setPageNo(param.getPageNo());
        response.setPageSize(param.getPageSize());
        response.setTotal(0);
        // OR the per-business builders together, each scoped to its recommendType.
        BoolQueryBuilder bizQb = QueryBuilders.boolQuery();
        business_qb_map.forEach((key, item) -> bizQb.should(QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("recommendType", key.toLowerCase()))
                .must(item)));
        BoolQueryBuilder qb = QueryBuilders.boolQuery();
        qb.must(bizQb);
        // A non-empty keyword means a management-side search: add keyword filtering.
        if (!StringUtils.isEmpty(param.getKeyword())) {
            QueryBuilder keywordsQb = QueryBuilders.boolQuery()
                    .should(QueryBuilders.matchQuery("name", param.getKeyword().toLowerCase()))
                    .should(QueryBuilders.matchQuery("code", param.getKeyword().toLowerCase()));
            qb.must(keywordsQb);
        }
        Query searchQuery = new NativeSearchQueryBuilder()
                .withQuery(qb)
                .withFilter(QueryHelper.getCommonQueryBuilderForContext())
                // Spring Data pages are 0-based.
                .withPageable(PageRequest.of(param.getPageNo() - 1, param.getPageSize()))
                // notice: sort by score descending first, then by business ordering.
                .withSort(SortBuilders.scoreSort().order(SortOrder.DESC))
                .build();
        Page<EsDocRecommend> page = elasticsearchRestTemplate.queryForPage(searchQuery, EsDocRecommend.class, IndexCoordinates.of("doc_recommend"));
        log.info(" --------------------------------- page " + JSON.toJSONString(page));
        List<EsDocRecommend> list = page.getContent();
        if (!CollectionUtils.isEmpty(list)) {
            List<BusinessVo4Portal> resList = new ArrayList<>(list.size());
            for (EsDocRecommend docRecommend : list) {
                BusinessVo4Portal businessVo4Portal = new BusinessVo4Portal();
                // recommendId has the form "<bizId>_<suffix>".
                businessVo4Portal.setId(Long.valueOf(docRecommend.getRecommendId().split("_")[0]));
                businessVo4Portal.setType(docRecommend.getRecommendType());
                businessVo4Portal.setName(docRecommend.getName());
                businessVo4Portal.setImage(docRecommend.getImage());
                businessVo4Portal.setCode(docRecommend.getCode());
                resList.add(businessVo4Portal);
            }
            response.setList(resList);
            response.setTotal((int) page.getTotalElements());
        }
        return response;
    }

    /**
     * Management path: builds per-business query builders from a flat rule
     * list. User rules are skipped here — they only gate visibility.
     *
     * @param param   request payload (optional business filter)
     * @param ruleVos flat rule list
     * @return business key → combined query builder
     */
    private Map<String, QueryBuilder> buildQueryBuilderMap4Manage(BusinessVo4PortalParam param, List<RuleVo> ruleVos) {
        Map<String, QueryBuilder> business_qb_map_rule = new HashMap<>();
        if (CollectionUtils.isEmpty(ruleVos)) {
            log.error("ruleVos参数为空");
            return business_qb_map_rule;
        }
        for (RuleVo ruleVo : ruleVos) {
            // Skip user rules; only content rules build business queries.
            if (ruleVo.getType().equals(DroolsConstants.RULE_TYPE_USER)) {
                continue;
            }
            accumulateRule(business_qb_map_rule, ruleVo, param.getBusiness(), false);
        }
        return business_qb_map_rule;
    }

    /**
     * Portal path: builds per-business query builders from per-strategy rules.
     * A strategy contributes only when its user rule matches the current
     * account; any "user" business key it adds is removed later in
     * {@link #queryBusinessResult}.
     *
     * @param param request payload (optional business filter, request context)
     * @param map   strategy id → rules
     * @return business key → combined query builder
     */
    private Map<String, QueryBuilder> buildQueryBuilderMap4Portal(BusinessVo4PortalParam param, Map<Long, List<RuleVo>> map) {
        Map<String, QueryBuilder> business_qb_map_rule = new HashMap<>();
        for (Map.Entry<Long, List<RuleVo>> strategyEntry : map.entrySet()) {
            List<RuleVo> ruleVos = strategyEntry.getValue();
            if (!containsCurrentAccount(ruleVos, param.getRequestContext())) {
                continue;
            }
            for (RuleVo ruleVo : ruleVos) {
                accumulateRule(business_qb_map_rule, ruleVo, param.getBusiness(), false);
            }
        }
        return business_qb_map_rule;
    }

    /**
     * Folds one rule into the cumulative business-key → builder map.
     *
     * Within the rule, items of the same business are combined with the item's
     * own and/or; the rule's result is then merged into the cumulative map
     * with the rule-level and/or.
     *
     * BUGFIX: the merge step previously passed the loop variable holding the
     * LAST iterated item's builder instead of each entry's own builder, so
     * rules spanning several businesses merged the wrong query.
     *
     * @param ruleMap        cumulative business key → builder map (mutated)
     * @param ruleVo         the rule to fold in
     * @param businessFilter when non-empty (management content search), items
     *                       of other businesses are ignored
     * @param accountRule    true to build item queries with the account
     *                       relation enum, false for the business enum
     */
    private void accumulateRule(Map<String, QueryBuilder> ruleMap, RuleVo ruleVo,
                                String businessFilter, boolean accountRule) {
        List<RuleItemVo> items = ruleVo.getList();
        if (CollectionUtils.isEmpty(items)) {
            return;
        }
        String ruleAndOr = ruleVo.getAndOr();
        Map<String, QueryBuilder> itemMap = new HashMap<>();
        for (RuleItemVo itemVo : items) {
            // Values are indexed lowercased.
            if (!StringUtils.isEmpty(itemVo.getValue())) {
                itemVo.setValue(itemVo.getValue().toLowerCase());
            }
            if (!StringUtils.isEmpty(businessFilter)
                    && !businessFilter.equalsIgnoreCase(itemVo.getBusiness())) {
                continue;
            }
            QueryBuilder qb;
            if (!itemMap.containsKey(itemVo.getBusiness())) {
                // First item for this business.
                if ("all".equalsIgnoreCase(itemVo.getProperty())) {
                    // "all" matches every document of the business.
                    qb = QueryBuilders.boolQuery();
                } else if (accountRule) {
                    qb = RecommendAccountRelationEnum.valueOf(itemVo.getRelation().toUpperCase(Locale.ENGLISH))
                            .build(itemVo);
                } else {
                    qb = RecommendBusinessRelationEnum.valueOf(itemVo.getRelation().toUpperCase(Locale.ENGLISH))
                            .build(itemVo);
                }
            } else {
                // Combine with the existing builder using the item's and/or.
                // NOTE(review): RuleItemAndOrQbBuilder always uses the business
                // relation enum internally, even on the account path — confirm.
                qb = RuleItemAndOrQbBuilder.valueOf(itemVo.getAndOr().toUpperCase(Locale.ENGLISH))
                        .build(itemMap.get(itemVo.getBusiness()), itemVo);
            }
            itemMap.put(itemVo.getBusiness(), qb);
        }
        // Merge this rule's builders into the cumulative map with the rule-level and/or.
        for (Map.Entry<String, QueryBuilder> entry : itemMap.entrySet()) {
            if (ruleMap.containsKey(entry.getKey())) {
                ruleMap.put(entry.getKey(),
                        RuleAndOrQbBuilder.valueOf(ruleAndOr.toUpperCase(Locale.ENGLISH))
                                .build(ruleMap.get(entry.getKey()), entry.getValue()));
            } else {
                ruleMap.put(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Checks whether the account in the request context satisfies the user
     * rule of the given strategy's rules.
     *
     * @param ruleVos        the strategy's rules
     * @param requestContext current caller context
     * @return true when the current account matches the user rule
     */
    private boolean containsCurrentAccount(List<RuleVo> ruleVos, RequestContext requestContext) {
        BoolQueryBuilder finalQb = buildQueryBuilder4Account(ruleVos);
        if (finalQb == null) {
            return false;
        }
        finalQb.must(QueryBuilders.termQuery("id", String.valueOf(requestContext.getAccountId())))
                .must(QueryBuilders.termQuery("companyId", requestContext.getCompanyId()));
        Query searchQuery = new NativeSearchQueryBuilder()
                .withQuery(finalQb)
                .build();
        log.info("account - " + finalQb);
        // Count instead of fetch: we only need existence.
        long count = elasticsearchRestTemplate.count(searchQuery, EsAccount.class);
        return count > 0;
    }

    /**
     * Builds the account query from the user rules in the list.
     *
     * @param ruleVos rules; only entries of type RULE_TYPE_USER are used
     * @return the account query builder, or null when there is no user rule
     *         (or the list is null/empty)
     */
    private BoolQueryBuilder buildQueryBuilder4Account(List<RuleVo> ruleVos) {
        // BUGFIX: the original message had no {} placeholder, so the argument was dropped.
        log.info(" -- buildQueryBuilder4Account {}", JSON.toJSONString(ruleVos));
        // BUGFIX/robustness: a null/empty list previously caused a NullPointerException.
        if (CollectionUtils.isEmpty(ruleVos)) {
            return null;
        }
        Map<String, QueryBuilder> business_qb_map_rule = new HashMap<>();
        for (RuleVo ruleVo : ruleVos) {
            // Only user rules contribute to the account query.
            if (!ruleVo.getType().equals(DroolsConstants.RULE_TYPE_USER)) {
                continue;
            }
            accumulateRule(business_qb_map_rule, ruleVo, null, true);
        }
        QueryBuilder accountQb = business_qb_map_rule.get(DroolsConstants.BUSINESS_NAME_USER);
        if (accountQb == null) {
            return null;
        }
        return QueryBuilders.boolQuery().must(accountQb);
    }
}
package com.yizhi.esearch.application.controller.remote;
import com.alibaba.fastjson.JSON;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.core.application.context.RequestContext;
import com.yizhi.esearch.application.util.QueryHelper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.List;
/**
* @ClassName RemoteBizVisibleRangeController
* @Description TODO
* @Author shengchenglong
* @DATE 2020-03-27 19:28
* @Version 1.0
*/
@Slf4j
@RestController
@RequestMapping("remote/bizVisibleRange")
public class RemoteBizVisibleRangeController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchTemplate;

    /**
     * Returns the ids of all business objects of {@code bizType} visible to
     * the caller described by the serialized request context.
     *
     * @param bizType        business type key, used to pick the visible-range
     *                       class from {@code QueryHelper.VISIBLE_RANGE_CLASS}
     * @param requestContext JSON-serialized {@code RequestContext}
     * @return visible business ids, or null when none are visible
     *         (NOTE(review): callers must null-check)
     */
    @GetMapping("getVisibleBizId")
    public List<Long> getVisibleBizId(@RequestParam("bizType") String bizType,
                                      @RequestParam("requestContext") String requestContext) {
        // BUGFIX: JSON.parse(String) yields a fastjson JSONObject, so the original
        // cast to RequestContext threw ClassCastException at runtime; deserialize
        // directly to the target type instead.
        RequestContext context = JSON.parseObject(requestContext, RequestContext.class);
        ContextHolder.set(context);
        List<String> bizIdStrs = QueryHelper.getBizIds4VisibleRange(elasticsearchTemplate, bizType, QueryHelper.VISIBLE_RANGE_CLASS.get(bizType));
        if (!CollectionUtils.isEmpty(bizIdStrs)) {
            List<Long> bizIds = new ArrayList<>(bizIdStrs.size());
            bizIdStrs.forEach(id -> bizIds.add(Long.valueOf(id)));
            return bizIds;
        }
        return null;
    }
}
package com.yizhi.esearch.application.controller.remote;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsLecturer;
import com.yizhi.esearch.application.repository.LecturerRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
* @ClassName LecturerController
* @Description 讲师
* @Author shengchenglong
* @DATE 2019-07-24 11:35
* @Version 1.0
*/
@RequestMapping("/remote/lecturer")
@RestController
public class RemoteLecturerController {

    @Autowired
    private LecturerRepository lecturerRepository;

    /**
     * Updates a lecturer's enabled/disabled status.
     *
     * NOTE(review): this builds a brand-new EsLecturer carrying only id and
     * status and saves it; an Elasticsearch save() replaces the whole
     * document, so every other lecturer field would be wiped. Confirm whether
     * a fetch-modify-save partial update was intended.
     *
     * @param id      lecturer id
     * @param enabled true to enable (STATUS_UP), false to disable (STATUS_DOWN)
     * @return always true
     */
    @GetMapping("/enableStatus/update")
    public boolean updateEnableStatus(@RequestParam("id") Long id, @RequestParam("enabled") boolean enabled) {
        EsLecturer lecturer = new EsLecturer();
        lecturer.setId(String.valueOf(id));
        lecturer.setStatus(enabled ? Constant.STATUS_UP : Constant.STATUS_DOWN);
        lecturerRepository.save(lecturer);
        return true;
    }
}
package com.yizhi.esearch.application.eum;
import com.yizhi.drools.application.constant.DroolsConstants;
import com.yizhi.drools.application.vo.manage.RuleItemVo;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
/**
* @ClassName RecommendAccountRelationEnum
* @Description TODO
* @Author shengchenglong
* @DATE 2020-01-07 14:22
* @Version 1.0
*/
/**
 * Maps a drools relation keyword to the Elasticsearch query built for one
 * account rule item.
 */
public enum RecommendAccountRelationEnum {

    CONTAINS(DroolsConstants.RELATION_CONTAINS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            // Org names are matched exactly against the keyword sub-field;
            // everything else is a full-text match.
            return field.equalsIgnoreCase("orgName")
                    ? QueryBuilders.termQuery("kw_" + field, ruleItemVo.getValue())
                    : QueryBuilders.matchQuery(field, ruleItemVo.getValue());
        }
    },

    NOTCONTAINS(DroolsConstants.RELATION_NOT_CONTAINS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            QueryBuilder positive = field.equalsIgnoreCase("orgName")
                    ? QueryBuilders.termQuery("kw_" + field, ruleItemVo.getValue())
                    : QueryBuilders.matchQuery(field, ruleItemVo.getValue());
            return QueryBuilders.boolQuery().mustNot(positive);
        }
    },

    EQUALS(DroolsConstants.RELATION_EQUALS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            // NOTE(review): EQUALS/NOTEQUALS read getName() while the CONTAINS
            // variants read getValue() — confirm this asymmetry is intended.
            String field = getPropName(ruleItemVo.getProperty());
            return QueryBuilders.termQuery("kw_" + field, ruleItemVo.getName());
        }
    },

    NOTEQUALS(DroolsConstants.RELATION_NOT_EQUALS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            QueryBuilder equalsQb = QueryBuilders.termQuery("kw_" + field, ruleItemVo.getName());
            return QueryBuilders.boolQuery().mustNot(equalsQb);
        }
    };

    private final String relation;

    RecommendAccountRelationEnum(String type) {
        this.relation = type;
    }

    /** @return the drools relation keyword this constant represents */
    public String getRelation() {
        return relation;
    }

    /**
     * Builds the Elasticsearch query for one account rule item.
     */
    public abstract QueryBuilder build(RuleItemVo ruleItemVo);

    /**
     * Maps the rule property to the field actually indexed on the account
     * document ("org" is stored as "orgName"; everything else maps 1:1).
     */
    public String getPropName(String origin) {
        if (origin.equals("org")) {
            return "orgName";
        }
        return origin;
    }
}
package com.yizhi.esearch.application.eum;
import com.yizhi.drools.application.constant.DroolsConstants;
import com.yizhi.drools.application.vo.manage.RuleItemVo;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
/**
* @ClassName RecommendBusinessBuildEnum
* @Description TODO
* @Author shengchenglong
* @DATE 2020-01-07 14:22
* @Version 1.0
*/
/**
 * Maps a drools relation keyword to the Elasticsearch query built for one
 * business (content) rule item.
 */
public enum RecommendBusinessRelationEnum {

    CONTAINS(DroolsConstants.RELATION_CONTAINS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            return QueryBuilders.matchQuery(field, ruleItemVo.getValue());
        }
    },

    NOTCONTAINS(DroolsConstants.RELATION_NOT_CONTAINS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            QueryBuilder containsQb = QueryBuilders.matchQuery(field, ruleItemVo.getValue());
            return QueryBuilders.boolQuery().mustNot(containsQb);
        }
    },

    EQUALS(DroolsConstants.RELATION_EQUALS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            // NOTE(review): EQUALS/NOTEQUALS read getName() while the CONTAINS
            // variants read getValue() — confirm this asymmetry is intended.
            String field = getPropName(ruleItemVo.getProperty());
            return QueryBuilders.termQuery("kw_" + field, ruleItemVo.getName());
        }
    },

    NOTEQUALS(DroolsConstants.RELATION_NOT_EQUALS) {
        @Override
        public QueryBuilder build(RuleItemVo ruleItemVo) {
            String field = getPropName(ruleItemVo.getProperty());
            QueryBuilder equalsQb = QueryBuilders.termQuery("kw_" + field, ruleItemVo.getName());
            return QueryBuilders.boolQuery().mustNot(equalsQb);
        }
    };

    private final String relation;

    RecommendBusinessRelationEnum(String type) {
        this.relation = type;
    }

    /** @return the drools relation keyword this constant represents */
    public String getRelation() {
        return relation;
    }

    /**
     * Builds the Elasticsearch query for one business rule item.
     */
    public abstract QueryBuilder build(RuleItemVo ruleItemVo);

    /**
     * Maps the rule property to the field actually indexed on the document
     * ("classify" → "classifyName", "belongCaseActivity" → "caseActivityName";
     * everything else maps 1:1).
     */
    public String getPropName(String origin) {
        if (origin.equals("classify")) {
            return "classifyName";
        }
        if (origin.equals("belongCaseActivity")) {
            return "caseActivityName";
        }
        return origin;
    }
}
package com.yizhi.esearch.application.eum;
import com.yizhi.drools.application.constant.DroolsConstants;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
/**
* @ClassName AndOrQbBuilder
* @Description TODO
* @Author shengchenglong
* @DATE 2020-01-09 16:32
* @Version 1.0
*/
/**
 * Combines two rule-level query builders according to the rule's and/or
 * connective.
 */
public enum RuleAndOrQbBuilder {

    /** Conjunction: both sub-queries must match. */
    AND(DroolsConstants.AND) {
        @Override
        public QueryBuilder build(QueryBuilder left, QueryBuilder right) {
            return QueryBuilders.boolQuery().must(left).must(right);
        }
    },

    /** Disjunction: either sub-query may match. */
    OR(DroolsConstants.OR) {
        @Override
        public QueryBuilder build(QueryBuilder left, QueryBuilder right) {
            return QueryBuilders.boolQuery().should(left).should(right);
        }
    };

    private final String andOr;

    RuleAndOrQbBuilder(String type) {
        this.andOr = type;
    }

    /** @return the drools and/or keyword this constant represents */
    public String getAndOr() {
        return andOr;
    }

    /**
     * Combines the two query builders under this connective.
     */
    public abstract QueryBuilder build(QueryBuilder left, QueryBuilder right);
}
package com.yizhi.esearch.application.eum;
import com.yizhi.drools.application.constant.DroolsConstants;
import com.yizhi.drools.application.vo.manage.RuleItemVo;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import java.util.Locale;
/**
* @ClassName AndOrQbBuilder
* @Description TODO
* @Author shengchenglong
* @DATE 2020-01-09 16:32
* @Version 1.0
*/
/**
 * Combines an accumulated query builder with one more rule item, honoring the
 * item's and/or connective.
 */
public enum RuleItemAndOrQbBuilder {

    AND(DroolsConstants.AND) {
        @Override
        public QueryBuilder build(QueryBuilder originQb, RuleItemVo ruleItemVo) {
            String relation = ruleItemVo.getRelation();
            QueryBuilder itemQb = RecommendBusinessRelationEnum
                    .valueOf(relation.toUpperCase(Locale.ENGLISH))
                    .build(ruleItemVo);
            // NOTE(review): for "not*" relations itemQb is already negated by the
            // relation enum and is negated again here via mustNot — confirm the
            // double negation is intended.
            if (relation.startsWith("not")) {
                return QueryBuilders.boolQuery().must(originQb).mustNot(itemQb);
            }
            return QueryBuilders.boolQuery().must(originQb).must(itemQb);
        }
    },

    OR(DroolsConstants.OR) {
        @Override
        public QueryBuilder build(QueryBuilder originQb, RuleItemVo ruleItemVo) {
            String relation = ruleItemVo.getRelation();
            QueryBuilder itemQb = RecommendBusinessRelationEnum
                    .valueOf(relation.toUpperCase(Locale.ENGLISH))
                    .build(ruleItemVo);
            if (relation.startsWith("not")) {
                return QueryBuilders.boolQuery().should(originQb).mustNot(itemQb);
            }
            return QueryBuilders.boolQuery().should(originQb).should(itemQb);
        }
    };

    private final String andOr;

    RuleItemAndOrQbBuilder(String type) {
        this.andOr = type;
    }

    /** @return the drools and/or keyword this constant represents */
    public String getAndOr() {
        return andOr;
    }

    /**
     * Combines the accumulated builder with the given rule item.
     */
    public abstract QueryBuilder build(QueryBuilder originQb, RuleItemVo ruleItemVo);
}
package com.yizhi.esearch.application.pojo.biz;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import java.io.Serializable;
import java.util.Date;
/**
* @Author: shengchenglong
* @Date: 2018/12/20 21:09
*/
@Data
@Document(indexName = "web_error_log", type = "web_error_log")
public class WebErrorLog implements Serializable {

    private static final long serialVersionUID = 1L;

    // Document id.
    @Id
    private String id;

    // Raw error content; the whitespace analyzer keeps tokens exactly as logged.
    @Field(type = FieldType.Text, searchAnalyzer = "whitespace", analyzer = "whitespace")
    private String content;

    // Client terminal the error came from; allowed values are documented
    // in the Swagger annotation below (pc / weixin / manage).
    @ApiModelProperty(value = "PC端:pc、微信端:weixin、管理端:manage")
    private String terminalType;

    // Tenant / site / org scoping of the error.
    private String companyId;
    private String siteId;
    private String orgId;

    // Account that triggered the error.
    private String createById;
    private String createByName;

    // When the error was recorded.
    private Date createTime;
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.system.EsAccount;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsAccount} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface AccountRepository extends ElasticsearchRepository<EsAccount, Long> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsAssignment;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsAssignment} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface AssignmentRepository extends ElasticsearchRepository<EsAssignment, Long> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsBizKeyword;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsBizKeyword} documents
 * (String-keyed, unlike most other repositories in this package).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface BizKeywordRepository extends ElasticsearchRepository<EsBizKeyword, String> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsCourse;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import java.util.List;
/**
 * Spring Data Elasticsearch repository for {@link EsCourse} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 15:39
 */
public interface CourseRepository extends ElasticsearchRepository<EsCourse, Long> {

    /**
     * Finds courses in a site by exact name, status and deleted flag
     * (Spring Data derived query — no implementation needed).
     *
     * @param siteId  site the course belongs to
     * @param name    course name to match
     * @param status  publish status code
     * @param deleted deleted flag (presumably 0 = live, 1 = deleted — TODO confirm)
     * @return matching courses, empty when none
     */
    List<EsCourse> findBySiteIdEqualsAndNameAndStatusAndDeleted(String siteId, String name, Integer status, Integer deleted);
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsDocRecommend;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsDocRecommend} documents
 * (String-keyed; ids elsewhere take the form "&lt;bizId&gt;_&lt;suffix&gt;").
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface DocRecommendRepository extends ElasticsearchRepository<EsDocRecommend, String> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsExam;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsExam} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:50
 */
public interface ExamRepository extends ElasticsearchRepository<EsExam, Long> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsLecturer;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import java.util.List;
/**
 * Spring Data Elasticsearch repository for {@link EsLecturer} documents.
 *
 * NOTE(review): the repository is keyed by Long, but EsLecturer ids are set
 * as Strings elsewhere in this module (setId(String.valueOf(id))) — confirm
 * the id type is consistent.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:50
 */
public interface LecturerRepository extends ElasticsearchRepository<EsLecturer, Long> {

    /** Finds all lecturers belonging to a site (derived query). */
    List<EsLecturer> findBySiteId(String siteId);

    /** Finds lecturers by their relationIds field (derived query). */
    List<EsLecturer> findByRelationIds(String relationIds);
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsOfflineCourse;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import java.util.List;
/**
 * Spring Data Elasticsearch repository for {@link EsOfflineCourse} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 16:24
 */
public interface OfflineCourseRepository extends ElasticsearchRepository<EsOfflineCourse, Long> {

    /**
     * Finds offline courses in a site by exact name, status and deleted flag
     * (Spring Data derived query — no implementation needed).
     *
     * @param siteId  site the course belongs to
     * @param name    course name to match
     * @param status  publish status code
     * @param deleted deleted flag (presumably 0 = live, 1 = deleted — TODO confirm)
     * @return matching offline courses, empty when none
     */
    List<EsOfflineCourse> findBySiteIdEqualsAndNameAndStatusAndDeleted(String siteId, String name, Integer status, Integer deleted);
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.system.EsOrganization;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsOrganization} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface OrganizationRepository extends ElasticsearchRepository<EsOrganization, Long> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsResearch;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link EsResearch} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:51
 */
public interface ResearchRepository extends ElasticsearchRepository<EsResearch, Long> {
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.EsTrainingProject;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import java.util.List;
/**
 * Spring Data Elasticsearch repository for {@link EsTrainingProject} documents.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/20 16:24
 */
public interface TrainingProjectRepository extends ElasticsearchRepository<EsTrainingProject, Long> {

    /**
     * Finds training projects in a site by exact name, status and deleted flag
     * (Spring Data derived query — no implementation needed).
     *
     * @param siteId  site the project belongs to
     * @param name    project name to match
     * @param status  publish status code
     * @param deleted deleted flag (presumably 0 = live, 1 = deleted — TODO confirm)
     * @return matching training projects, empty when none
     */
    List<EsTrainingProject> findBySiteIdEqualsAndNameAndStatusAndDeleted(String siteId, String name, Integer status, Integer deleted);
}
package com.yizhi.esearch.application.repository;
import com.yizhi.esearch.application.pojo.biz.WebErrorLog;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
/**
 * Spring Data Elasticsearch repository for {@link WebErrorLog} documents
 * (String-keyed).
 *
 * @Author: shengchenglong
 * @Date: 2018/12/24 09:49
 */
public interface WebErrorLogRepository extends ElasticsearchRepository<WebErrorLog, String> {
}
package com.yizhi.esearch.application.scheduler;
import com.yizhi.esearch.application.pojo.biz.EsBizKeyword;
import com.yizhi.esearch.application.pojo.biz.EsDocRecommend;
import com.yizhi.esearch.application.repository.BizKeywordRepository;
import com.yizhi.esearch.application.repository.DocRecommendRepository;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchScrollHits;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
 * Scheduled job that synchronizes keywords from the {@code doc_recommend}
 * index into the biz-keyword index. Runs every 5 minutes; when the
 * {@code needInit} property is "yes" it performs a full re-sync starting
 * from the 2000-01-01 sentinel date instead of an incremental one.
 *
 * @ClassName KeywordScheduler
 * @Author shengchenglong
 * @DATE 2020-03-11 16:40
 * @Version 1.0
 */
@Component
@Log4j2
public class KeywordScheduler {

    @Autowired
    private DocRecommendRepository docRecommendRepository;

    @Autowired
    private BizKeywordRepository bizKeywordRepository;

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    // "yes" (case-insensitive) triggers a full re-index on the next run.
    @Value("${needInit}")
    private String needInit;

    // @PostConstruct
    @Scheduled(cron = "0 0/5 * * * ?")
    public void run() throws ParseException {
        Date date = new Date();
        if ("yes".equalsIgnoreCase(needInit)) {
            // Full re-initialization: widen the window back to the sentinel date.
            date = DateUtils.parseDate("2000-01-01 00:00:00", "yyyy-MM-dd HH:mm:ss");
        }
        run(date);
    }

    /**
     * Scrolls over doc_recommend documents whose {@code @timestamp} falls in
     * the last 6 minutes relative to {@code date} and upserts the derived
     * keyword documents in pages of 1000.
     *
     * @param date reference time for the incremental window
     */
    public void run(Date date) {
        String time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date);
        log.info("-------------------------------------------- 同步关键字,开始 -----------------------------------------------");
        // 6-minute lookback on a 5-minute schedule: the 1-minute overlap avoids
        // missing documents indexed right at a window boundary.
        QueryBuilder qb = QueryBuilders.rangeQuery("@timestamp").gte(DateUtils.addMinutes(date, -6).getTime());
        Query searchQuery = new NativeSearchQueryBuilder().withQuery(qb).build();
        IndexCoordinates index = IndexCoordinates.of("doc_recommend");
        SearchScrollHits<EsDocRecommend> scroll = elasticsearchRestTemplate.searchScrollStart(1000, searchQuery, EsDocRecommend.class, index);
        String scrollId = scroll.getScrollId();
        while (scroll.hasSearchHits()) {
            scrollId = scroll.getScrollId();
            insert(scroll, time);
            scroll = elasticsearchRestTemplate.searchScrollContinue(scrollId, 1000, EsDocRecommend.class, index);
        }
        log.info("-------------------------------------------- 同步关键字,结束 -----------------------------------------------");
    }

    /**
     * Converts one scroll page into EsBizKeyword documents — one per
     * comma-separated keyword of each hit — and saves them in bulk.
     *
     * @param scroll current page of scroll hits
     * @param time   formatted sync timestamp, used only for logging
     */
    private void insert(SearchScrollHits<EsDocRecommend> scroll, String time) {
        List<EsBizKeyword> bizKeywords = new ArrayList<>();
        scroll.getSearchHits().forEach(a -> {
            EsDocRecommend doc = a.getContent();
            String keyword = doc.getKeyword();
            if (StringUtils.isNotEmpty(keyword)) {
                for (String s : keyword.split(",")) {
                    EsBizKeyword bizKeyword = new EsBizKeyword();
                    // Composite id keeps one document per (recommendType, siteId, keyword).
                    bizKeyword.setId(doc.getRecommendType().concat("_").concat(doc.getSiteId()).concat("_").concat(s));
                    // Source doc id has the form "<bizId>_...": keep only the biz id.
                    bizKeyword.setBizId(doc.getId().split("_")[0]);
                    bizKeyword.setCompanyId(doc.getCompanyId());
                    bizKeyword.setDeleted(doc.getDeleted());
                    // Fix: the original called setKw_value(s) twice in a row.
                    bizKeyword.setKw_value(s);
                    bizKeyword.setSiteId(doc.getSiteId());
                    bizKeyword.setStatus(doc.getStatus());
                    bizKeyword.setValue(s);
                    bizKeyword.setRecommendType(doc.getRecommendType());
                    bizKeywords.add(bizKeyword);
                }
            }
        });
        if (!CollectionUtils.isEmpty(bizKeywords)) {
            for (EsBizKeyword saved : bizKeywordRepository.saveAll(bizKeywords)) {
                log.info(time + " " + saved.toString());
            }
        }
    }
}
package com.yizhi.esearch.application.service;
/**
 * Empty placeholder class — no behavior yet.
 * NOTE(review): appears to be a stub for a future rule-item/account builder;
 * confirm whether it is still needed or can be removed.
 *
 * @ClassName RuleItemAccountbuilder
 * @Author shengchenglong
 * @DATE 2020-01-07 14:20
 * @Version 1.0
 */
public class RuleItemAccountbuilder {
}
package com.yizhi.esearch.application.service.using;
import com.yizhi.esearch.application.pojo.system.EsOrganization;
import com.yizhi.esearch.application.repository.OrganizationRepository;
import com.yizhi.esearch.application.util.OrgIndex;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.Optional;
/**
 * Resolves an organization's left/right (nested-set style) index pair and
 * builds the Elasticsearch query that covers the org together with all of
 * its descendants.
 *
 * @ClassName OrgIndexServiceUsing
 * @Author shengchenglong
 * @DATE 2020-06-10 10:48
 * @Version 1.0
 */
@Component
public class OrgIndexServiceUsing {

    @Autowired
    private OrganizationRepository organizationRepository;

    /**
     * Looks up the OrgIndex (left/right indexes) of a single department.
     *
     * @param orgId organization id
     * @return the index pair, or null when the organization does not exist
     */
    public OrgIndex getOne(Long orgId) {
        return organizationRepository.findById(orgId)
                .map(org -> new OrgIndex(org.getLeftIndex(), org.getRightIndex()))
                .orElse(null);
    }

    /**
     * Builds a query selecting the department and all of its children:
     * orgLeftIndex >= left AND orgRightIndex <= right.
     *
     * @param orgId organization id
     * @return the range query, or null when the organization does not exist
     */
    public QueryBuilder get4ContainingChildren(Long orgId) {
        OrgIndex range = getOne(orgId);
        if (range == null) {
            return null;
        }
        return QueryBuilders.boolQuery()
                .must(QueryBuilders.rangeQuery("orgLeftIndex").gte(range.getLeftIndex()))
                .must(QueryBuilders.rangeQuery("orgRightIndex").lte(range.getRightIndex()));
    }
}
package com.yizhi.esearch.application.util;
import net.sf.cglib.beans.BeanCopier;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * High-performance bean property copying backed by cglib {@link BeanCopier},
 * with copier instances cached per (source class, target class) pair.
 *
 * @ClassName BeanUtils
 * @Author chengchenglong
 * @DATE 2019-04-17 15:11
 * @Version 1.0
 */
public class BeanUtils {

    /**
     * Cache of BeanCopier instances keyed by sourceClassName + targetClassName;
     * ConcurrentHashMap keeps access thread-safe.
     */
    public static final Map<String, BeanCopier> BEAN_COPIER_MAP = new ConcurrentHashMap<>();

    /**
     * Copies matching properties from origin to target.
     * NOTE: the misspelled name is kept for backward compatibility with
     * existing callers; prefer {@link #copy(Object, Object)}.
     */
    public static void cpoy(Object origin, Object target) {
        copy(origin, target);
    }

    /**
     * Copies matching properties from origin to target (correctly-spelled
     * replacement for {@link #cpoy(Object, Object)}).
     *
     * @param origin source bean
     * @param target destination bean, mutated in place
     */
    public static void copy(Object origin, Object target) {
        BeanCopier beanCopier = getBeanCopier(origin.getClass(), target.getClass());
        if (beanCopier != null) {
            beanCopier.copy(origin, target, null);
        }
    }

    /**
     * Returns the cached BeanCopier for the class pair, creating it on first use.
     *
     * @param origin source class
     * @param target destination class
     * @return the (cached) BeanCopier instance
     */
    public static BeanCopier getBeanCopier(Class<?> origin, Class<?> target) {
        String key = origin.getName() + target.getName();
        // computeIfAbsent is atomic — fixes the previous check-then-put race
        // that could build duplicate BeanCopier instances under contention.
        return BEAN_COPIER_MAP.computeIfAbsent(key, k -> BeanCopier.create(origin, target, false));
    }
}
package com.yizhi.esearch.application.util;
import org.springframework.stereotype.Component;
/**
 * Empty placeholder component for a future caching helper.
 * NOTE(review): only a commented-out @Cacheable sketch exists below —
 * confirm whether this class is still planned or can be deleted.
 *
 * @ClassName CacheUtil
 * @Author shengchenglong
 * @DATE 2019-12-30 21:00
 * @Version 1.0
 */
@Component
public class CacheUtil {

    // Sketch of the intended API (never implemented):
    // @Cacheable(value = "recommend", key = "#cacheKey")
    // public Object getBusiness(Long accountId, String time, String cacheKey) {
    //
    // }
}
package com.yizhi.esearch.application.util;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Left/right (nested-set style) index pair of an organization node, used to
 * select an org together with its whole subtree in range queries.
 *
 * @ClassName OrgIndex
 * @Author shengchenglong
 * @DATE 2020-06-10 10:48
 * @Version 1.0
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class OrgIndex implements java.io.Serializable {

    // Fix: serialVersionUID was declared but the class never implemented
    // Serializable, so the field had no effect; the interface is now declared.
    private static final long serialVersionUID = 4908567268375776414L;

    // Left boundary of the org's nested-set interval.
    private Integer leftIndex;

    // Right boundary of the org's nested-set interval.
    private Integer rightIndex;
}
package com.yizhi.esearch.application.util;
import com.yizhi.core.application.context.ContextHolder;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.util.CollectionUtils;
import javax.validation.constraints.NotNull;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.time.ZoneId;
import java.util.*;
/**
 * Query-building helpers for the global-search feature: tenant/context
 * isolation, visible-range filtering, keyword paging and per-type result
 * limits. All members are static.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/25 15:19
 */
public class QueryHelper {

    private static final Logger LOGGER = LoggerFactory.getLogger(QueryHelper.class);

    // Setter/getter names of the searchable text fields, invoked via reflection.
    public static final List<String> SEARCH_SETTER_METHOD_NAME = new ArrayList<>();
    public static final List<String> SEARCH_GETTER_METHOD_NAME = new ArrayList<>();

    // ES pojo class -> visible-range index name, and the reverse mapping.
    public static final Map<Class, String> VISIBLE_RANGE_TYPE = new HashMap<>();
    public static final Map<String, Class> VISIBLE_RANGE_CLASS = new HashMap<>();

    static {
        SEARCH_SETTER_METHOD_NAME.add("setName");
        SEARCH_SETTER_METHOD_NAME.add("setKeyword");
        SEARCH_GETTER_METHOD_NAME.add("getName");
        SEARCH_GETTER_METHOD_NAME.add("getKeyword");
        VISIBLE_RANGE_TYPE.put(EsAssignment.class, Constant.BIZ_TYPE_VISIBLE_ASSIGNMENT);
        VISIBLE_RANGE_TYPE.put(EsCaseLibrary.class, Constant.BIZ_TYPE_VISIBLE_CASE_LIBRARY);
        VISIBLE_RANGE_TYPE.put(EsCourse.class, Constant.BIZ_TYPE_VISIBLE_COURSE);
        VISIBLE_RANGE_TYPE.put(EsExam.class, Constant.BIZ_TYPE_VISIBLE_EXAM);
        VISIBLE_RANGE_TYPE.put(EsStudentCase.class, Constant.BIZ_TYPE_VISIBLE_STUDENT_CASE);
        VISIBLE_RANGE_TYPE.put(EsTrainingProject.class, Constant.BIZ_TYPE_VISIBLE_TRAINING_PROJECT);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_ASSIGNMENT, EsAssignment.class);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_CASE_LIBRARY, EsCaseLibrary.class);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_COURSE, EsCourse.class);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_EXAM, EsExam.class);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_STUDENT_CASE, EsStudentCase.class);
        VISIBLE_RANGE_CLASS.put(Constant.BIZ_TYPE_VISIBLE_TRAINING_PROJECT, EsTrainingProject.class);
    }

    /**
     * Builds the tenant-isolation query from the current request context:
     * not deleted, published, and matching the caller's companyId and siteId.
     *
     * @return the combined bool query
     */
    public static QueryBuilder getCommonQueryBuilderForContext() {
        // Company id / site id come from the thread-bound ContextHolder.
        QueryBuilder queryBuilder = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("deleted", Constant.DELETED_NO))
                .must(QueryBuilders.termQuery("status", Constant.STATUS_UP))
                .must(QueryBuilders.termQuery("companyId", ContextHolder.get().getCompanyId()))
                .must(QueryBuilders.termQuery("siteId", ContextHolder.get().getSiteId()));
        return queryBuilder;
    }

    /**
     * Builds the visible-range query: documents visible to the whole platform,
     * OR documents with appointed visibility whose ids the caller may see.
     *
     * @param elasticsearchRestTemplate template used to resolve visible biz ids
     * @param indexName visible-range index name for the biz type
     * @param clazz ES pojo class of the biz type
     * @return the combined visibility query
     */
    public static QueryBuilder getCommonQueryBuilderForVisibleRange(ElasticsearchRestTemplate elasticsearchRestTemplate, String indexName, Class clazz) {
        // Platform-wide visibility: visible_range == public.
        QueryBuilder queryBuilderForVRP = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("visibleRange", Constant.VISIBLE_RANGE_PUBLIC));
        // Ids the caller's relations make visible (null when none).
        QueryBuilder queryBuilderForVRR = queryBuilderForVRR(elasticsearchRestTemplate, indexName, clazz);
        // 1. No appointed-visible biz: only public documents match.
        if (queryBuilderForVRR == null) {
            return queryBuilderForVRP;
        }
        // 2. Appointed visibility restricted to the caller's visible ids.
        QueryBuilder queryBuilderForVRA = QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("visibleRange", Constant.VISIBLE_RANGE_APPOINTED))
                .must(queryBuilderForVRR);
        // Combine: appointed-and-visible OR public.
        QueryBuilder queryBuilder = QueryBuilders.boolQuery()
                .should(queryBuilderForVRA)
                .should(queryBuilderForVRP);
        LOGGER.info("visible_range queryBuilder----------: " + queryBuilder.toString());
        return queryBuilder;
    }

    /**
     * Builds the base query: tenant-isolation AND visible-range constraints.
     *
     * @param elasticsearchRestTemplate template used to resolve visible biz ids
     * @param indexName visible-range index name for the biz type
     * @param clazz ES pojo class of the biz type
     * @return the combined query
     */
    public static QueryBuilder getCommonQueryBuilder(ElasticsearchRestTemplate elasticsearchRestTemplate, String indexName, Class clazz) {
        QueryBuilder queryBuilder = QueryBuilders.boolQuery()
                .must(getCommonQueryBuilderForContext())
                .must(getCommonQueryBuilderForVisibleRange(elasticsearchRestTemplate, indexName, clazz));
        return queryBuilder;
    }

    /**
     * Builds an OR (should) query from the object's searchable getters:
     * one match clause per getter in SEARCH_GETTER_METHOD_NAME.
     * <p>
     * TODO 动态字段类型分析
     *
     * @param object bean whose name/keyword values become match clauses
     * @return bool query with one should-clause per searchable field
     */
    public static QueryBuilder getMustQueryBuilderFromObject(@NotNull Object object) {
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
        Method getMethod;
        String name;
        Object value;
        Class clazz = object.getClass();
        try {
            for (String getter : SEARCH_GETTER_METHOD_NAME) {
                getMethod = clazz.getMethod(getter);
                if (getMethod != null) {
                    // NOTE(review): lowercasing the whole suffix only works for
                    // single-word fields ("name", "keyword"); a camelCase getter
                    // would be mangled here — confirm before adding new fields.
                    name = getter.substring(3).toLowerCase();
                    value = getMethod.invoke(object);
                    queryBuilder.should(QueryBuilders.matchQuery(name, value));
                }
            }
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
            LOGGER.error("", e);
        }
        return queryBuilder;
    }

    /**
     * Builds a daily date-histogram aggregation on releaseTime, using the
     * Asia/Shanghai time zone with a +8h offset.
     *
     * @return the aggregation builder (named "date_transfer")
     */
    public static AbstractAggregationBuilder getTimeAggreation() {
        // NOTE(review): method name typo ("Aggreation") kept for compatibility.
        DateHistogramAggregationBuilder builder = AggregationBuilders.dateHistogram("date_transfer")
                .timeZone(ZoneId.of("Asia/Shanghai"))
                .interval(24 * 60 * 60)
                .field("releaseTime")
                .offset("+8h");
        // .minDocCount(0);
        return builder;
    }

    /**
     * Builds a paged keyword-search query for one biz type: keyword match on
     * the searchable fields, type-specific time/source filters, tenant and
     * visibility filters, sorted by score then by the given field.
     *
     * @param keyword  目前仅支持 name 属性
     * @param clazz    ES pojo class of the biz type
     * @param pageNo   1-based page number
     * @param pageSize page size
     * @param sortFiled secondary sort field (after score)
     * @param isAsc    true for ascending secondary sort
     * @param elasticsearchRestTemplate template used for visibility resolution
     * @return the search query, or null when clazz cannot be instantiated
     */
    public static Query commonPageSearchQueryForKeyword(String keyword, Class clazz, Integer pageNo, Integer pageSize, String sortFiled, Boolean isAsc, ElasticsearchRestTemplate elasticsearchRestTemplate) {
        try {
            // Populate a throwaway bean with the keyword on every searchable setter.
            Object object = clazz.newInstance();
            Method method;
            try {
                for (String methodName : SEARCH_SETTER_METHOD_NAME) {
                    method = clazz.getMethod(methodName, String.class);
                    method.invoke(object, keyword);
                }
            } catch (NoSuchMethodException e) {
                LOGGER.info("-----------全局搜索,Class:{} ,没有方法:{}-----------", clazz.toString(), e.getMessage());
            }
            BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery()
                    .must(getMustQueryBuilderFromObject(object));
            // Type-specific constraints (time window / course source), if any.
            QueryBuilder queryBuilderTimeLimit = specificQueryBuilder(clazz);
            if (null != queryBuilderTimeLimit) {
                queryBuilder.must(queryBuilderTimeLimit);
            }
            Query searchQuery = new NativeSearchQueryBuilder()
                    // .addAggregation(getTimeAggreation())
                    .withQuery(queryBuilder)
                    .withFilter(getCommonQueryBuilder(elasticsearchRestTemplate, VISIBLE_RANGE_TYPE.get(clazz), clazz))
                    // page从0开始
                    .withPageable(PageRequest.of(pageNo - 1, pageSize))
                    // notice: 先按评分倒排,再按业务要求排序
                    .withSort(SortBuilders.scoreSort().order(SortOrder.DESC))
                    //增加unmappedType 防止某type找到相应字段
                    .withSort(isAsc ? SortBuilders.fieldSort(sortFiled).order(SortOrder.ASC).unmappedType("long") : SortBuilders.fieldSort(sortFiled).order(SortOrder.DESC).unmappedType("long"))
                    .build();
            return searchQuery;
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Builds the type-specific filter for a biz class:
     * research must be within its start/end window; course and marketing
     * course are split by the "source" field.
     *
     * @param clazz ES pojo class of the biz type
     * @return the extra filter, or null when the type has none
     */
    public static QueryBuilder specificQueryBuilder(Class clazz) {
        QueryBuilder result = null;
        // Research: only while the survey window is open (startTime <= today <= endTime).
        if (clazz.equals(EsResearch.class)) {
            QueryBuilder queryBuilderStart = QueryBuilders.rangeQuery("startTime")
                    // .format("yyyy-MM-dd")
                    .lte(getToday());
            QueryBuilder queryBuilderEnd = QueryBuilders.rangeQuery("endTime")
                    // .format("yyyy-MM-dd")
                    .gte(getToday());
            result = QueryBuilders.boolQuery()
                    .must(queryBuilderStart)
                    .must(queryBuilderEnd);
        }
        // Course: exclude marketing courses (source 4).
        else if (clazz.equals(EsCourse.class)) {
            result = QueryBuilders.boolQuery()
                    .must(QueryBuilders.termsQuery("source", "1", "2", "3", "5", "6"));
        }
        // Marketing course: only source 4.
        else if (clazz.equals(EsMarketingCourse.class)) {
            result = QueryBuilders.boolQuery()
                    .must(QueryBuilders.termQuery("source", "4"));
        }
        return result;
    }

    /**
     * Global search for one biz type with a per-type result cap and sort:
     * courses/projects show 4 by releaseTime desc; exams/assignments/research
     * show 2 by endTime; everything else shows 2 by status.
     *
     * @param keyword  search keyword
     * @param clazz    ES pojo class of the biz type
     * @param template Elasticsearch template
     * @return matched hits, or null when nothing matched
     */
    public static List globalSearch(String keyword, Class clazz, ElasticsearchRestTemplate template) {
        int pageSize = 0;
        String sortField = null;
        boolean isAsc = true;
        // Courses and projects: top 4, newest release first.
        if (clazz.equals(EsCourse.class) || clazz.equals(EsTrainingProject.class) || clazz.equals(EsMarketingCourse.class)) {
            pageSize = 4;
            sortField = "releaseTime";
            isAsc = false;
        } else if (clazz.equals(EsExam.class) || clazz.equals(EsAssignment.class) || clazz.equals(EsResearch.class)) {
            pageSize = 2;
            sortField = "endTime";
        } else if (clazz.equals(EsStudentCase.class)) {
            pageSize = 2;
            sortField = "status";
        } else {
            pageSize = 2;
            sortField = "status";
        }
        Query searchQuery = QueryHelper.commonPageSearchQueryForKeyword(keyword, clazz, 1, pageSize, sortField, isAsc, template);
        SearchHits searchHits = template.search(searchQuery, clazz);
        // No hits: callers treat null as "nothing found".
        if (!searchHits.hasSearchHits()) {
            return null;
        }
        List list = new ArrayList(Math.toIntExact(searchHits.getTotalHits()));
        searchHits.get().forEach(item -> list.add(item));
        return list;
    }

    /**
     * Returns true when value is a non-null primitive wrapper or String —
     * i.e. a type safe to put directly into a term/match query.
     *
     * @param value candidate field value
     * @return true for non-null wrapper/String values
     */
    public static Boolean checkValue(Object value) {
        return value != null &&
                (value instanceof Byte
                        || value instanceof Short
                        || value instanceof Integer
                        || value instanceof Long
                        || value instanceof Float
                        || value instanceof Double
                        || value instanceof Character
                        || value instanceof Boolean
                        || value instanceof String);
    }

    // Joins the caller's relation ids into a space-separated string for a
    // match query (ES analyzes the string back into individual terms).
    private static String buildRelationIdsString() {
        StringBuilder sb = new StringBuilder();
        ContextHolder.get().getRelationIds().stream().forEach(e -> sb.append(e).append(" "));
        return sb.toString();
    }

    /**
     * Builds a relationIds array sized to the caller's relation count.
     * NOTE(review): the array is allocated but never populated — every element
     * is null. Appears unused within this class; confirm before relying on it.
     *
     * @return array of nulls with length == relationIds.size()
     */
    private static Long[] buildRelationIdsArray() {
        Long[] arr = new Long[ContextHolder.get().getRelationIds().size()];
        return arr;
    }

    /**
     * Builds the query matching documents whose id is among the biz ids the
     * caller's relations make visible.
     *
     * @return a match query over the visible ids, or null when there are none
     */
    private static QueryBuilder queryBuilderForVRR(ElasticsearchRestTemplate elasticsearchRestTemplate, String indexName, Class clazz) {
        List<String> bizIds = getBizIds4VisibleRange(elasticsearchRestTemplate, indexName, clazz);
        if (!CollectionUtils.isEmpty(bizIds)) {
            StringBuilder sb = new StringBuilder();
            bizIds.stream().forEach(e -> sb.append(e).append(" "));
            return QueryBuilders.matchQuery("id", sb.toString());
        }
        return null;
    }

    // Today's date as "yyyy-MM-dd", used for the research time-window filter.
    private static String getToday() {
        return DateFormatUtils.format(new Date(), "yyyy-MM-dd");
    }

    /**
     * Queries the visible-range index for the document ids (biz ids) whose
     * relationIds overlap the caller's relations; capped at 10000 hits.
     *
     * @param elasticsearchRestTemplate template used for the lookup
     * @param indexName visible-range index name (null/empty yields null)
     * @param clazz ES pojo class used to map the hits
     * @return visible biz ids, or null when none (or no index name)
     */
    public static List<String> getBizIds4VisibleRange(ElasticsearchRestTemplate elasticsearchRestTemplate, String indexName, Class clazz) {
        if (StringUtils.isEmpty(indexName)) {
            return null;
        }
        Query searchQuery = new NativeSearchQueryBuilder()
                .withQuery(QueryBuilders.matchQuery("relationIds", buildRelationIdsString()))
                .withPageable(PageRequest.of(0, 10000))
                .build();
        SearchHits searchHits = elasticsearchRestTemplate.search(searchQuery, clazz, IndexCoordinates.of(indexName));
        // Collect the document ids of every hit.
        if (searchHits.hasSearchHits()) {
            List<String> bizIds = new ArrayList<>();
            List<SearchHit> searchHitList = searchHits.getSearchHits();
            for (SearchHit hit : searchHitList) {
                bizIds.add(hit.getId());
            }
            return bizIds;
        }
        return null;
    }
}
package com.yizhi.esearch.application.util;
import com.yizhi.util.application.domain.Response;
import org.springframework.data.domain.Page;
import java.util.HashMap;
import java.util.Map;
/**
 * Helpers for wrapping service results into the common Response envelope.
 *
 * @Author: shengchenglong
 * @Date: 2018/12/26 17:00
 */
public class ResponseHelper {

    /**
     * Wraps an arbitrary payload in a success response.
     *
     * @param data payload
     * @return success response carrying data
     */
    public static Response ok(Object data) {
        return Response.ok(data);
    }

    /**
     * Wraps a Spring Data page: the page content becomes the payload and the
     * paging metadata is exposed with a 1-based page number.
     *
     * @param page page to convert (Spring pages are 0-based internally)
     * @return success response with content plus pageNo/pageSize/pageTotal/pageRecords
     */
    public static Response ok(Page page) {
        Map<String, Integer> pageMap = new HashMap<>();
        pageMap.put("pageNo", page.getNumber() + 1);
        pageMap.put("pageSize", page.getSize());
        pageMap.put("pageTotal", page.getTotalPages());
        // Math.toIntExact replaces the long -> String -> Integer round-trip and
        // fails fast (ArithmeticException) if the count ever exceeds int range.
        pageMap.put("pageRecords", Math.toIntExact(page.getTotalElements()));
        return Response.ok(page.getContent(), pageMap);
    }
}
server.port=15000
spring.application.name=esearch
ACTIVE=${spring.profiles.active}
spring.profiles.active=wmy401
# nacos
spring.cloud.nacos.config.shared-dataids=common-${spring.profiles.active}.properties
spring.cloud.nacos.config.namespace=${spring.profiles.active}
spring.cloud.nacos.config.prefix=${spring.application.name}
spring.cloud.nacos.config.file-extension=properties
spring.cloud.nacos.config.server-addr=192.168.1.7:3333
\ No newline at end of file
FROM registry.cn-shanghai.aliyuncs.com/wmy/jres8:latest
RUN mkdir -p /opt
ADD logstash-7.6.2 /opt/logstash-7.6.2
WORKDIR /opt/logstash-7.6.2/logstash-config
ENTRYPOINT ["sh","./docker/run.sh"]
\ No newline at end of file
#!/usr/bin/env bash
rm -rf logstash-6.5.3
cp -r ../logstash-6.5.3 .
rm -rf logstash-6.5.3/config/stable/*log
rm -rf logstash-6.5.3/config/stable/*txt
docker build -t registry.cn-shanghai.aliyuncs.com/wmy/logstash .
docker push registry.cn-shanghai.aliyuncs.com/wmy/logstash
echo run CMD:
#echo " docker run -d --name logstash --restart=always --log-opt max-size=100m --log-opt max-file=3 registry.cn-shanghai.aliyuncs.com/wmy/logstash <conf name default: my-test.conf>"
echo '# use env relaseENV: <test uat prod> assign the conf'
echo "docker run -d --name logstash --restart=always --log-driver=json-file --log-opt max-size=100m --log-opt max-file=3 -e 'relaseENV=test' registry.cn-shanghai.aliyuncs.com/wmy/logstash "
echo "docker run -d --name logstash --restart=always -e 'relaseENV=test' --add-host=elasticsearch.elasticsearch:192.168.0.173 registry.cn-shanghai.aliyuncs.com/wmy/logstash"
#! /bin/sh
echo --------------------------------------------------------------
echo 已选定环境: $relaseENV
echo 已指定配置文件: ./mysql/mysql-$relaseENV
sh /opt/logstash-7.6.2/bin/logstash -f ./mysql/mysql-$relaseENV
ALTER TABLE `cloud_case_library`.`case_library`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`case_library_related_classify`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`case_library_authorize`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_course`.`offline_course`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`student_case`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`student_case_related_classify`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`student_case_authorize`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_case_library`.`classify`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_lecturer`.`lecturer_classification`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_album`.`album`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_album`.`album_classify`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_trainning_project`.`tp_classification`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
ALTER TABLE `cloud_course`.`classify`
ADD COLUMN `index_time` timestamp(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '索引时间',
DROP PRIMARY KEY,
ADD PRIMARY KEY (`id`) USING BTREE;
\ No newline at end of file
cloud_trainning_project.assignment_student
cloud_case_library.case_library_related_classify
cloud_case_library.case_library_authorize
cloud_case_library.student_case_related_classify
cloud_case_library.student_case_authorize
cloud_course.course_account
cloud_exam.tr_exam_authorize
cloud_research.tr_research_authorize
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!物理删除
cloud_lecturer.tr_lecturer_classification
cloud_lecturer.lecturer_keywords
cloud_live.scope_authorization
\ No newline at end of file
input {
stdin {
}
# 课程
jdbc {
type => "course"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 培训项目
jdbc {
type => "training_project"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 作业
jdbc {
type => "assignment"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 考试
jdbc {
type => "exam"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 调研
jdbc {
type => "research"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 组织部门
jdbc {
type => "organization"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 用户
jdbc {
type => "account"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 讲师
jdbc {
type => "lecturer"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_lecturer?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 线下课程
jdbc {
type => "offline_course"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# Student cases (original comment said "case library"; type below is student_case)
jdbc {
type => "student_case"
# Database connection settings (dev environment)
jdbc_connection_string => "jdbc:mysql://192.168.1.30:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "devDB"
jdbc_password => "devDB@123"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# Column-value tracking is disabled: :sql_last_value is set to the time of the
# previous run, and the SQL file filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
}
#filter {
# json {
# source => "message"
# remove_field => ["message"]
# }
#}
output {
# Course documents are written to two Elasticsearch endpoints: a local node and
# the in-cluster service. NOTE: removed the stray "," that previously separated
# the two elasticsearch blocks — plugin blocks in a Logstash config are separated
# by whitespace only, and the comma is a config parse error.
if [type] == "course" {
elasticsearch {
hosts => "localhost:9200"
# target index
index => "course"
document_type => "course"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "course"
document_type => "course"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
# Echo every event to stdout as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
input {
# NOTE(review): production DB credentials are committed here in plain text —
# consider moving them to environment variables (${VAR}) or a secret store.
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
# Courses
jdbc {
type => "course"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# Course visibility/auth relations
jdbc {
type => "course_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
# account start -------------------------------------------------------------------------------------------------------
jdbc {
type => "account"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_system?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# account end -------------------------------------------------------------------------------------------------------
# assignment start -----------------------------------------------------------------------------------------------------
jdbc {
type => "assignment"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "assignment_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_assignment_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# assignment end -----------------------------------------------------------------------------------------------------
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "case_library_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "exam_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
# lecturer start -----------------------------------------------------------------------------------------------------
# Lecturer rows are hard-deleted upstream, so near-real-time sync of deletions is
# not possible for this type yet
jdbc {
type => "lecturer"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_lecturer?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# lecturer end -----------------------------------------------------------------------------------------------------
# live start -----------------------------------------------------------------------------------------------------
jdbc {
type => "live"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_live?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_live.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_live.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# live end -----------------------------------------------------------------------------------------------------
# organization start -----------------------------------------------------------------------------------------------------
jdbc {
type => "organization"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_system?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# organization end -----------------------------------------------------------------------------------------------------
# research start -----------------------------------------------------------------------------------------------------
jdbc {
type => "research"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_research?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "research_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_research?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_research_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# research end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "student_case_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
# training_project start -----------------------------------------------------------------------------------------------------
jdbc {
type => "training_project"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "training_project_auth"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "training_project_enroll_record"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
# NOTE(review): "quety" looks like a typo for "query" — confirm the actual file
# name on disk before renaming either side
statement_filepath => "./quety_training_project_record_enroll.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_record_enroll.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# training_project end -----------------------------------------------------------------------------------------------------
# biz_visible_range start -----------------------------------------------------------------------------------------------------
jdbc {
type => "biz_visible_range"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://220.248.15.46:26556/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project_visible_range.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_visible_range.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every 5 minutes
schedule => "*/5 * * * *"
}
# biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# NOTE(review): Time#localtime returns a local-time view but the result is not
# assigned back to event.timestamp — confirm this actually changes the event
code => "event.timestamp.time.localtime"
}
}
output {
# Routes each event to an Elasticsearch index matching its input "type". Several
# types are intentionally routed twice: once to their own index and once to the
# shared "doc_recommend" index keyed by the recommendId column.
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "course"
document_type => "course"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "course_auth"
document_type => "course_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "training_project"
document_type => "training_project"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "training_project_auth"
document_type => "training_project_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project_enroll_record" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "training_project_enroll_record"
document_type => "training_project_enroll_record"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "assignment"
document_type => "assignment"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "assignment_auth"
document_type => "assignment_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "exam"
document_type => "exam"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "exam_auth"
document_type => "exam_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "research"
document_type => "research"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "research_auth"
document_type => "research_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "organization" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "organization"
document_type => "organization"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "account" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "account"
document_type => "account"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "lecturer" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "lecturer"
document_type => "lecturer"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "offline_course"
document_type => "offline_course"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "student_case"
document_type => "student_case"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "student_case_auth"
document_type => "student_case_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "case_library"
document_type => "case_library"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "case_library_auth"
document_type => "case_library_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "live"
document_type => "live"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
# NOTE(review): no jdbc input with type "live_auth" is visible in this pipeline
# section — confirm this branch is reachable
if [type] == "live_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "live_auth"
document_type => "live_auth"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
# NOTE(review): no jdbc input with type "album" is visible in this pipeline
# section (album is ingested by a separate pipeline below) — confirm reachable
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "album"
document_type => "album"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
document_type => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "biz_visible_range" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "biz_visible_range"
document_type => "biz_visible_range"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
# Echo every event to stdout as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
input {
stdin {
}
# Albums (local/dev database)
jdbc {
type => "album"
# Database connection settings
jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_album?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./sql/query_album.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_album.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every minute
schedule => "* * * * *"
}
}
filter {
ruby {
# NOTE(review): Time#localtime returns a local-time view but the result is not
# assigned back to event.timestamp — confirm this actually changes the event
code => "event.timestamp.time.localtime"
}
}
output {
# Album events go to the "album" index and, keyed by recommendId, to the shared
# "doc_recommend" index.
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "album"
# document id comes from the query's id column
document_id => "%{id}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# target index
index => "doc_recommend"
# document id comes from the query's recommendId column
document_id => "%{recommendId}"
template_overwrite => true
template => "./template/logstash.json"
}
}
# Echo every event to stdout as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
input {
stdin {
}
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# Database connection settings (local/dev database)
jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./sql/query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every minute
schedule => "* * * * *"
}
jdbc {
type => "case_library_auth"
# Database connection settings (local/dev database)
jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./sql/query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every minute
schedule => "* * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# Database connection settings (local/dev database)
jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./sql/query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every minute
schedule => "* * * * *"
}
jdbc {
type => "student_case_auth"
# Database connection settings (local/dev database)
jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./sql/query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Column-value tracking disabled: :sql_last_value is the previous run time;
# the SQL filters rows whose create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
# Cron-style schedule (minute hour day-of-month month day-of-week): every minute
schedule => "* * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "student_case"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "student_case_auth"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "case_library"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "case_library_auth"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
# Pipeline: sync course / course_auth / offline_course tables from MySQL
# into Elasticsearch, polling every minute.
input {
    stdin {
    }
    # course start -----------------------------------------------------------------------------------------------------
    # Course
    jdbc {
        type => "course"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_course?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_course.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_course.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # Course authorization relations
    jdbc {
        type => "course_auth"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_course?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_course_relationIds.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_course_relationIds.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # course end -----------------------------------------------------------------------------------------------------
    # offlineCourse start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "offline_course"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_course?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_offlineCourse.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_offlineCourse.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # offlineCourse end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
# Route each jdbc 'type' to its own ES index; 'course' additionally fans out
# into the shared doc_recommend index keyed by recommendId.
output {
    if [type] == "course" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "course"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "course_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "course_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "course" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "offline_course" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "offline_course"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync exam / exam_auth tables from MySQL into Elasticsearch,
# polling every minute.
input {
    stdin {
    }
    # exam start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "exam"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_exam?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_exam.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_exam.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "exam_auth"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_exam?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_exam_relationIds.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_exam_relationIds.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # exam end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
# Route each jdbc 'type' to its own ES index; 'exam' additionally fans out
# into the shared doc_recommend index keyed by recommendId.
output {
    if [type] == "exam" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "exam"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "exam_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "exam_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "exam" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync the lecturer table from MySQL into Elasticsearch,
# polling every minute.
input {
    stdin {
    }
    # lecturer start -----------------------------------------------------------------------------------------------------
    # Rows are hard-deleted in the DB, so near-real-time sync of deletions is not possible yet
    jdbc {
        type => "lecturer"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_lecturer?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_lecturer.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_lecturer.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # lecturer end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
output {
    if [type] == "lecturer" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "lecturer"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync the live table from MySQL into Elasticsearch,
# polling every minute.
input {
    stdin {
    }
    # live start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "live"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_live?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_live.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => update_time
        record_last_run => true
        last_run_metadata_path => "./station_live.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # live end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
output {
    if [type] == "live" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "live"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    # NOTE(review): no jdbc input in this section produces type "live_auth" — this
    # branch appears unreachable here; confirm whether a live_auth input is missing
    if [type] == "live_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "live_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "live" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync research / research_auth tables from MySQL into Elasticsearch,
# polling every minute.
input {
    stdin {
    }
    # research start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "research"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_research?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_research.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_research.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "research_auth"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_research?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_research_relationIds.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_research_relationIds.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # research end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
# Route each jdbc 'type' to its own ES index; 'research' additionally fans out
# into the shared doc_recommend index keyed by recommendId.
output {
    if [type] == "research" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "research"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "research_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "research_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "research" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync account / organization tables from the cloud_system MySQL
# database into Elasticsearch, polling every minute.
input {
    stdin {
    }
    jdbc {
        type => "account"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_system?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_account.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_account.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "organization"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_system?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_organization.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_organization.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
output {
    if [type] == "organization" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "organization"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "account" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "account"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# Pipeline: sync assignment / training_project tables (plus auth relations,
# enroll records, and visible-range rows) from MySQL into Elasticsearch,
# polling every minute.
input {
    stdin {
    }
    # assignment start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "assignment"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_assignment.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_assignment.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "assignment_auth"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_assignment_relationIds.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_assignment_relationIds.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # assignment end -----------------------------------------------------------------------------------------------------
    # training_project start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "training_project"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_training_project.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_training_project.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "training_project_auth"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_training_project_relationIds.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_training_project_relationIds.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    jdbc {
        type => "training_project_enroll_record"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        # NOTE(review): "quety" looks like a typo for "query" — confirm the file on
        # disk really carries this name before renaming either side
        statement_filepath => "./sql/quety_training_project_record_enroll.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_training_project_record_enroll.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # training_project end -----------------------------------------------------------------------------------------------------
    # biz_visible_range start -----------------------------------------------------------------------------------------------------
    jdbc {
        type => "biz_visible_range"
        # Database connection
        jdbc_connection_string => "jdbc:mysql://localhost:3306/cloud_trainning_project?tinyInt1isBit=false"
        jdbc_user => "root"
        jdbc_password => "root"
        jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        codec => plain { charset => "UTF-8"}
        lowercase_column_names => false
        statement_filepath => "./sql/query_training_project_visible_range.sql"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "5000"
        # Column-value tracking disabled: :sql_last_value is set to the time of the current run,
        # and the SQL filters rows by create/update time >= :sql_last_value
        use_column_value => false
        tracking_column => index_time
        record_last_run => true
        last_run_metadata_path => "./station_training_project_visible_range.txt"
        # Polling schedule (cron fields, left to right: minute, hour, day, month, year); all '*' = every minute
        schedule => "* * * * *"
    }
    # biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # NOTE(review): this expression returns a localtime Time object but does not
        # write it back to the event — confirm it actually adjusts @timestamp as intended
        code => "event.timestamp.time.localtime"
    }
}
# Route each jdbc 'type' to its own ES index; 'training_project' and 'assignment'
# additionally fan out into the shared doc_recommend index keyed by recommendId.
output {
    if [type] == "training_project" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "training_project"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "training_project_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "training_project_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "training_project" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "training_project_enroll_record" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "training_project_enroll_record"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "assignment" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "assignment"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "assignment_auth" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "assignment_auth"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "assignment" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name (shared recommendation index)
            index => "doc_recommend"
            # Document id taken from the recommendId column of the source rows
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    if [type] == "biz_visible_range" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "biz_visible_range"
            # Source rows carry an id column used as the Elasticsearch document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
# 课程
jdbc {
type => "course"
# 数据库
jdbc_connection_string => "jdbc:mysql://rm-uf65tob4f2p3c0wrr.mysql.rds.aliyuncs.com:3306/cloud_course_mk?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 课程
jdbc {
type => "course_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rm-uf65tob4f2p3c0wrr.mysql.rds.aliyuncs.com:3306/cloud_course_mk?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# 数据库
jdbc_connection_string => "jdbc:mysql://rm-uf65tob4f2p3c0wrr.mysql.rds.aliyuncs.com:3306/cloud_course_mk?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
# account start -------------------------------------------------------------------------------------------------------
jdbc {
type => "account"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_system_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# account end -------------------------------------------------------------------------------------------------------
# assignment start -----------------------------------------------------------------------------------------------------
jdbc {
type => "assignment"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_trainning_project_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "assignment_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_trainning_project_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_assignment_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# assignment end -----------------------------------------------------------------------------------------------------
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_case_library_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "case_library_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_case_library_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf611rbw6yj4xhs92.mysql.rds.aliyuncs.com:3306/cloud_exam_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "exam_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf611rbw6yj4xhs92.mysql.rds.aliyuncs.com:3306/cloud_exam_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
# lecturer start -----------------------------------------------------------------------------------------------------
# 物理删除,暂时无法做到近实时同步
jdbc {
type => "lecturer"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_lecturer_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# lecturer end -----------------------------------------------------------------------------------------------------
# live start -----------------------------------------------------------------------------------------------------
jdbc {
type => "live"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_live_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_live.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_live.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# live end -----------------------------------------------------------------------------------------------------
# organization start -----------------------------------------------------------------------------------------------------
jdbc {
type => "organization"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_system_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# organization end -----------------------------------------------------------------------------------------------------
# research start -----------------------------------------------------------------------------------------------------
jdbc {
type => "research"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_research_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "research_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_research_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_research_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# research end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_case_library_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "student_case_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_case_library_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
# training_project start -----------------------------------------------------------------------------------------------------
jdbc {
type => "training_project"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_trainning_project_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "training_project_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://rr-uf69x98be4rgs740n.mysql.rds.aliyuncs.com:3306/cloud_trainning_project_mk?tinyInt1isBit=false"
jdbc_user => "mlkadmin"
jdbc_password => "mlkRDS3306"
jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "./query_training_project_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "2000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# training_project end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "course"
document_type => "course"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "course_auth"
document_type => "course_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "training_project"
document_type => "training_project"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "training_project_auth"
document_type => "training_project_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "assignment"
document_type => "assignment"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "assignment_auth"
document_type => "assignment_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "exam"
document_type => "exam"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "exam_auth"
document_type => "exam_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "research"
document_type => "research"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "research_auth"
document_type => "research_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "organization" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "organization"
document_type => "organization"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "account" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "account"
document_type => "account"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "lecturer" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "lecturer"
document_type => "lecturer"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "offline_course"
document_type => "offline_course"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "student_case"
document_type => "student_case"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "student_case_auth"
document_type => "student_case_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "case_library"
document_type => "case_library"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "case_library_auth"
document_type => "case_library_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "live"
document_type => "live"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "live_auth" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "live_auth"
document_type => "live_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "album"
document_type => "album"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "./logstash.json"
}
}
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch.elasticsearch:9200"
# index名
index => "doc_recommend"
document_type => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "./logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
jdbc {
type => "album"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_album?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_album.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_album.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "album"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "case_library_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "student_case_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "student_case"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "student_case_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "case_library"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "case_library_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
    # 课程
jdbc {
type => "course"
# 数据库
jdbc_connection_string => "jdbc:mysql://prod-course.mysql.rds.aliyuncs.com:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# 课程
jdbc {
type => "course_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://prod-course.mysql.rds.aliyuncs.com:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# 数据库
jdbc_connection_string => "jdbc:mysql://prod-course.mysql.rds.aliyuncs.com:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "course"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "course_auth"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "doc_recommend"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "offline_course"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
jdbc {
type => "exam_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
        # 设置监听间隔 各字段含义(由左至右)分、时、日、月、星期,全部为*默认含义为每分钟都更新
schedule => "*/5 * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "exam"
      # 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "exam_auth"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
# --- lecturer pipeline: syncs cloud_lecturer (Aliyun RDS) into the "lecturer" index every 5 minutes ---
input {
stdin {
}
# lecturer start -----------------------------------------------------------------------------------------------------
# Rows are hard-deleted in the DB, so near-real-time sync of deletions is not possible yet
jdbc {
type => "lecturer"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_lecturer?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# lecturer end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "lecturer" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "lecturer"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- live pipeline: syncs cloud_live (Aliyun RDS) into the "live" index every 5 minutes ---
input {
stdin {
}
# live start -----------------------------------------------------------------------------------------------------
jdbc {
type => "live"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_live?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_live.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is ignored (use_column_value is false); note siblings use index_time here
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_live.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# live end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "live"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# NOTE(review): no "live_auth" jdbc input exists in this section; Logstash merges all
# config files into one pipeline, so these events presumably come from another file — verify.
if [type] == "live_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "live_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "live" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- research pipeline: syncs cloud_research (Aliyun RDS) into "research"/"research_auth" every 5 minutes ---
input {
stdin {
}
# research start -----------------------------------------------------------------------------------------------------
jdbc {
type => "research"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "research_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# research end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "research"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "research_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "research_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "research" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- account/organization pipeline: syncs cloud_system (Aliyun RDS) into "account"/"organization" every 5 minutes ---
input {
stdin {
}
jdbc {
type => "account"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "organization"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "organization" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "organization"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "account" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "account"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- assignment / training_project / biz_visible_range pipeline:
# syncs cloud_trainning_project (Aliyun RDS) into several indexes every 5 minutes.
# NOTE(review): the database name "cloud_trainning_project" (double n) looks misspelled,
# but it is a runtime value — only change if the actual schema name is different.
input {
stdin {
}
# assignment start -----------------------------------------------------------------------------------------------------
jdbc {
type => "assignment"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "assignment_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# assignment end -----------------------------------------------------------------------------------------------------
# training_project start -----------------------------------------------------------------------------------------------------
jdbc {
type => "training_project"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "training_project_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "training_project_enroll_record"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
# NOTE(review): "quety_..." looks like a typo for "query_...", but this must match the
# actual filename on disk — verify before renaming either side
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/quety_training_project_record_enroll.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_record_enroll.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# training_project end -----------------------------------------------------------------------------------------------------
# biz_visible_range start -----------------------------------------------------------------------------------------------------
jdbc {
type => "biz_visible_range"
# Database connection
jdbc_connection_string => "jdbc:mysql://pc-uf64lg48f1k9u29dl.rwlb.rds.aliyuncs.com:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_visible_range.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_visible_range.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "training_project" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_enroll_record" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project_enroll_record"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "assignment"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "assignment_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "assignment" events are also indexed into the shared doc_recommend index.
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "biz_visible_range" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "biz_visible_range"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- album pipeline: syncs cloud_album into the "album" index every 5 minutes ---
# NOTE(review): this section targets 192.168.1.48 while earlier sections use the Aliyun
# RDS host — confirm which environment this config is meant for.
input {
stdin {
}
jdbc {
type => "album"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_album?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_album.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_album.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "album"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "album" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- case library / student case pipeline: syncs cloud_case_library into
# "case_library"/"student_case" (+ _auth) indexes every 5 minutes.
# NOTE(review): targets 192.168.1.48, unlike the Aliyun RDS host in earlier sections — confirm environment.
input {
stdin {
}
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "case_library_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "student_case_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "student_case"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "student_case_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "student_case" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "case_library"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "case_library_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "case_library" events are also indexed into the shared doc_recommend index.
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- course pipeline: syncs cloud_course into "course"/"course_auth"/"offline_course" every 5 minutes ---
# NOTE(review): targets 192.168.1.48, unlike the Aliyun RDS host in earlier sections — confirm environment.
input {
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
# Course (online course documents)
jdbc {
type => "course"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# Course authorization relations
jdbc {
type => "course_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "course"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "course_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "course" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "offline_course"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
# --- exam pipeline: syncs cloud_exam into "exam"/"exam_auth" every 5 minutes ---
# NOTE(review): near-duplicate of the Aliyun exam section above but targeting 192.168.1.48;
# running both against the same Logstash instance would double-poll — confirm environment split.
input {
stdin {
}
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# Database connection
# NOTE(review): DB credentials are hard-coded in plain text — consider externalizing them
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
jdbc {
type => "exam_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "wmyadmin"
jdbc_password => "wmyRDS3306"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# No column-value tracking: :sql_last_value is set to the time of this run, and the
# SQL selects rows whose create/update time >= :sql_last_value
use_column_value => false
# tracking_column is effectively ignored because use_column_value is false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
# Cron schedule: minute hour day-of-month month day-of-week — every 5 minutes
schedule => "*/5 * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
}
filter {
# Render the event timestamp in local time (see note in sibling sections)
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "exam"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "exam_auth"
# Use the row's id column as the document _id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Dual-write: "exam" events are also indexed into the shared doc_recommend index.
# NOTE(review): rows lacking recommendId get the literal "%{recommendId}" as _id — verify the SQL.
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# Use the row's recommendId column as the document _id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Echo every event as JSON lines (debugging aid)
stdout {
codec => json_lines
}
}
input {
    stdin {
    }
    # lecturer start -----------------------------------------------------------------------------------------------------
    # Rows are hard-deleted in MySQL, so near-real-time sync of deletions is not possible
    jdbc {
      type => "lecturer"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_lecturer?tinyInt1isBit=false"
      # NOTE(review): plain-text DB credentials; consider the logstash keystore
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_lecturer.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_lecturer.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # lecturer end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    if [type] == "lecturer" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "lecturer"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # live start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "live"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_live?tinyInt1isBit=false"
      # NOTE(review): plain-text DB credentials; consider the logstash keystore
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_live.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      # NOTE(review): this block names update_time while sibling blocks use
      # index_time; harmless either way since use_column_value is false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_live.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # live end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    # Main live-stream documents
    if [type] == "live" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "live"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Live authorization/visibility relations
    if [type] == "live_auth" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "live_auth"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Second block for the same type is intentional: live docs are ALSO indexed
    # into the shared doc_recommend index, keyed by recommendId
    if [type] == "live" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "doc_recommend"
            # The source row's recommendId column is used as the ES document id
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # research start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "research"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_research?tinyInt1isBit=false"
      # NOTE(review): plain-text DB credentials; consider the logstash keystore
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_research.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # Research authorization/visibility relations
    jdbc {
      type => "research_auth"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_research?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research_relationIds.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_research_relationIds.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # research end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    # Main research/survey documents
    if [type] == "research" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "research"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Research authorization/visibility relations
    if [type] == "research_auth" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "research_auth"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Second block for the same type is intentional: research docs are ALSO
    # indexed into the shared doc_recommend index, keyed by recommendId
    if [type] == "research" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "doc_recommend"
            # The source row's recommendId column is used as the ES document id
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # User accounts from the cloud_system database
    jdbc {
      type => "account"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_system?tinyInt1isBit=false"
      # NOTE(review): plain-text DB credentials; consider the logstash keystore
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_account.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_account.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # Organizations/departments from the cloud_system database
    jdbc {
      type => "organization"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_system?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_organization.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_organization.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    if [type] == "organization" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "organization"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    if [type] == "account" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "account"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # assignment start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "assignment"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      # NOTE(review): plain-text DB credentials; consider the logstash keystore
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_assignment.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # Assignment authorization/visibility relations
    jdbc {
      type => "assignment_auth"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment_relationIds.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_assignment_relationIds.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # assignment end -----------------------------------------------------------------------------------------------------
    # training_project start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "training_project"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_training_project.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # Training-project authorization/visibility relations
    jdbc {
      type => "training_project_auth"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_relationIds.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_training_project_relationIds.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # Training-project enrollment records
    jdbc {
      type => "training_project_enroll_record"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      # NOTE(review): "quety_" looks like a typo for "query_" — presumably it
      # matches the actual filename on disk; verify before renaming
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/quety_training_project_record_enroll.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_training_project_record_enroll.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # training_project end -----------------------------------------------------------------------------------------------------
    # biz_visible_range start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "biz_visible_range"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.1.48:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "wmyadmin"
      jdbc_password => "wmyRDS3306"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_visible_range.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_training_project_visible_range.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 5 minutes
      schedule => "*/5 * * * *"
    }
    # biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    # Main training-project documents
    if [type] == "training_project" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "training_project"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Training-project authorization/visibility relations
    if [type] == "training_project_auth" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "training_project_auth"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Second block for the same type is intentional: training projects are ALSO
    # indexed into the shared doc_recommend index, keyed by recommendId
    if [type] == "training_project" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "doc_recommend"
            # The source row's recommendId column is used as the ES document id
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Training-project enrollment records
    if [type] == "training_project_enroll_record" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "training_project_enroll_record"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Main assignment documents
    if [type] == "assignment" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "assignment"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Assignment authorization/visibility relations
    if [type] == "assignment_auth" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "assignment_auth"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Second block for the same type is intentional: assignments are ALSO
    # indexed into the shared doc_recommend index, keyed by recommendId
    if [type] == "assignment" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "doc_recommend"
            # The source row's recommendId column is used as the ES document id
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Business visible-range relations
    if [type] == "biz_visible_range" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "biz_visible_range"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
# NOTE(review): the settings below were floating OUTSIDE of any input/jdbc
# block (they duplicate the "course" jdbc input that immediately follows) and
# would make Logstash fail to parse this configuration. They are commented out
# to fix the syntax error while preserving the original text.
#      # Database connection
#      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_course?tinyInt1isBit=false"
#      jdbc_user => "root"
#      jdbc_password => "fulan123"
#      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
#      jdbc_driver_class => "com.mysql.jdbc.Driver"
#      codec => plain { charset => "UTF-8"}
#      lowercase_column_names => false
#      statement_filepath => "./query_course.sql"
#      jdbc_paging_enabled => "true"
#      jdbc_page_size => "2000"
#      # No column-value tracking: :sql_last_value is the time of the previous run
#      use_column_value => false
#      tracking_column => index_time
#      record_last_run => true
#      last_run_metadata_path => "./station_course.txt"
#      # Cron schedule: every 3 minutes
#      schedule => "*/3 * * * *"
input {
    stdin {
    }
    # Courses
    jdbc {
      type => "course"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_course?tinyInt1isBit=false"
      # NOTE(review): plain-text root credentials; consider the logstash keystore
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_course.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_course.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Training projects
    jdbc {
      type => "training_project"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_training_project.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_training_project.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Assignments
    jdbc {
      type => "assignment"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_trainning_project?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_assignment.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_assignment.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Exams
    jdbc {
      type => "exam"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_exam?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_exam.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_exam.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Research/surveys
    jdbc {
      type => "research"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_research?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_research.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_research.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Organizations/departments
    jdbc {
      type => "organization"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_system?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_organization.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_organization.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # User accounts
    jdbc {
      type => "account"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_system?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_account.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_account.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Lecturers
    jdbc {
      type => "lecturer"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_lecturer?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_lecturer.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_lecturer.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Offline (in-person) courses
    jdbc {
      type => "offline_course"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_course?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_offlineCourse.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_offlineCourse.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
    # Student cases (original comment said "case library")
    jdbc {
      type => "student_case"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://192.168.0.230:3306/cloud_course?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "fulan123"
      jdbc_driver_library => "./mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "./query_studentCase.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "2000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_studentCase.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every 3 minutes
      schedule => "*/3 * * * *"
    }
}
#filter {
# json {
# source => "message"
# remove_field => ["message"]
# }
#}
output {
    # NOTE(review): this legacy section targets a different cluster
    # ("elasticsearch.elasticsearch", not "elasticsearch7.elasticsearch") and
    # sets document_type, which only works on pre-7.x Elasticsearch — presumably
    # an older ES 6.x cluster; verify before reuse.
    if [type] == "course" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "course"
            document_type => "course"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "training_project" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "training_project"
            document_type => "training_project"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "assignment" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "assignment"
            document_type => "assignment"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "exam" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "exam"
            document_type => "exam"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "research" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "research"
            document_type => "research"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "organization" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "organization"
            document_type => "organization"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "account" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "account"
            document_type => "account"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "lecturer" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "lecturer"
            document_type => "lecturer"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "offline_course" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "offline_course"
            document_type => "offline_course"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    if [type] == "student_case" {
        elasticsearch {
            hosts => "elasticsearch.elasticsearch:9200"
            # Index name
            index => "student_case"
            document_type => "student_case"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "./logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # Albums from the cloud_album database
    jdbc {
      type => "album"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_album?tinyInt1isBit=false"
      # NOTE(review): plain-text root credentials; consider the logstash keystore
      jdbc_user => "root"
      jdbc_password => "wmy!123A"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_album.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => index_time
      record_last_run => true
      last_run_metadata_path => "./station_album.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every minute
      schedule => "* * * * *"
    }
}
filter {
    ruby {
        # Shift the event @timestamp to the local time zone
        code => "event.timestamp.time.localtime"
    }
}
output {
    # Main album documents
    if [type] == "album" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "album"
            # The source row's id column is used as the ES document id
            document_id => "%{id}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    # Second block for the same type is intentional: albums are ALSO indexed
    # into the shared doc_recommend index, keyed by recommendId
    if [type] == "album" {
        elasticsearch {
            hosts => "elasticsearch7.elasticsearch:9200"
            # Index name
            index => "doc_recommend"
            # The source row's recommendId column is used as the ES document id
            document_id => "%{recommendId}"
            template_overwrite => true
            template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
        }
    }
    stdout {
        codec => json_lines
    }
}
input {
    stdin {
    }
    # caseLibrary start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "case_library"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_case_library?tinyInt1isBit=false"
      # NOTE(review): plain-text root credentials; consider the logstash keystore
      jdbc_user => "root"
      jdbc_password => "wmy!123A"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_caseLibrary.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every minute
      schedule => "* * * * *"
    }
    # Case-library authorization/visibility relations
    jdbc {
      type => "case_library_auth"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_case_library?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "wmy!123A"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary_relationIds.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every minute
      schedule => "* * * * *"
    }
    # caseLibrary end -----------------------------------------------------------------------------------------------------
    # studentCase start -----------------------------------------------------------------------------------------------------
    jdbc {
      type => "student_case"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_case_library?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "wmy!123A"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_studentCase.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every minute
      schedule => "* * * * *"
    }
    # Student-case authorization/visibility relations
    jdbc {
      type => "student_case_auth"
      # Database connection
      jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_case_library?tinyInt1isBit=false"
      jdbc_user => "root"
      jdbc_password => "wmy!123A"
      jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain { charset => "UTF-8"}
      lowercase_column_names => false
      statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase_relationIds.sql"
      jdbc_paging_enabled => "true"
      jdbc_page_size => "5000"
      # No column-value tracking: :sql_last_value is the time of the previous run;
      # the SQL filters rows whose create/update time >= :sql_last_value
      use_column_value => false
      tracking_column => update_time
      record_last_run => true
      last_run_metadata_path => "./station_studentCase_relationIds.txt"
      # Cron schedule (minute hour day-of-month month day-of-week): every minute
      schedule => "* * * * *"
    }
    # studentCase end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Intended to shift @timestamp into the local timezone.
# NOTE(review): Time#localtime mutates the Time object, but whether that propagates back into
# the event's @timestamp is version-dependent in Logstash 7.x — verify this has any effect
code => "event.timestamp.time.localtime"
}
}
output {
# NOTE(review): this file concatenates many input/filter/output sections. If Logstash loads it
# as a single pipeline, all sections merge and every event reaches every stdout block —
# these sections should live in separate pipeline files.
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "student_case"
# The row's id column becomes the ES document id (re-syncs upsert in place)
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "student_case_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: student_case events are also indexed into the shared recommendation index
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "case_library"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "case_library_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: case_library events also feed the shared recommendation index
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror of every event to stdout
stdout {
codec => json_lines
}
}
# --- Course pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
# Course documents (full rows)
jdbc {
type => "course"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: :sql_last_value is the previous run time;
# the SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# Cron schedule (minute hour day-of-month month day-of-week): run every minute
schedule => "* * * * *"
}
# Course authorization relations
jdbc {
type => "course_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
# Run every minute
schedule => "* * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# Run every minute
schedule => "* * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "course"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "course_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: course events also feed the shared recommendation index
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "offline_course"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Exam pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
# Exam authorization relations
jdbc {
type => "exam_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
# Run every minute
schedule => "* * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "exam"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "exam_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: exam events also feed the shared recommendation index
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Lecturer pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# lecturer start -----------------------------------------------------------------------------------------------------
# Rows are hard-deleted in the source table, so deletions cannot currently be synced near-real-time
jdbc {
type => "lecturer"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_lecturer?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
# lecturer end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "lecturer" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "lecturer"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Live pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# live start -----------------------------------------------------------------------------------------------------
jdbc {
type => "live"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_live?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_live.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_live.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
# live end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "live"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# NOTE(review): no jdbc input in this section produces type "live_auth" — either the input is
# missing or this branch is dead; verify against the deployed config
if [type] == "live_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "live_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: live events also feed the shared recommendation index
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Research pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# research start -----------------------------------------------------------------------------------------------------
jdbc {
type => "research"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
# Research authorization relations
jdbc {
type => "research_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research_relationIds.txt"
# Run every minute
schedule => "* * * * *"
}
# research end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "research"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "research_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "research_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: research events also feed the shared recommendation index
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Account / organization pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
jdbc {
type => "account"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
jdbc {
type => "organization"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
# Run every minute
schedule => "* * * * *"
}
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "organization" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "organization"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "account" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "account"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Assignment / training-project pipeline (dev DB 172.16.254.64) ---
input {
stdin {
}
# assignment start -----------------------------------------------------------------------------------------------------
jdbc {
type => "assignment"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
# Assignment authorization relations
jdbc {
type => "assignment_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment_relationIds.txt"
# Run every minute
schedule => "* * * * *"
}
# assignment end -----------------------------------------------------------------------------------------------------
# training_project start -----------------------------------------------------------------------------------------------------
jdbc {
type => "training_project"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
# Run every minute
schedule => "* * * * *"
}
# Training-project authorization relations
jdbc {
type => "training_project_auth"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_relationIds.txt"
# Run every minute
schedule => "* * * * *"
}
# Training-project enrollment records
jdbc {
type => "training_project_enroll_record"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
# NOTE(review): "quety" looks like a typo for "query" — confirm the actual filename on disk
# before renaming either side
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/quety_training_project_record_enroll.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_record_enroll.txt"
# Run every minute
schedule => "* * * * *"
}
# training_project end -----------------------------------------------------------------------------------------------------
# biz_visible_range start -----------------------------------------------------------------------------------------------------
jdbc {
type => "biz_visible_range"
# Database connection
jdbc_connection_string => "jdbc:mysql://172.16.254.64:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "wmy!123A"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_visible_range.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_visible_range.txt"
# Run every minute
schedule => "* * * * *"
}
# biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: training_project events also feed the shared recommendation index
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_enroll_record" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "training_project_enroll_record"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "assignment"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment_auth" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "assignment_auth"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: assignment events also feed the shared recommendation index
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "biz_visible_range" {
elasticsearch {
hosts => "elasticsearch7.elasticsearch:9200"
# Target index name
index => "biz_visible_range"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
# --- Album pipeline (NOTE: different environment — DB 192.168.1.9, ES host elasticsearch7.slb;
# earlier sections use 172.16.254.64 / elasticsearch7.elasticsearch. This file appears to mix
# configs from two environments — verify before deploying as one unit.) ---
input {
stdin {
}
jdbc {
type => "album"
# Database connection
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_album?tinyInt1isBit=false"
jdbc_user => "root"
# NOTE(review): plaintext DB credentials in VCS — move to keystore/env vars
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_album.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# Time-based incremental sync: SQL filters on create/update time >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_album.txt"
# Cron schedule: run every minute
schedule => "* * * * *"
}
}
filter {
ruby {
# Shift @timestamp to local time — see NOTE(review) in the first filter of this file
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# Target index name
index => "album"
# The row's id column becomes the ES document id
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Fan-out: album events also feed the shared recommendation index
if [type] == "album" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# Target index name
index => "doc_recommend"
# The row's recommendId column becomes the ES document id
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
# Debug mirror to stdout
stdout {
codec => json_lines
}
}
input {
stdin {
}
# caseLibrary start -----------------------------------------------------------------------------------------------------
jdbc {
type => "case_library"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "case_library_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_caseLibrary_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_caseLibrary_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# caseLibrary end -----------------------------------------------------------------------------------------------------
# studentCase start -----------------------------------------------------------------------------------------------------
jdbc {
type => "student_case"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "student_case_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_case_library?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_studentCase_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_studentCase_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、天、月、年,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# studentCase end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "student_case"
# 需要关联的数据库中有有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "student_case_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "student_case" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "case_library"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "case_library_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "case_library" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# course start -----------------------------------------------------------------------------------------------------
# 课程
# 课程
jdbc {
type => "course"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# 课程
jdbc {
type => "course_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_course_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_course_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# course end -----------------------------------------------------------------------------------------------------
# offlineCourse start -----------------------------------------------------------------------------------------------------
jdbc {
type => "offline_course"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_course?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_offlineCourse.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_offlineCourse.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# offlineCourse end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
date {
match => ["time", "yyyy-MM-dd HH:mm:ss"]
}
}
output {
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "course"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "course_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "course" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "offline_course" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "offline_course"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# exam start -----------------------------------------------------------------------------------------------------
jdbc {
type => "exam"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "exam_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_exam?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_exam_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_exam_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# exam end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "exam"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "exam_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "exam" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# lecturer start -----------------------------------------------------------------------------------------------------
# 物理删除,暂时无法做到近实时同步
jdbc {
type => "lecturer"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_lecturer?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_lecturer.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_lecturer.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# lecturer end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "lecturer" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "lecturer"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# live start -----------------------------------------------------------------------------------------------------
jdbc {
type => "live"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_live?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_live.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => update_time
record_last_run => true
last_run_metadata_path => "./station_live.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# live end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "live"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "live_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "live_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "live" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# research start -----------------------------------------------------------------------------------------------------
jdbc {
type => "research"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "research_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_research?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_research_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_research_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# research end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "research"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "research_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "research_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "research" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
jdbc {
type => "account"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_account.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_account.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "organization"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_system?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_organization.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_organization.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "organization" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "organization"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "account" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "account"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
input {
stdin {
}
# assignment start -----------------------------------------------------------------------------------------------------
jdbc {
type => "assignment"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "assignment_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_assignment_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_assignment_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# assignment end -----------------------------------------------------------------------------------------------------
# training_project start -----------------------------------------------------------------------------------------------------
jdbc {
type => "training_project"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "training_project_auth"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_relationIds.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_relationIds.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
jdbc {
type => "training_project_enroll_record"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
# NOTE(review): "quety" looks like a typo for "query"; the file on disk is presumably named this way — rename both the file and this path together if fixing
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/quety_training_project_record_enroll.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_record_enroll.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# training_project end -----------------------------------------------------------------------------------------------------
# biz_visible_range start -----------------------------------------------------------------------------------------------------
jdbc {
type => "biz_visible_range"
# 数据库
jdbc_connection_string => "jdbc:mysql://192.168.1.9:3306/cloud_trainning_project?tinyInt1isBit=false"
jdbc_user => "root"
jdbc_password => "Yo^!3@LTvBJvtFHy"
jdbc_driver_library => "/opt/logstash-7.6.2/logstash-config/mysql-connector-java-5.1.25.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
codec => plain { charset => "UTF-8"}
lowercase_column_names => false
statement_filepath => "/opt/logstash-7.6.2/logstash-config/sql/query_training_project_visible_range.sql"
jdbc_paging_enabled => "true"
jdbc_page_size => "5000"
# 不使用字段值追踪,直接用该次更新的当前时间赋值到:sql_last_value,然后在sql中用创建时间和更新时间 >= :sql_last_value
use_column_value => false
tracking_column => index_time
record_last_run => true
last_run_metadata_path => "./station_training_project_visible_range.txt"
# 设置监听间隔 各字段含义(由左至右)分、时、日、月、周,全部为*默认含义为每分钟都更新
schedule => "* * * * *"
}
# biz_visible_range end -----------------------------------------------------------------------------------------------------
}
filter {
ruby {
code => "event.timestamp.time.localtime"
}
}
output {
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "training_project"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "training_project_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "training_project_enroll_record" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "training_project_enroll_record"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "assignment"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment_auth" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "assignment_auth"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "assignment" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "doc_recommend"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{recommendId}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
if [type] == "biz_visible_range" {
elasticsearch {
hosts => "elasticsearch7.slb:9200"
# index名
index => "biz_visible_range"
# 需要关联的数据库中有一个id字段,对应索引的id号
document_id => "%{id}"
template_overwrite => true
template => "/opt/logstash-7.6.2/logstash-config/template/logstash.json"
}
}
stdout {
codec => json_lines
}
}
-- query_account.sql: rows for the "account" Elasticsearch index, one per account,
-- joined with its organization and company. Numeric ids are concat()'d with ''
-- so they are emitted as strings (indexed as keywords, not longs).
SELECT concat(a.id, '') AS id,
a.`name`,
a.sex,
a.description,
a.full_name AS fullName,
-- kw_* duplicates are mapped as keyword (exact-match) fields in the ES template
a.full_name AS kw_fullName,
a.work_num AS workNum,
a.telephone,
a.mobile,
a.email,
a.wechat,
a.position AS duty,
a.position AS kw_duty,
a.enabled,
a.locked,
concat(a.org_id, '') AS orgId,
o.`name` AS orgName,
o.`name` AS kw_orgName,
o.left_index AS orgLeftIndex,
o.right_index AS orgRightIndex,
concat(a.company_id, '') AS companyId,
c.`name` AS companyName,
a.remark_first AS remark1,
a.remark_first AS kw_remark1,
a.remark_second AS remark2,
a.remark_second AS kw_remark2,
a.remark_third AS remark3,
a.remark_third AS kw_remark3,
a.index_time AS indexTime
FROM account a
LEFT JOIN `organization` o ON a.org_id = o.id
LEFT JOIN company c ON c.id = a.company_id
-- Incremental sync: :sql_last_value is the scheduler's last run time (UTC);
-- CONVERT_TZ shifts it to +08:00 to match the DB-local index_time columns.
-- A change on the joined org or company also re-syncs the account row.
WHERE a.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
or o.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
or c.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- query_album.sql: rows for the "album" index plus a recommendId/recommendType
-- pair so the same row can also populate the shared "doc_recommend" index.
SELECT concat(a.id, '') AS id,
concat(a.id, '_album') AS recommendId,
'album' AS recommendType,
a.`name` AS `name`,
a.`name` AS kw_name,
a.tags AS keyword,
-- keyword copy with commas turned into spaces for whitespace tokenization
replace(a.tags, ",", " ") AS kw_keyword,
a.classify_id AS classifyId,
ac.`name` AS classifyName,
ac.`name` AS kw_classifyName,
a.del_flg AS deleted,
a.scope as visibleRange,
a.image AS image,
a.create_time AS createTime,
a.update_time AS updateTime,
a.description AS description,
concat(a.company_id, '') AS companyId,
concat(a.site_id, '') AS siteId,
concat(a.org_id, '') AS orgId,
-- remap shelves: 2 -> 0, 1 -> 1, 0 -> 2; "* 1" forces a numeric result
(CASE a.shelves WHEN 2 THEN 0 WHEN 1 THEN 1 WHEN 0 THEN 2 END) * 1 AS `status`,
a.index_time AS indexTime
FROM album a
LEFT JOIN album_classify ac ON ac.id = a.classify_id
-- incremental sync window (see query_account.sql for the CONVERT_TZ rationale)
WHERE (ac.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') OR
a.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
\ No newline at end of file
-- query_assignment.sql: rows for the "assignment" index (and "doc_recommend"
-- via recommendId/recommendType).
SELECT concat(id, '') AS id,
concat(id, '_assignment') AS recommendId,
'assignment' AS recommendType,
serial_num AS `code`,
amendable,
`name`,
`name` AS kw_name,
keywords AS keyword,
replace(keywords, ",", " ") AS kw_keyword,
-- '2038-01-01 00:00:00' is the sentinel for "no deadline"; map it to NULL
(CASE finish_time WHEN '2038-01-01 00:00:00' THEN NULL ELSE finish_time END) AS endTime,
-- shift status to be 0-based; "* 1" forces a numeric result
(`status` - 1) * 1 AS `status`,
-- status 0 means logically deleted
(CASE `status` WHEN 0 THEN 1 ELSE 0 END) AS deleted,
visible_range * 1 AS visibleRange,
concat(company_id, '') AS companyId,
concat(site_id, '') AS siteId,
concat(org_id, '') AS orgId,
update_time AS updateTime,
create_time AS createTime,
image,
index_time AS indexTime
FROM assignment
-- incremental sync window (see query_account.sql for the CONVERT_TZ rationale)
WHERE index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- query_assignment_relationIds.sql: per-assignment list of authorized account
-- ids, written to the "assignment_auth" index as a space-separated string.
-- Fix: the outer GROUP_CONCAT used SEPARATOR '' which would fuse ids into one
-- unparseable token if an id ever grouped more than one inner row; every
-- sibling *_relationIds query uses SEPARATOR ' ', so this now matches them.
SELECT id,
recommendId,
GROUP_CONCAT(relationIds SEPARATOR ' ') AS relationIds,
-- NOTE(review): indexTime is not aggregated under GROUP BY id — this relies on
-- MySQL's non-ONLY_FULL_GROUP_BY behavior, like the sibling queries.
indexTime
FROM (
(
SELECT CONCAT(assignment_id, '') AS id,
CONCAT(assignment_id, '_assignment') AS recommendId,
GROUP_CONCAT(account_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM assignment_student
WHERE deleted = 0
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY assignment_id
)
) t
GROUP BY id
\ No newline at end of file
-- query_case_library.sql: rows for the "case_library" index. The UNION merges
-- (1) cases with active classify links (t_cla.state = 1) carrying classify
-- ids/names, and (2) cases matched only via inactive links (state = 0) with
-- NULL classify columns; the outer query then re-groups per case id and
-- concatenates the classify values.
SELECT id,
recommendId,
recommendType,
`name`,
kw_name,
`status`,
visibleRange,
deleted,
companyId,
siteId,
orgId,
updateTime,
createTime,
image,
-- NOTE(review): non-aggregated columns above under GROUP BY id rely on MySQL's
-- non-ONLY_FULL_GROUP_BY behavior picking an arbitrary (here: duplicate) row.
indexTime,
concat(group_concat(classifyId SEPARATOR ' '), '') AS classifyId,
group_concat(classifyName SEPARATOR ' ') AS classifyName
FROM (
(
SELECT concat(t_case.id, '') AS id,
concat(t_case.id, '_case_library') AS recommendId,
'caseActivity' AS recommendType,
t_case.`name` AS `name`,
t_case.`name` AS kw_name,
-- state 1 -> 0, 2 -> 1, other -> 2
(CASE t_case.state WHEN 1 THEN 0 WHEN 2 THEN 1 ELSE 2 END) AS `status`,
t_case.visible_range * 1 AS visibleRange,
(CASE t_case.state WHEN 0 THEN 1 ELSE 0 END) AS deleted,
concat(t_case.company_id, '') AS companyId,
concat(t_case.site_id, '') AS siteId,
concat(t_case.org_id, '') AS orgId,
t_case.update_time AS updateTime,
t_case.create_time AS createTime,
t_case.logo_url AS image,
t_case.index_time AS indexTime,
GROUP_CONCAT(t_cla.classify_id SEPARATOR ' ') AS classifyId,
GROUP_CONCAT(classify.`name` SEPARATOR ' ') AS classifyName
FROM case_library t_case
-- NOTE(review): "case_lirary_id" appears to be a typo baked into the schema's
-- column name — confirm against the table definition before "correcting" it.
LEFT JOIN case_library_related_classify t_cla ON t_cla.case_lirary_id = t_case.id
AND t_cla.state = 1
LEFT JOIN classify ON classify.id = t_cla.classify_id
WHERE (t_case.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
OR t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') OR
classify.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
GROUP BY t_case.id
)
UNION
(
SELECT concat(t_case.id, '') AS id,
concat(t_case.id, '_case_library') AS recommendId,
'caseActivity' AS recommendType,
t_case.`name` AS `name`,
t_case.`name` AS kw_name,
(CASE t_case.state WHEN 1 THEN 0 WHEN 2 THEN 1 ELSE 2 END) AS `status`,
t_case.visible_range * 1 AS visibleRange,
(CASE t_case.state WHEN 0 THEN 1 ELSE 0 END) AS deleted,
concat(t_case.company_id, '') AS companyId,
concat(t_case.site_id, '') AS siteId,
concat(t_case.org_id, '') AS orgId,
t_case.update_time AS updateTime,
t_case.create_time AS createTime,
t_case.logo_url AS image,
t_case.index_time AS indexTime,
NULL AS classifyId,
NULL AS classifyName
FROM case_library t_case
LEFT JOIN case_library_related_classify t_cla ON t_cla.case_lirary_id = t_case.id
AND t_cla.state = 0
WHERE (t_case.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') OR
t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
GROUP BY t_case.id
)
) t
GROUP BY id
\ No newline at end of file
-- query_case_library_relationIds.sql: per-case list of authorized relation ids
-- for the "case_library_auth" index, space-separated.
SELECT id,
recommendId,
group_concat(relationIds SEPARATOR ' ') as relationIds,
-- NOTE(review): indexTime is non-aggregated under GROUP BY id — relies on
-- MySQL's non-ONLY_FULL_GROUP_BY behavior.
indexTime
FROM (
(
SELECT concat(case_library_id, '') AS id,
concat(case_library_id, '_case_library') AS recommendId,
GROUP_CONCAT(relation_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM case_library_authorize
WHERE state = 1
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY case_library_id
)
) t
GROUP BY id
\ No newline at end of file
-- query_course.sql: rows for the "course" index (and "doc_recommend" via
-- recommendId/recommendType), joined with the course's classify name.
SELECT concat(c.id, '') AS id,
concat(c.id, '_course') AS recommendId,
'course' AS recommendType,
concat(c.classify_id, '') AS classifyId,
cla.`name` AS classifyName,
cla.`name` AS kw_classifyName,
c.`name`,
c.`name` AS kw_name,
c.`code`,
c.`code` AS kw_code,
c.author_unit AS authorUnit,
c.`description`,
c.tags AS keyword,
replace(c.tags, ",", " ") AS kw_keyword,
concat(c.source, '') AS source,
c.release_time AS releaseTime,
c.del_flg AS deleted,
-- remap shelves: 2 -> 0, 1 -> 1, 0 -> 2; "* 1" forces a numeric result
(CASE c.shelves WHEN 2 THEN 0 WHEN 1 THEN 1 WHEN 0 THEN 2 END) * 1 AS `status`,
c.scope * 1 AS visibleRange,
concat(c.company_id, '') AS companyId,
concat(c.site_id, '') AS siteId,
concat(c.org_id, '') AS orgId,
c.update_time AS updateTime,
c.create_time AS createTime,
c.image,
c.index_time AS indexTime
FROM course c
LEFT JOIN classify cla ON cla.id = c.classify_id
-- incremental sync window; classify changes also re-sync the course row
WHERE (c.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') or
cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
\ No newline at end of file
-- query_course_relationIds.sql: per-course list of authorized relation ids for
-- the "course_auth" index, space-separated.
SELECT id,
recommendId,
group_concat(relationIds SEPARATOR ' ') AS relationIds,
-- NOTE(review): indexTime is non-aggregated under GROUP BY id — relies on
-- MySQL's non-ONLY_FULL_GROUP_BY behavior.
indexTime
FROM (
(
SELECT concat(ca.course_id, '') AS id,
concat(ca.course_id, '_course') AS recommendId,
group_concat(ca.relation_id SEPARATOR ' ') AS relationIds,
ca.index_time AS indexTime
FROM course_account ca
WHERE ca.deleted = 0
AND ca.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY ca.course_id
)
) t
GROUP BY id
\ No newline at end of file
-- query_exam.sql: rows for the "exam" index (and "doc_recommend" via
-- recommendId/recommendType).
SELECT concat(id, '') AS id,
concat(id, '_exam') AS recommendId,
'exam' AS recommendType,
exam_no AS `code`,
`name`,
`name` AS kw_name,
keywords AS keyword,
replace(keywords, ",", " ") AS kw_keyword,
`description`,
start_time AS startTime,
end_time AS endTime,
-- shift state to be 0-based; "* 1" forces a numeric result
(state - 1) * 1 AS `status`,
-- state 0 means logically deleted
(CASE state WHEN 0 THEN 1 ELSE 0 END) AS deleted,
visible_range * 1 AS visibleRange,
concat(company_id, '') AS companyId,
concat(site_id, '') AS siteId,
concat(org_id, '') AS orgId,
update_time AS updateTime,
create_time AS createTime,
image,
index_time AS indexTime
FROM exam
-- incremental sync window (see query_account.sql for the CONVERT_TZ rationale)
WHERE index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- query_exam_relationIds.sql: authorization rows for the "exam_auth" index.
-- NOTE(review): unlike the sibling *_relationIds queries (which key the
-- document by the business id, e.g. case_library_id), this one uses
-- relation_id both as the document id AND as the relationIds payload, so each
-- document holds only its own id. Verify against tr_exam_authorize's schema —
-- an exam-id column (e.g. exam_id) was likely intended for id/recommendId.
SELECT id,
recommendId,
group_concat(relationIds SEPARATOR ' ') AS relationIds,
indexTime
FROM (
(
SELECT concat(relation_id, '') AS id,
concat(relation_id, '_exam') AS recommendId,
group_concat(relation_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM tr_exam_authorize
WHERE state = 1
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY relation_id
)
) t
GROUP BY id
\ No newline at end of file
-- query_lecturer.sql: one row per lecturer for the "lecturer" index, with
-- keywords, classification roots/names, and authorized relation ids folded
-- into space-separated strings.
-- NOTE(review): GROUP_CONCAT over three independent 1:N joins (lk, lec, lr)
-- multiplies row counts, so concatenated values can repeat; consider
-- GROUP_CONCAT(DISTINCT ...) — confirm whether duplicates matter downstream.
SELECT concat(l.id, '') AS id,
l.NAME AS username,
l.account_id AS accountId,
l.lecturer_name AS lecturerName,
GROUP_CONCAT(lk.keywords SEPARATOR ' ') AS keyword,
GROUP_CONCAT(lec.roots SEPARATOR ' ') AS classifyId,
GROUP_CONCAT(lec.`name` SEPARATOR ' ') AS classifyName,
l.avatar,
l.avatar as image,
l.title AS title,
l.source_code as source,
-- lecturer status 0 -> 2, 1 -> 1; "* 1" forces a numeric result
(CASE l.STATUS WHEN 0 THEN 2 WHEN 1 THEN 1 END) * 1 AS status,
l.del_flag AS deleted,
concat(l.company_id, '') AS companyId,
concat(l.site_id, '') AS siteId,
l.update_time as updateTime,
l.create_time as createTime,
GROUP_CONCAT(lr.relation_id SEPARATOR ' ') AS relationIds
FROM lecturer l
LEFT JOIN lecturer_keywords lk ON lk.lecturer_id = l.id
LEFT JOIN tr_lecturer_classification lc ON lc.lecturer_id = l.id
left join (
-- derived table: for each classification link, build "roots" = the first two
-- ancestor ids from parent_ids plus the node's own id, space-separated
select lc.classification_id idd,b.name,b.roots from tr_lecturer_classification lc
left join (select id,name,REPLACE(concat(substring_index(substring_index(parent_ids,',',3),',',-2),' ',id),',',' ') roots
from lecturer_classification
where `status`=1 and del_flag=0
) b on(b.id = lc.classification_id)
GROUP BY lc.classification_id
HAVING b.roots is not null
) lec on lc.classification_id = lec.idd
LEFT JOIN lecturer_relation lr ON lr.lecturer_id = l.id AND lr.del_flag = 0
GROUP BY l.id
ORDER BY l.lecturer_name
\ No newline at end of file
-- Per-lecturer classification ids/names, flattened with GROUP_CONCAT (default
-- comma separator) for recommendId '<lecturer_id>_lecturer' documents.
-- NOTE(review): the WHERE filter on the left-joined table (lc.del_flag = 0)
-- turns the LEFT JOIN into an effective INNER JOIN — lecturers whose every
-- classification is deleted disappear from this result; confirm intended.
SELECT tlc.lecturer_id AS id,
concat(tlc.lecturer_id, '_lecturer') AS recommendId,
GROUP_CONCAT(tlc.classification_id) AS classifyId,
GROUP_CONCAT(lc.`name`) AS classifyName
FROM tr_lecturer_classification tlc
LEFT JOIN lecturer_classification lc ON lc.id = tlc.classification_id
WHERE lc.del_flag = 0
GROUP BY tlc.lecturer_id
\ No newline at end of file
-- Per-lecturer keyword aggregation: one row per lecturer with all keyword
-- entries joined by spaces. concat(..., '') coerces the numeric id to a string
-- so ES indexes it as text rather than long.
SELECT concat(lk.lecturer_id, '') AS id,
l.NAME AS username,
l.account_id AS accountId,
l.lecturer_name AS lecturerName,
GROUP_CONCAT(lk.keywords SEPARATOR ' ') AS keyword
FROM lecturer l
LEFT JOIN lecturer_keywords lk ON lk.lecturer_id = l.id
GROUP BY l.id
ORDER BY l.lecturer_name
\ No newline at end of file
-- Simplified lecturer export (variant of the roots-based query above): uses
-- raw classification ids instead of computed root paths, and no ORDER BY.
-- NOTE(review): multiple 1-N joins fan out before GROUP_CONCAT, so keyword /
-- classifyId / classifyName / relationIds lists may repeat values.
SELECT concat(l.id, '') AS id,
l.`name` AS username,
l.account_id AS accountId,
l.lecturer_name AS lecturerName,
GROUP_CONCAT(lk.keywords SEPARATOR ' ') AS keyword,
GROUP_CONCAT(lc.classification_id SEPARATOR ' ') AS classifyId,
GROUP_CONCAT(lec.`name` SEPARATOR ' ') AS classifyName,
l.avatar,
l.avatar AS image,
l.title AS title,
l.source_code AS source,
-- DB status 0 -> 2, 1 -> 1; * 1 forces a numeric type for ES mapping.
(CASE l.STATUS WHEN 0 THEN 2 WHEN 1 THEN 1 END) * 1 AS `status`,
l.del_flag AS deleted,
concat(l.company_id, '') AS companyId,
concat(l.site_id, '') AS siteId,
l.update_time AS updateTime,
l.create_time AS createTime,
GROUP_CONCAT(lr.relation_id SEPARATOR ' ') AS relationIds
FROM lecturer l
LEFT JOIN lecturer_keywords lk ON lk.lecturer_id = l.id
LEFT JOIN tr_lecturer_classification lc ON lc.lecturer_id = l.id
LEFT JOIN lecturer_classification lec ON lc.classification_id = lec.id
LEFT JOIN lecturer_relation lr ON lr.lecturer_id = l.id
AND lr.del_flag = 0
GROUP BY l.id
\ No newline at end of file
-- Live-activity export with keyword ("kw_*") companion fields for the ES
-- dynamic-template mapping (kw_* fields map to keyword/whitespace analyzers).
-- No incremental filter: full export, one row per live activity.
SELECT concat(a.id, '') AS id,
concat(a.id, '_live') AS recommendId,
'live' as recommendType,
a.title AS `name`,
a.title AS kw_name,
a.channel AS `code`,
a.channel AS kw_code,
a.keywords AS keyword,
-- Comma-separated keywords become space-separated for whitespace analysis.
replace(a.keywords, ",", " ") AS kw_keyword,
GROUP_CONCAT(au.account_id SEPARATOR ' ') AS relationIds,
a.anchor as anchorName,
a.anchor as kw_anchorName,
-- scope 0 -> visibleRange 1 (everyone), otherwise 2 (restricted).
(case a.scope when 0 then 1 when 1 then 2 else 2 end) as visibleRange,
concat(a.company_id, '') AS companyId,
concat(a.site_id, '') AS siteId,
concat(a.org_id, '') AS orgId,
a.deleted,
a.logo_image as image,
a.shelves as `status`
FROM live_activity a
LEFT JOIN scope_authorization au ON au.live_id = a.id
GROUP BY a.id
\ No newline at end of file
-- Live-activity export, variant without the kw_* companion fields (otherwise
-- identical to the kw_ query above). Full export, one row per live activity.
SELECT concat(a.id, '') AS id,
concat(a.id, '_live') AS recommendId,
'live' as recommendType,
a.title AS name,
a.channel AS code,
a.keywords AS keyword,
GROUP_CONCAT(au.account_id SEPARATOR ' ') AS relationIds,
a.anchor as anchorName,
-- scope 0 -> visibleRange 1 (everyone), otherwise 2 (restricted).
(case a.scope when 0 then 1 when 1 then 2 else 2 end) as visibleRange,
concat(a.company_id, '') AS companyId,
concat(a.site_id, '') AS siteId,
concat(a.org_id, '') AS orgId,
a.deleted,
a.logo_image as image,
a.shelves as `status`
FROM live_activity a
LEFT JOIN scope_authorization au ON au.live_id = a.id
GROUP BY a.id
\ No newline at end of file
-- Logstash JDBC incremental sync of offline courses changed since the last run
-- (:sql_last_value is UTC, shifted to +08:00 to match index_time).
SELECT concat(id, '') as id,
concat(id, '_offlineCourse') AS recommendId,
'offlineCourse' as recommendType,
`name`,
`code`,
author_unit AS authorUnit,
`description`,
tags as keyword,
deleted,
-- Remap DB status -> ES status: 2 -> 0, 0 -> 1, 1 -> 2; * 1 forces numeric.
(case status
when 2 then 0
when 0 then 1
when 1 then 2
end) * 1 as `status`,
concat(company_id, '') AS companyId,
concat(site_id, '') AS siteId,
concat(org_id, '') AS orgId,
update_time as updateTime,
create_time as createTime,
image,
index_time as indexTime
FROM offline_course
WHERE index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- Incremental sync of the organization tree. leftIndex/rightIndex are
-- nested-set boundaries used for subtree queries; layer is the tree depth.
SELECT concat(id, '') AS id,
`name`,
parent_id AS parentId,
layer,
left_index AS leftIndex,
right_index AS rightIndex,
deleted,
concat(company_id, '') AS companyId,
index_time as indexTime
FROM `organization`
where index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- Incremental sync of research (survey) activities into the recommend index.
SELECT concat(id, '') AS id,
concat(id, '_research') AS recommendId,
'research' AS recommendType,
research_no AS `code`,
`name`,
`name` AS kw_name,
keywords AS keyword,
-- Comma-separated keywords -> space-separated for whitespace analysis.
replace(keywords, ",", " ") AS kw_keyword,
start_time AS startTime,
end_time AS endTime,
state * 1 AS `status`,
deleted,
visible_range * 1 AS visibleRange,
concat(company_id, '') AS companyId,
concat(site_id, '') AS siteId,
concat(org_id, '') AS orgId,
update_time AS updateTime,
create_time AS createTime,
image,
index_time AS indexTime
FROM research
WHERE index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- Incremental sync of research authorization scope: one row per research id
-- with the space-separated list of authorized relation ids.
SELECT id,
recommendId,
group_concat(relationIds SEPARATOR ' ') AS relationIds,
indexTime
FROM (
(
SELECT concat(research_id, '') AS id,
concat(research_id, '_research') AS recommendId,
group_concat(relation_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM tr_research_authorize
WHERE deleted = 0
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY research_id
)
) t
GROUP BY id
\ No newline at end of file
-- Incremental sync of student cases with their classifications.
-- UNION of two branches:
--   branch 1: cases with at least one active (state = 1) classification,
--             classifyId/classifyName populated via the classify table;
--   branch 2: the same cases with NULL classify columns, so a case whose last
--             classification was removed still produces a row (the outer
--             GROUP_CONCAT then merges both branches per case id).
-- A row is picked up when ANY of the four joined tables changed since
-- :sql_last_value (UTC, shifted to +08:00).
SELECT id,
recommendId,
recommendType,
`name`,
kw_name,
`status`,
visibleRange,
deleted,
companyId,
siteId,
orgId,
updateTime,
createTime,
keyword,
replace(kw_keyword, ",", " ") as kw_keyword,
belongCaseActivity,
caseActivityName,
kw_caseActivityName,
author,
image,
indexTime,
concat(GROUP_CONCAT(classifyId SEPARATOR ' '), '') AS classifyId,
GROUP_CONCAT(classifyName SEPARATOR ' ') AS classifyName
FROM (
(
SELECT concat(t_case.id, '') AS id,
concat(t_case.id, '_student_case') AS recommendId,
'case' AS recommendType,
t_case.title AS `name`,
t_case.title AS kw_name,
-- DB state 1 -> status 0, 2 -> 1, else 2.
(CASE t_case.state WHEN 1 THEN 0 WHEN 2 THEN 1 ELSE 2 END) AS `status`,
t_case.visible_range * 1 AS visibleRange,
-- state 0 means the case is deleted.
(CASE t_case.state WHEN 0 THEN 1 ELSE 0 END) AS deleted,
concat(t_case.company_id, '') AS companyId,
concat(t_case.site_id, '') AS siteId,
concat(t_case.org_id, '') AS orgId,
t_case.update_time AS updateTime,
t_case.create_time AS createTime,
t_case.keywords AS keyword,
t_case.keywords AS kw_keyword,
t_case.case_library_id AS belongCaseActivity,
cl.`name` AS caseActivityName,
cl.`name` AS kw_caseActivityName,
t_case.account_id AS author,
t_case.logo_url AS image,
t_case.index_time AS indexTime,
GROUP_CONCAT(classify.id SEPARATOR ' ') AS classifyId,
GROUP_CONCAT(classify.`name` SEPARATOR ' ') AS classifyName
FROM student_case t_case
LEFT JOIN case_library cl ON cl.id = t_case.case_library_id
LEFT JOIN student_case_related_classify t_cla ON t_case.id = t_cla.student_case_id
AND t_cla.state = 1
LEFT JOIN classify ON classify.id = t_cla.classify_id
WHERE (t_case.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
OR cl.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') OR
t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
OR classify.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
GROUP BY id
)
UNION
(
SELECT concat(t_case.id, '') AS id,
concat(t_case.id, '_student_case') AS recommendId,
'case' AS recommendType,
t_case.title AS `name`,
t_case.title AS kw_name,
(CASE t_case.state WHEN 1 THEN 0 WHEN 2 THEN 1 ELSE 2 END) AS `status`,
t_case.visible_range * 1 AS visibleRange,
(CASE t_case.state WHEN 0 THEN 1 ELSE 0 END) AS deleted,
concat(t_case.company_id, '') AS companyId,
concat(t_case.site_id, '') AS siteId,
concat(t_case.org_id, '') AS orgId,
t_case.update_time AS updateTime,
t_case.create_time AS createTime,
t_case.keywords AS keyword,
t_case.keywords AS kw_keyword,
t_case.case_library_id AS belongCaseActivity,
cl.`name` AS caseActivityName,
cl.`name` AS kw_caseActivityName,
t_case.account_id AS author,
t_case.logo_url AS image,
t_case.index_time AS indexTime,
NULL AS classifyId,
NULL AS classifyName
FROM student_case t_case
LEFT JOIN case_library cl ON cl.id = t_case.case_library_id
LEFT JOIN student_case_related_classify t_cla ON t_case.id = t_cla.student_case_id
AND t_cla.state = 1
WHERE (t_case.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
OR cl.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') OR
t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
GROUP BY id
)
) t
GROUP BY id
\ No newline at end of file
-- Incremental sync of student-case classifications.
-- Branch 1 gathers active (state = 1) classification ids/names per case;
-- branch 2 emits a NULL-classify row for cases whose mapping was deactivated
-- (state = 0), so the ES document gets its classify fields cleared.
SELECT id,
recommendId,
GROUP_CONCAT(classifyId SEPARATOR ' ') as classifyId,
GROUP_CONCAT(classifyName SEPARATOR ' ') AS classifyName,
GROUP_CONCAT(classifyName SEPARATOR ' ') AS kw_classifyName,
indexTime
FROM (
(
SELECT concat(t_cla.student_case_id, '') AS id,
concat(t_cla.student_case_id, '_student_case') AS recommendId,
GROUP_CONCAT(t_cla.classify_id SEPARATOR ' ') AS classifyId,
GROUP_CONCAT(c.`name` SEPARATOR ' ') AS classifyName,
GROUP_CONCAT(c.`name` SEPARATOR ' ') AS kw_classifyName,
t_cla.index_time AS indexTime
FROM student_case_related_classify t_cla
left join classify c on c.id = t_cla.classify_id
WHERE t_cla.state = 1
AND (t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') or
c.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
GROUP BY t_cla.student_case_id
)
UNION
(
SELECT concat(t_cla.student_case_id, '') AS id,
concat(t_cla.student_case_id, '_student_case') AS recommendId,
NULL AS classifyId,
NULL AS classifyName,
NULL AS kw_classifyName,
t_cla.index_time AS indexTime
FROM student_case_related_classify t_cla
WHERE t_cla.state = 0
AND t_cla.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY t_cla.student_case_id
)
) t
GROUP BY id
\ No newline at end of file
-- Incremental sync of student-case authorization scope: one row per student
-- case with the space-separated list of authorized relation ids, keyed as
-- '<student_case_id>_student_case' to match the case sync's recommendId.
-- FIX: the inner query previously selected relation_id for id/recommendId while
-- grouping by student_case_id, which attached each case's relationIds list to
-- an arbitrary relation id instead of the case id; id and recommendId are now
-- derived from student_case_id (consistent with the research/training-project
-- authorize queries). Output columns are unchanged.
SELECT id,
recommendId,
GROUP_CONCAT(relationIds SEPARATOR ' ') AS relationIds,
indexTime
FROM (
(
SELECT concat(student_case_id, '') AS id,
concat(student_case_id, '_student_case') AS recommendId,
GROUP_CONCAT(student_case_authorize.relation_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM student_case_authorize
WHERE state = 1
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY student_case_id
)
) t
GROUP BY id
\ No newline at end of file
-- Incremental sync of training projects joined with their classification name.
-- Picked up when either the project or its classification changed since
-- :sql_last_value (UTC, shifted to +08:00).
SELECT concat(tp.id, '') AS id,
concat(tp.id, '_training_project') AS recommendId,
'trainingProject' AS recommendType,
tp.`name`,
tp.`name` AS kw_name,
tp.tp_classification_id AS classifyId,
tc.`name` AS classifyName,
tc.`name` AS kw_classifyName,
tp.start_time AS startTime,
tp.end_time AS endTime,
tp.`status` * 1 AS `status`,
-- NULL enable_enroll is treated as "enrollment disabled" (0).
ifnull(tp.`enable_enroll` * 1, 0) AS enableEnroll,
tp.release_time AS releaseTime,
tp.deleted,
tp.key_words AS keyword,
replace(tp.key_words, ",", " ") AS kw_keyword,
tp.`description`,
-- DB visible_range 1 -> 1 (everyone), 0 -> 2 (restricted); * 1 forces numeric.
(CASE tp.visible_range WHEN 1 THEN 1 WHEN 0 THEN 2 END) * 1 AS visibleRange,
concat(tp.company_id, '') AS companyId,
concat(tp.site_id, '') AS siteId,
concat(tp.org_id, '') AS orgId,
tp.update_time AS updateTime,
tp.create_time AS createTime,
tp.logo_img AS image,
tp.index_time AS indexTime
FROM training_project tp
LEFT JOIN tp_classification tc ON tc.id = tp.tp_classification_id
WHERE (tp.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00') or
tc.index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00'))
\ No newline at end of file
-- Incremental sync of training-project authorization scope: one row per
-- project (biz_id) with the space-separated list of authorized relation ids.
SELECT id,
recommendId,
group_concat(relationIds SEPARATOR ' ') AS relationIds,
indexTime
FROM (
(
SELECT concat(biz_id, '') AS id,
concat(biz_id, '_training_project') AS recommendId,
group_concat(tp_authorization_range.relation_id SEPARATOR ' ') AS relationIds,
index_time AS indexTime
FROM tp_authorization_range
WHERE deleted = 0
AND index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
GROUP BY biz_id
)
) t
GROUP BY id
\ No newline at end of file
-- Incremental sync of raw training-project authorization rows (one document
-- per authorization record, including soft-deleted ones so ES can remove them).
SELECT concat(id, '_training_project_auth') AS id,
'training_project_auth' AS bizType,
CONCAT(biz_id, '') AS bizId,
type * 1 AS type,
CONCAT(relation_id, '') AS relationId,
`name` AS `name`,
CONCAT(site_id, '') AS siteId,
index_time AS indexTime,
deleted * 1 AS deleted
FROM tp_authorization_range
WHERE index_time >= CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
-- Incremental sync of passed training-project enrollments, tracked by
-- join_time rather than index_time.
-- NOTE(review): this query uses a strict '>' while the sibling queries use
-- '>=' — rows sharing the exact :sql_last_value boundary timestamp are
-- skipped here; confirm this asymmetry is intentional.
SELECT concat(id, '') AS id,
concat(training_project_id, '') AS trainingProjectId,
concat(site_id, '') AS siteId,
concat(enroll_id, '') AS enrollId,
concat(account_id, '') AS accountId,
start_time AS startTime,
end_time AS endTime,
join_time AS joinTime
FROM tp_student_enroll_passed
WHERE join_time > CONVERT_TZ(:sql_last_value, '+00:00', '+08:00')
\ No newline at end of file
{
"template": "*",
"settings": {
"index": {
"number_of_shards": 1,
"number_of_replicas": 0
}
},
"mappings": {
"dynamic_templates": [
{
"kw_keyword_setting": {
"match_mapping_type": "string",
"match": "kw_keyword",
"mapping": {
"type": "text",
"analyzer": "whitespace"
}
}
},
{
"keyword_setting": {
"match_mapping_type": "string",
"match": "kw_*",
"mapping": {
"type": "keyword"
}
}
},
{
"code_setting": {
"match_mapping_type": "long",
"match": "code",
"mapping": {
"type": "text"
}
}
},
{
"classifyId_setting": {
"match_mapping_type": "long",
"match": "classifyId",
"mapping": {
"type": "text"
}
}
},
{
"string_setting": {
"match_mapping_type": "string",
"mapping": {
"type": "text",
"analyzer": "ik_max_word",
"search_analyzer": "ik_smart"
}
}
}
]
}
}
\ No newline at end of file
package com.fulan.esearch;
import com.yizhi.esearch.application.constant.Constant;
import com.yizhi.esearch.application.pojo.biz.EsCourse;
import com.yizhi.esearch.application.repository.CourseRepository;
import com.yizhi.esearch.application.util.ResponseHelper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.SearchScrollHits;
import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.core.query.UpdateResponse;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.HashMap;
import java.util.Map;
/**
 * Exploratory tests for the Elasticsearch 7.x client via Spring Data
 * Elasticsearch 4.x ({@code ElasticsearchRestTemplate}).
 *
 * NOTE(review): these tests need a running ES cluster with real index data,
 * assert nothing, and print results to stdout — they are manual smoke tests,
 * not automated regression tests.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class CloudEsearch7ApplicationTests {
// REST-client based template, the primary search entry point in SD-ES 4.x.
@Autowired
private ElasticsearchRestTemplate elasticsearchRestTemplate;
// Same bean through the generic interface; currently unused in these tests.
@Autowired
private ElasticsearchOperations elasticsearchOperations;
@Autowired
private CourseRepository courseRepository;
/**
 * Full-text match on the course "name" field, fetching up to 1000 hits,
 * then the same query again through the (deprecated) queryForPage API.
 */
@Test
public void testSearch() {
QueryBuilder queryBuilder = QueryBuilders.matchQuery("name", "课程");
Query query = new NativeSearchQueryBuilder()
.withPageable(PageRequest.of(0, 1000))
.withQuery(queryBuilder)
.build();
SearchHits<EsCourse> searchHits = elasticsearchRestTemplate.search(query, EsCourse.class);
// Total number of matching documents (not only the fetched page).
searchHits.getTotalHits();
Page<EsCourse> page = elasticsearchRestTemplate.queryForPage(query, EsCourse.class, IndexCoordinates.of("course"));
System.out.println(ResponseHelper.ok(page));
}
/**
 * Scroll-API walkthrough: start a scroll over the course index and pull
 * three further batches with the same scroll id.
 *
 * NOTE(review): the scroll context is never cleared (no searchScrollClear
 * call), so it lives server-side until its timeout expires; the intermediate
 * searchScrollHits results are also overwritten without being read.
 */
@Test
public void testScroll() {
Query query = new NativeSearchQueryBuilder()
.build();
SearchScrollHits<EsCourse> searchScrollHits = elasticsearchRestTemplate.searchScrollStart(1000, query, EsCourse.class, IndexCoordinates.of("course"));
String scrollId = searchScrollHits.getScrollId();
searchScrollHits = elasticsearchRestTemplate.searchScrollContinue(scrollId, 1000, EsCourse.class, IndexCoordinates.of("course"));
searchScrollHits = elasticsearchRestTemplate.searchScrollContinue(scrollId, 1000, EsCourse.class, IndexCoordinates.of("course"));
searchScrollHits = elasticsearchRestTemplate.searchScrollContinue(scrollId, 1000, EsCourse.class, IndexCoordinates.of("course"));
}
/**
 * Partial-document update of a single course by id: only the "name" field
 * in the supplied Document is written; the update result is printed.
 */
@Test
public void testUpdateQuery() {
Map<String, Object> params = new HashMap<>();
params.put("name", "1231fdsafdsafds23");
UpdateQuery updateQuery = UpdateQuery.builder("1224671968932433920")
// .withParams(params)
.withDocument(Document.from(params))
.build();
UpdateResponse updateResponse = elasticsearchRestTemplate.update(updateQuery, IndexCoordinates.of(Constant.INDEX_COURSE));
System.out.println(updateResponse.getResult());
System.out.println(UpdateResponse.Result.UPDATED);
System.out.println(UpdateResponse.Result.UPDATED.equals(updateResponse.getResult()));
}
}
//package com.fulan.esearch;
//
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONObject;
//import com.fulan.application.vo.BusinessVo4PortalParam;
//import com.fulan.application.vo.manage.RuleItemVo;
//import com.fulan.application.vo.manage.RuleVo;
//import Constant;
//import CourseController;
//import GlobalSearchController;
//import LecturerController;
//import BizKeywordController;
//import OrganizationController;
//import RecommendController;
//import com.fulan.application.context.ContextHolder;
//import com.fulan.application.context.RequestContext;
//import com.fulan.esearch.application.pojo.biz.BizKeyword;
//import com.fulan.esearch.application.pojo.biz.Course;
//import com.fulan.esearch.application.pojo.biz.TrainingProject;
//import CourseRepository;
//import LecturerRepository;
//import OfflineCourseRepository;
//import TrainingProjectRepository;
//import org.elasticsearch.client.support.AbstractClient;
//import org.elasticsearch.index.query.QueryBuilder;
//import org.elasticsearch.index.query.QueryBuilders;
//import org.junit.Test;
//import org.junit.runner.RunWith;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.boot.test.context.SpringBootTest;
//import org.springframework.data.domain.Page;
//import org.springframework.data.domain.PageRequest;
//import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
////import org.springframework.data.elasticsearch.core.ScrolledPage;
//import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
//import org.springframework.data.elasticsearch.core.query.SearchQuery;
//import org.springframework.test.context.junit4.SpringRunner;
//
//import javax.annotation.PostConstruct;
//import java.util.*;
//
//@RunWith(SpringRunner.class)
//@SpringBootTest
//public class CloudEsearchApplicationTests {
//
// @Autowired
// private ElasticsearchTemplate elasticsearchTemplate;
// @Autowired
// private OrganizationController organizationController;
// @Autowired
// private LecturerRepository lecturerRepository;
// @Autowired
// private CourseRepository courseRepository;
// @Autowired
// private TrainingProjectRepository trainingProjectRepository;
// @Autowired
// private LecturerController lecturerController;
// @Autowired
// private GlobalSearchController globalSearchController;
// @Autowired
// private OfflineCourseRepository offlineCourseRepository;
// @Autowired
// private AbstractClient client;
// @Autowired
// private RecommendController recommendController;
// @Autowired
// private BizKeywordController bizKeywordController;
// @Autowired
// private CourseController courseController;
//
// @PostConstruct
// public void post() {
// RequestContext context = new RequestContext();
// context.setCompanyId(1314L);
// context.setOrgId(1314L);
// context.setSiteId(1314L);
// ContextHolder.set(context);
// }
//
// @Test
// public void multiCondition() {
// QueryBuilder queryBuilder = QueryBuilders.boolQuery()
// .must(QueryBuilders.termQuery("companyId", 1314L))
// .must(QueryBuilders.termQuery("siteId", 1314L));
//
// SearchQuery searchQuery = new NativeSearchQueryBuilder()
// .withIndices("training_project")
// .withTypes("training_project")
// .withQuery(queryBuilder)
// .build();
//
// List<TrainingProject> trainingProjects = elasticsearchTemplate.queryForList(searchQuery, TrainingProject.class);
// System.out.println(trainingProjects);
// }
//
// @Test
// public void orgIdListConditionForPage() {
// QueryBuilder queryBuilder = QueryBuilders.boolQuery();
//// .must(QueryBuilders.termQuery("companyId", 73L))
//// .must(QueryBuilders.termQuery("siteId", 189L))
//// .must(QueryBuilders.termsQuery("orgId", new Long[]{1314L, 641502377243248L}))
// // 查不出来
//// .must(QueryBuilders.termQuery("name", "握柜"))
// // 查不出来
//// .must(QueryBuilders.fuzzyQuery("name", "握柜"));
//
//
// SearchQuery searchQuery = new NativeSearchQueryBuilder()
// .withQuery(queryBuilder)
// // page从0开始
// .withPageable(new PageRequest(0, 10))
// .build();
//
// Page<Course> coruses = elasticsearchTemplate.queryForPage(searchQuery, Course.class);
// coruses.getContent().stream().forEach(element -> System.out.println(element));
// System.out.println(coruses);
// }
//
// @Test
// public void scrollForAll() {
//// List<Course> courses = new ArrayList<>();
////
//// QueryBuilder queryBuilder = QueryBuilders.boolQuery();
////// .must(QueryBuilders.termsQuery("orgId", new Long[]{1314L, 641502377243248L}));
////
//// SearchQuery searchQuery = new NativeSearchQueryBuilder()
//// .withQuery(queryBuilder)
//// .build();
////
//// Page<Course> scroll = elasticsearchTemplate.startScroll(1000, searchQuery, Course.class);
//// String scrollId = ((ScrolledPage) scroll).getScrollId();
//// while (scroll.hasContent()) {
//// courses.addAll(scroll.getContent());
//// scrollId = ((ScrolledPage) scroll).getScrollId();
//// scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, Course.class);
//// }
////
//// elasticsearchTemplate.clearScroll(scrollId);
////
//// courses.stream().forEach(ele -> System.out.println(ele.getOrgId()));
// }
//
// @Test
// public void AndOrCondition() {
// QueryBuilder queryBuildera = QueryBuilders.boolQuery()
// .must(QueryBuilders.termQuery("a", "a"));
//
//
// QueryBuilder queryBuilderb = QueryBuilders.boolQuery()
// .should(QueryBuilders.termQuery("b", "b"))
// .should(QueryBuilders.termQuery("c", "c"));
//
// QueryBuilder queryBuilder = QueryBuilders.boolQuery()
// .must(queryBuildera).must(queryBuilderb);
// System.out.println(queryBuilder.toString());
// }
//
// @Test
// public void testOrganization() {
//
//// List<Organization> organizations = new ArrayList<>();
//// Page<Organization> scroll = elasticsearchTemplate.startScroll(20000, new NativeSearchQueryBuilder().build(), Organization.class);
//// String scrollId = ((ScrolledPage) scroll).getScrollId();
//// while (scroll.hasContent()) {
//// organizations.addAll(scroll.getContent());
//// scrollId = ((ScrolledPage) scroll).getScrollId();
//// scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, Organization.class);
//// }
//// organizations.forEach(item -> organizationController.getParentNames(Long.valueOf(item.getId())));
// }
//
// /**
// * 测试字段类型影响
// */
// @Test
// public void testType() {
//// QueryBuilder queryBuilder = QueryBuilders.matchQuery("name", "风险的分类");
//// SearchQuery searchQuery = new NativeSearchQueryBuilder()
//// .withQuery(queryBuilder)
//// .build();
//// System.out.println(elasticsearchTemplate.queryForList(searchQuery, Course.class));
//
// QueryBuilder queryBuilder1 = QueryBuilders.matchQuery("companyId", 1098419202962055168L);
// SearchQuery searchQuery1 = new NativeSearchQueryBuilder()
// .withQuery(queryBuilder1)
// .build();
// System.out.println(elasticsearchTemplate.queryForList(searchQuery1, Course.class));
// }
//
// @Test
// public void testLecturer() {
// RequestContext context = new RequestContext();
// context.setSiteId(1016273921840132096L);
// context.setCompanyId(1016273921756246016L);
// ContextHolder.set(context);
//// lecturerController.list("你好", 1, 30);
// }
//
// @Test
// public void testGlobal() {
// RequestContext context = new RequestContext();
// context.setSiteId(1045129585634881536L);
// context.setCompanyId(1045129585563578368L);
// context.setRelationIds(Arrays.asList(1314L));
// ContextHolder.set(context);
//// globalSearchController.globalSearch("家庭财产保险及车辆保险");
// courseController.name("家庭财产保险及车辆保险", 1, 10);
// }
//
// @Test
// public void test() {
// JSONObject jsonObject = JSON.parseObject("{\"pageNo\":1,\"pageSize\":1,\"requestContext\":{\"accountFullName\":\"张龙滢\",\"accountId\":1150590360361861120,\"accountName\":\"zhangly\",\"admin\":false,\"authCode\":\"\",\"companyCode\":\"yzkj\",\"companyId\":1045129585563578368,\"companyName\":\"运营测试站点\",\"headPortrait\":\"http://wework.qpic.cn/bizmail/caQcYIibvN0Gu5mT5165RnwyrLcBEU5xWTl3icTiaAWIiaodmvnOJvacgQ/0\",\"orgId\":1193775707612479488,\"orgIds\":[],\"orgName\":\"\",\"relationIds\":[1150590360361861120,1193775707612479488,1045129585613910016],\"requestId\":\"ffdb63a8-638f-4c5c-bfac-1c756e1db743\",\"siteCode\":\"cszd\",\"siteId\":1045129585634881536,\"siteMember\":0,\"siteName\":\"运营测试站点\"},\"strategyIds\":[1241357637183594496,1241357637183594496,1241357470845865984],\"strategyRuleVoMap\":{1241357637183594496:[{\"andOr\":\"and\",\"list\":[{\"andOr\":\"and\",\"business\":\"course\",\"businessClassName\":\"course\",\"name\":\"蒋亚\",\"property\":\"keyword\",\"relation\":\"equals\",\"sort\":1,\"value\":\"1050216603733696512\"}],\"name\":\"内容规则\",\"sort\":1,\"template\":false,\"type\":1},{\"andOr\":\"and\",\"list\":[{\"andOr\":\"and\",\"business\":\"account\",\"businessClassName\":\"com.fulan.esearch.application.pojo.system.Account\",\"name\":\"张龙滢\",\"property\":\"fullName\",\"relation\":\"equals\",\"sort\":1,\"value\":\"1150590360361861120\"}],\"name\":\"用户规则\",\"sort\":2,\"template\":false,\"type\":2}],1241357470845865984:[{\"andOr\":\"and\",\"list\":[{\"andOr\":\"and\",\"business\":\"exam\",\"businessClassName\":\"exam\",\"name\":\"test\",\"property\":\"name\",\"relation\":\"equals\",\"sort\":1,\"value\":\"1239442620636475392\"}],\"name\":\"内容规则\",\"sort\":1,\"template\":false,\"type\":1},{\"andOr\":\"and\",\"list\":[{\"andOr\":\"and\",\"business\":\"account\",\"businessClassName\":\"com.fulan.esearch.application.pojo.system.Account\",\"name\":\"盛承龙\",\"property\":\"fullName\",\"relation\":\"equals\",\"sort\":1,\"value\":\"1241241099499073536\"}],\"name\":\"用户规则\
",\"sort\":2,\"template\":false,\"type\":2}]}}");
// BusinessVo4PortalParam param = jsonObject.toJavaObject(BusinessVo4PortalParam.class);
// System.out.println(recommendController.recommend4Portal(param));
//
//// param.getRequestContext();
//// Map<String, String> map = new HashMap<>();
//// map.put("pageNo", "1");
//// map.put("pageSize", "20");
//// map.put("context", JSON.toJSONString(param.getRequestContext()));
//// map.put("taskType", "course");
////
//// bizKeywordController.keywordPage(map);
//
// }
//
// @Test
// public void testBizKeyword() {
// SearchQuery searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.matchQuery("kwValue", "健身裤"))
// .withIndices("biz_keyword")
// .build();
// List<BizKeyword> bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
//
// searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.termQuery("kwValue", "健身裤"))
// .withIndices("biz_keyword")
// .build();
// bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
//
// searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.matchQuery("kwValue", "健身裤的好处"))
// .withIndices("biz_keyword")
// .build();
// bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
//
// searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.termQuery("kwValue", "健身裤的好处"))
// .withIndices("biz_keyword")
// .build();
// bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
//
// searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.matchQuery("value", "健身裤的好处"))
// .withIndices("biz_keyword")
// .build();
// bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
//
// searchQuery = new NativeSearchQueryBuilder()
// .withQuery(QueryBuilders.termQuery("value", "健身裤的好处"))
// .withIndices("biz_keyword")
// .build();
// bizKeywords = elasticsearchTemplate.queryForList(searchQuery, BizKeyword.class);
// System.out.println(bizKeywords);
// }
//
// @Test
// public void testVisible() {
// List<String> ids = new ArrayList<>();
// ids.add("1029927829222260736");
// ids.add("1038024579673030656");
// ids.add("1052457565373419520");
//
// String s = "1068470062963851264";
//
// QueryBuilder qb = QueryBuilders.matchQuery("id", s);
//
// SearchQuery searchQuery = new NativeSearchQueryBuilder()
// .withIndices(Constant.INDEX_COURSE)
// .withQuery(qb)
// .build();
//
// List<Course> courses = elasticsearchTemplate.queryForList(searchQuery, Course.class);
//
// System.out.println(courses);
// }
//
// @Test
// public void testVisibleRange() {
// RequestContext context = new RequestContext();
// context.setSiteId(1068470062963851264L);
// context.setCompanyId(1068470062963851264L);
// context.setRelationIds(Arrays.asList(1068470062963851264L));
// ContextHolder.set(context);
// courseController.name("定期寿险", 1, 10);
// }
//}
//
//package com.fulan.esearch;
//
//import Course;
//import TrainingProject;
//import io.searchbox.client.JestClient;
//import io.searchbox.core.Index;
//import io.searchbox.core.Search;
//import org.elasticsearch.index.query.QueryBuilder;
//import org.elasticsearch.index.query.QueryBuilders;
//import org.junit.Test;
//import org.junit.runner.RunWith;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.boot.test.context.SpringBootTest;
//import org.springframework.data.domain.Page;
//import org.springframework.data.domain.PageRequest;
//import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
//import org.springframework.data.elasticsearch.core.ScrolledPage;
//import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
//import org.springframework.data.elasticsearch.core.query.SearchQuery;
//import org.springframework.test.context.junit4.SpringRunner;
//
//import java.io.IOException;
//import java.util.ArrayList;
//import java.util.List;
//
//@RunWith(SpringRunner.class)
//@SpringBootTest
//public class CloudEsearchJestTests {
//
// @Autowired
// private JestClient jestClient;
//
// @Test
// public void singleTest() throws IOException {
// QueryBuilder queryBuilder = QueryBuilders.boolQuery()
// .must(QueryBuilders.termQuery("companyId", 1314L))
// .must(QueryBuilders.termQuery("siteId", 1314L));
//
// SearchQuery searchQuery = new NativeSearchQueryBuilder()
// .withQuery(queryBuilder)
// .build();
//
//
// Search search = new Search.Builder(searchQuery.toString()).build();
//
// Object o = jestClient.execute(search);
// System.out.println(o.toString());
// }
//
//}
//
package com.fulan.esearch;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import org.junit.Test;
/**
 * Ad-hoc date-parsing check (manual test, prints the parsed value).
 *
 * @author chengchenglong
 * @since 2019-04-16
 */
public class SingleTest {
    /**
     * Parses an ISO-8601 UTC timestamp with Hutool's {@code DateUtil}.
     * The {@code XXX} pattern letter accepts the literal 'Z' zone designator,
     * so "2020-03-04T11:07:08.000Z" parses as a zoned instant.
     *
     * FIX: dropped the unused {@code throws NoSuchFieldException,
     * IllegalAccessException} clause — nothing in the body can throw either.
     */
    @Test
    public void test() {
        String time = "2020-03-04T11:07:08.000Z";
        System.out.println(DateUtil.parse(time, "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"));
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<!-- Aggregator POM for the Elasticsearch application: inherits from wmy-parent
     and builds the API and service modules. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.yizhi</groupId>
<artifactId>wmy-parent</artifactId>
<version>1.0-SNAPSHOT</version>
<!-- 设置为空,永远从远程拉取 -->
<!-- Empty relativePath: always resolve the parent from the remote repository,
     never from the local filesystem. -->
<relativePath/>
</parent>
<artifactId>cloud-esearch</artifactId>
<packaging>pom</packaging>
<description>Elasticsearch Application</description>
<modules>
<module>cloud-esearch-api</module>
<module>cloud-esearch-service</module>
</modules>
<!-- Company Nexus group that hosts both releases and snapshots. -->
<repositories>
<repository>
<id>wmy4.0</id>
<url>http://mvn.km365.pw/nexus/content/groups/wmy4.0-group/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
</project>
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment