diff --git a/czsj-framework/src/main/java/com/czsj/framework/config/MyBatisConfig.java b/czsj-framework/src/main/java/com/czsj/framework/config/MyBatisConfig.java index 0c17794..7ed07f7 100644 --- a/czsj-framework/src/main/java/com/czsj/framework/config/MyBatisConfig.java +++ b/czsj-framework/src/main/java/com/czsj/framework/config/MyBatisConfig.java @@ -32,101 +32,101 @@ import com.czsj.common.utils.StringUtils; @Configuration public class MyBatisConfig { - @Autowired - private Environment env; - - static final String DEFAULT_RESOURCE_PATTERN = "**/*.class"; - - public static String setTypeAliasesPackage(String typeAliasesPackage) - { - ResourcePatternResolver resolver = (ResourcePatternResolver) new PathMatchingResourcePatternResolver(); - MetadataReaderFactory metadataReaderFactory = new CachingMetadataReaderFactory(resolver); - List allResult = new ArrayList(); - try - { - for (String aliasesPackage : typeAliasesPackage.split(",")) - { - List result = new ArrayList(); - aliasesPackage = ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX - + ClassUtils.convertClassNameToResourcePath(aliasesPackage.trim()) + "/" + DEFAULT_RESOURCE_PATTERN; - Resource[] resources = resolver.getResources(aliasesPackage); - if (resources != null && resources.length > 0) - { - MetadataReader metadataReader = null; - for (Resource resource : resources) - { - if (resource.isReadable()) - { - metadataReader = metadataReaderFactory.getMetadataReader(resource); - try - { - result.add(Class.forName(metadataReader.getClassMetadata().getClassName()).getPackage().getName()); - } - catch (ClassNotFoundException e) - { - e.printStackTrace(); - } - } - } - } - if (result.size() > 0) - { - HashSet hashResult = new HashSet(result); - allResult.addAll(hashResult); - } - } - if (allResult.size() > 0) - { - typeAliasesPackage = String.join(",", (String[]) allResult.toArray(new String[0])); - } - else - { - throw new RuntimeException("mybatis typeAliasesPackage 路径扫描错误,参数typeAliasesPackage:" + typeAliasesPackage + "未找到任何包"); - } - } - catch (IOException e) - { - e.printStackTrace(); - } - return typeAliasesPackage; - } - - public Resource[] resolveMapperLocations(String[] mapperLocations) - { - ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(); - List resources = new ArrayList(); - if (mapperLocations != null) - { - for (String mapperLocation : mapperLocations) - { - try - { - Resource[] mappers = resourceResolver.getResources(mapperLocation); - resources.addAll(Arrays.asList(mappers)); - } - catch (IOException e) - { - // ignore - } - } - } - return resources.toArray(new Resource[resources.size()]); - } - - @Bean - public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception - { - String typeAliasesPackage = env.getProperty("mybatis.typeAliasesPackage"); - String mapperLocations = env.getProperty("mybatis.mapperLocations"); - String configLocation = env.getProperty("mybatis.configLocation"); - typeAliasesPackage = setTypeAliasesPackage(typeAliasesPackage); - VFS.addImplClass(SpringBootVFS.class); - - final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean(); - sessionFactory.setDataSource(dataSource); - sessionFactory.setTypeAliasesPackage(typeAliasesPackage); - sessionFactory.setMapperLocations(resolveMapperLocations(StringUtils.split(mapperLocations, ","))); - sessionFactory.setConfigLocation(new DefaultResourceLoader().getResource(configLocation)); - return sessionFactory.getObject(); - } +// @Autowired +// private Environment env; +// +// static final 
String DEFAULT_RESOURCE_PATTERN = "**/*.class"; +// +// public static String setTypeAliasesPackage(String typeAliasesPackage) +// { +// ResourcePatternResolver resolver = (ResourcePatternResolver) new PathMatchingResourcePatternResolver(); +// MetadataReaderFactory metadataReaderFactory = new CachingMetadataReaderFactory(resolver); +// List allResult = new ArrayList(); +// try +// { +// for (String aliasesPackage : typeAliasesPackage.split(",")) +// { +// List result = new ArrayList(); +// aliasesPackage = ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX +// + ClassUtils.convertClassNameToResourcePath(aliasesPackage.trim()) + "/" + DEFAULT_RESOURCE_PATTERN; +// Resource[] resources = resolver.getResources(aliasesPackage); +// if (resources != null && resources.length > 0) +// { +// MetadataReader metadataReader = null; +// for (Resource resource : resources) +// { +// if (resource.isReadable()) +// { +// metadataReader = metadataReaderFactory.getMetadataReader(resource); +// try +// { +// result.add(Class.forName(metadataReader.getClassMetadata().getClassName()).getPackage().getName()); +// } +// catch (ClassNotFoundException e) +// { +// e.printStackTrace(); +// } +// } +// } +// } +// if (result.size() > 0) +// { +// HashSet hashResult = new HashSet(result); +// allResult.addAll(hashResult); +// } +// } +// if (allResult.size() > 0) +// { +// typeAliasesPackage = String.join(",", (String[]) allResult.toArray(new String[0])); +// } +// else +// { +// throw new RuntimeException("mybatis typeAliasesPackage 路径扫描错误,参数typeAliasesPackage:" + typeAliasesPackage + "未找到任何包"); +// } +// } +// catch (IOException e) +// { +// e.printStackTrace(); +// } +// return typeAliasesPackage; +// } +// +// public Resource[] resolveMapperLocations(String[] mapperLocations) +// { +// ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(); +// List resources = new ArrayList(); +// if (mapperLocations != null) +// { +// for (String mapperLocation : mapperLocations) +// { +// try +// { +// Resource[] mappers = resourceResolver.getResources(mapperLocation); +// resources.addAll(Arrays.asList(mappers)); +// } +// catch (IOException e) +// { +// // ignore +// } +// } +// } +// return resources.toArray(new Resource[resources.size()]); +// } +// +// @Bean +// public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception +// { +// String typeAliasesPackage = env.getProperty("mybatis.typeAliasesPackage"); +// String mapperLocations = env.getProperty("mybatis.mapperLocations"); +// String configLocation = env.getProperty("mybatis.configLocation"); +// typeAliasesPackage = setTypeAliasesPackage(typeAliasesPackage); +// VFS.addImplClass(SpringBootVFS.class); +// +// final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean(); +// sessionFactory.setDataSource(dataSource); +// sessionFactory.setTypeAliasesPackage(typeAliasesPackage); +// sessionFactory.setMapperLocations(resolveMapperLocations(StringUtils.split(mapperLocations, ","))); +// sessionFactory.setConfigLocation(new DefaultResourceLoader().getResource(configLocation)); +// return sessionFactory.getObject(); +// } } \ No newline at end of file diff --git a/czsj-system/pom.xml b/czsj-system/pom.xml index 881fe3a..903922c 100644 --- a/czsj-system/pom.xml +++ b/czsj-system/pom.xml @@ -23,6 +23,754 @@ czsj-common + + org.projectlombok + lombok + + + + + + + + + + + + + + + com.microsoft.sqlserver + sqljdbc4 + 4.0 + system + ${basedir}/src/main/lib/sqljdbc4-4.0.jar + + + + ch.ethz.ganymed + ganymed-ssh2 + 262 
+ + + + com.czsj + czsj-core + 3.8.8 + compile + + + + + com.baomidou + mybatis-plus-boot-starter + ${mybatisplus.version} + + + com.baomidou + mybatis-plus-generator + + + slf4j-api + org.slf4j + + + + + + com.baomidou + mybatis-plus + ${mybatisplus.version} + + + + javax.mail + mail + 1.4.7 + + + + org.apache.commons + commons-lang3 + 3.12.0 + + + io.swagger + swagger-annotations + 1.6.2 + compile + + + com.baomidou + mybatis-plus-extension + 3.3.1 + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + slf4j-log4j12 + org.slf4j + + + com.sun.jersey + jersey-json + + + jsr305 + com.google.code.findbugs + + + guava + com.google.guava + + + jettison + org.codehaus.jettison + + + jackson-core-asl + org.codehaus.jackson + + + jackson-mapper-asl + org.codehaus.jackson + + + slf4j-api + org.slf4j + + + commons-cli + commons-cli + + + commons-logging + commons-logging + + + commons-collections + commons-collections + + + commons-lang + commons-lang + + + curator-framework + org.apache.curator + + + log4j + log4j + + + netty + io.netty + + + servlet-api + javax.servlet + + + jsp-api + javax.servlet.jsp + + + jetty-util + org.mortbay.jetty + + + jetty + org.mortbay.jetty + + + + + + org.apache.hive + hive-jdbc + ${hive.jdbc.version} + + + jsr305 + com.google.code.findbugs + + + guava + com.google.guava + + + jettison + org.codehaus.jettison + + + commons-cli + commons-cli + + + curator-client + org.apache.curator + + + commons-compress + org.apache.commons + + + hadoop-common + org.apache.hadoop + + + slf4j-api + org.slf4j + + + hadoop-hdfs + org.apache.hadoop + + + snappy + org.iq80.snappy + + + antlr-runtime + org.antlr + + + hbase-client + org.apache.hbase + + + libthrift + org.apache.thrift + + + twill-common + org.apache.twill + + + twill-core + org.apache.twill + + + twill-discovery-api + org.apache.twill + + + twill-discovery-core + org.apache.twill + + + twill-zookeeper + org.apache.twill + + + avro + org.apache.avro + + + curator-recipes + org.apache.curator + + + hbase-common + org.apache.hbase + + + hbase-hadoop-compat + org.apache.hbase + + + hbase-hadoop2-compat + org.apache.hbase + + + hbase-server + org.apache.hbase + + + curator-framework + org.apache.curator + + + guice-servlet + com.google.inject.extensions + + + hadoop-client + org.apache.hadoop + + + hadoop-yarn-api + org.apache.hadoop + + + hadoop-yarn-common + org.apache.hadoop + + + jackson-core-asl + org.codehaus.jackson + + + jackson-mapper-asl + org.codehaus.jackson + + + jackson-jaxrs + org.codehaus.jackson + + + jackson-xc + org.codehaus.jackson + + + jersey-client + com.sun.jersey + + + jamon-runtime + org.jamon + + + servlet-api + javax.servlet + + + commons-logging + commons-logging + + + hadoop-annotations + org.apache.hadoop + + + commons-collections + commons-collections + + + jersey-guice + com.sun.jersey.contribs + + + log4j-slf4j-impl + org.apache.logging.log4j + + + hive-shims-common + org.apache.hive.shims + + + javax.servlet + org.eclipse.jetty.orbit + + + jsp-api + javax.servlet.jsp + + + jasper-compiler + tomcat + + + jetty-all + org.eclipse.jetty.aggregate + + + jetty + org.mortbay.jetty + + + jetty-util + org.mortbay.jetty + + + + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + guava + com.google.guava + + + commons-cli + commons-cli + + + jackson-core-asl + org.codehaus.jackson + + + jackson-mapper-asl + org.codehaus.jackson + + + servlet-api + javax.servlet + + + jetty + org.mortbay.jetty + + + jetty-util + org.mortbay.jetty + + + + + + org.apache.hbase + hbase-client + 
${hbase.version} + + + guava + com.google.guava + + + commons-logging + commons-logging + + + hadoop-auth + org.apache.hadoop + + + hadoop-common + org.apache.hadoop + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + hbase-annotations + org.apache.hbase + + + hbase-protocol + org.apache.hbase + + + jackson-mapper-asl + org.codehaus.jackson + + + slf4j-api + org.slf4j + + + jetty-util + org.mortbay.jetty + + + + + + org.apache.phoenix + phoenix-core + ${phoenix.version} + + + slf4j-log4j12 + org.slf4j + + + guava + com.google.guava + + + commons-cli + commons-cli + + + hadoop-common + org.apache.hadoop + + + avro + org.apache.avro + + + guice + com.google.inject + + + hadoop-yarn-api + org.apache.hadoop + + + hadoop-auth + org.apache.hadoop + + + jersey-core + com.sun.jersey + + + guice-servlet + com.google.inject.extensions + + + jersey-server + com.sun.jersey + + + jersey-json + com.sun.jersey + + + slf4j-api + org.slf4j + + + netty + io.netty + + + hbase-client + org.apache.hbase + + + commons-io + commons-io + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + zookeeper + org.apache.zookeeper + + + commons-math3 + org.apache.commons + + + hadoop-annotations + org.apache.hadoop + + + hadoop-hdfs + org.apache.hadoop + + + hadoop-yarn-client + org.apache.hadoop + + + hadoop-yarn-common + org.apache.hadoop + + + hadoop-yarn-server-common + org.apache.hadoop + + + javax.ws.rs-api + javax.ws.rs + + + htrace-core + org.apache.htrace + + + jline + jline + + + fastutil + it.unimi.dsi + + + commons-lang + commons-lang + + + jsr305 + com.google.code.findbugs + + + hbase-common + org.apache.hbase + + + javax.servlet.jsp-api + javax.servlet.jsp + + + hbase-server + org.apache.hbase + + + javax.servlet-api + javax.servlet + + + jetty-io + org.eclipse.jetty + + + jetty-http + org.eclipse.jetty + + + jetty-security + org.eclipse.jetty + + + jetty-server + org.eclipse.jetty + + + jetty-servlet + org.eclipse.jetty + + + jetty-webapp + org.eclipse.jetty + + + + + + org.mongodb + mongo-java-driver + ${mongo-java-driver.version} + + + + ru.yandex.clickhouse + clickhouse-jdbc + 0.2.4 + + + guava + com.google.guava + + + slf4j-api + org.slf4j + + + + + + io.jsonwebtoken + jjwt + ${jjwt.version} + + + + com.google.guava + guava + 29.0-jre + + + org.springframework + spring-webmvc + + + io.springfox + springfox-swagger2 + 3.0.0 + compile + + + net.sourceforge.jtds + jtds + 1.3.1 + + + com.alibaba + druid + 1.2.8 + compile + + + + + + + + + + + + + + + com.alibaba.nacos + nacos-api + 2.0.4 + + + + + + + + + org.quartz-scheduler + quartz + + + org.mapstruct + mapstruct + 1.3.1.Final + + + cn.hutool + hutool-all + ${hutool.version} + compile + + + org.springframework.cloud + spring-cloud-openfeign-core + 3.0.3 + + + org.springframework.security.oauth + spring-security-oauth2 + 2.3.4.RELEASE + + + org.springframework.amqp + spring-rabbit + 2.2.12.RELEASE + + + + + + src/main/java + + **/*.properties + **/*.xml + + true + + + src/main/resources + + **/*.properties + **/*.xml + **/templates/** + + true + + + + \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/config/MybatisPlusConfig.java b/czsj-system/src/main/java/com/czsj/bigdata/config/MybatisPlusConfig.java new file mode 100644 index 0000000..f519322 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/config/MybatisPlusConfig.java @@ -0,0 +1,45 @@ +package com.czsj.bigdata.config; + +import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector; +import 
com.baomidou.mybatisplus.core.injector.ISqlInjector; +import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +/** + * + * @Author: czsj + * @Date: 2022/9/16 11:14 + * @Description: + **/ +@EnableTransactionManagement +@Configuration +@MapperScan("com.czsj.bigdata.mapper") +public class MybatisPlusConfig { + + /** + * 分页插件 + */ + @Bean + public PaginationInterceptor paginationInterceptor() { + + PaginationInterceptor paginationInterceptor = new PaginationInterceptor(); + return paginationInterceptor.setOverflow(true); + } + + /** + * MyBatisPlus逻辑删除 ,需要在 yml 中配置开启 + * 3.0.7.1版本的LogicSqlInjector里面什么都没做只是 extends DefaultSqlInjector + * 以后版本直接去的了LogicSqlInjector + * + * @return + */ + @Bean + public ISqlInjector sqlInjector() { + return new DefaultSqlInjector(); + } + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/conf/ExcecutorConfig.java b/czsj-system/src/main/java/com/czsj/bigdata/core/conf/ExcecutorConfig.java new file mode 100644 index 0000000..d94879f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/conf/ExcecutorConfig.java @@ -0,0 +1,83 @@ +package com.czsj.bigdata.core.conf; + + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +/** + * + * + * @Date: 2022/1/20 22:52 + * @Description: + **/ +@Component +public class ExcecutorConfig implements InitializingBean, DisposableBean { + + private static ExcecutorConfig excecutorConfig = null; + + public static ExcecutorConfig getExcecutorConfig() { + return excecutorConfig; + } + + @Override + public void afterPropertiesSet() throws Exception { + excecutorConfig = this; + } + + @Override + public void destroy() throws Exception { + } + + + + @Value("${spring.datasource.url}") + private String url; + + @Value("${spring.datasource.driver-class-name}") + private String driverClassname; + + @Value("${spring.datasource.username}") + private String username; + + @Value("${spring.datasource.password}") + private String password; + + + public static void setExcecutorConfig(ExcecutorConfig excecutorConfig) { + ExcecutorConfig.excecutorConfig = excecutorConfig; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getDriverClassname() { + return driverClassname; + } + + public void setDriverClassname(String driverClassname) { + this.driverClassname = driverClassname; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/conf/JobAdminConfig.java b/czsj-system/src/main/java/com/czsj/bigdata/core/conf/JobAdminConfig.java new file mode 100644 index 0000000..c989c31 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/conf/JobAdminConfig.java @@ -0,0 +1,166 @@ +package com.czsj.bigdata.core.conf; + + +import com.czsj.bigdata.core.scheduler.JobScheduler; +import com.czsj.bigdata.core.util.EmailUtil; +import 
com.czsj.bigdata.mapper.*; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import org.springframework.beans.factory.annotation.Autowired; +import javax.sql.DataSource; + +/** + * xxl-job config + * + * @author xuxueli 2017-04-28 + */ + +@Component +public class JobAdminConfig implements InitializingBean, DisposableBean { + + private static JobAdminConfig adminConfig = null; + + public static JobAdminConfig getAdminConfig() { + return adminConfig; + } + + + // ---------------------- XxlJobScheduler ---------------------- + + private JobScheduler xxlJobScheduler; + + @Override + public void afterPropertiesSet() throws Exception { + adminConfig = this; + + xxlJobScheduler = new JobScheduler(); + xxlJobScheduler.init(); + } + + @Override + public void destroy() throws Exception { + xxlJobScheduler.destroy(); + } + + + // ---------------------- XxlJobScheduler ---------------------- + + // conf + @Value("${datax.job.i18n}") + private String i18n; + + @Value("${datax.job.accessToken}") + private String accessToken; + + @Value("${spring.mail.username}") + private String emailUserName; + + @Value("${spring.mail.password}") + private String emailPassword; + + @Value("${spring.mail.authorization}") + private String emailAuthorization; + + @Value("${datax.job.triggerpool.fast.max}") + private int triggerPoolFastMax; + + @Value("${datax.job.triggerpool.slow.max}") + private int triggerPoolSlowMax; + + @Value("${datax.job.logretentiondays}") + private int logretentiondays; + + @Value("${datasource.aes.key}") + private String dataSourceAESKey; + + + // dao, service + + @Autowired + private JobLogMapper jobLogMapper; + @Autowired + private JobInfoMapper jobInfoMapper; + @Autowired + private JobRegistryMapper jobRegistryMapper; + @Autowired + private JobGroupMapper jobGroupMapper; + @Autowired + private JobLogReportMapper jobLogReportMapper; + + @Autowired + private DataSource dataSource; + @Autowired + private JobDatasourceMapper jobDatasourceMapper; + + public String getI18n() { + return i18n; + } + + public String getAccessToken() { + return accessToken; + } + + public String getEmailUserName() { + return emailUserName; + } + + public int getTriggerPoolFastMax() { + return triggerPoolFastMax < 200 ? 200 : triggerPoolFastMax; + } + + public int getTriggerPoolSlowMax() { + return triggerPoolSlowMax < 100 ? 100 : triggerPoolSlowMax; + } + + public int getLogretentiondays() { + return logretentiondays < 7 ? 
-1 : logretentiondays; + } + + public JobLogMapper getJobLogMapper() { + return jobLogMapper; + } + + public JobInfoMapper getJobInfoMapper() { + return jobInfoMapper; + } + + public JobRegistryMapper getJobRegistryMapper() { + return jobRegistryMapper; + } + + public JobGroupMapper getJobGroupMapper() { + return jobGroupMapper; + } + + public JobLogReportMapper getJobLogReportMapper() { + return jobLogReportMapper; + } + + public String getEmailPassword() { + return emailPassword; + } + + public DataSource getDataSource() { + return dataSource; + } + + public JobDatasourceMapper getJobDatasourceMapper() { + return jobDatasourceMapper; + } + + public String getDataSourceAESKey() { + + return dataSourceAESKey; + } + + public void setDataSourceAESKey(String dataSourceAESKey) { + this.dataSourceAESKey = dataSourceAESKey; + } + + public String getEmailAuthorization() { + return emailAuthorization; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/cron/CronExpression.java b/czsj-system/src/main/java/com/czsj/bigdata/core/cron/CronExpression.java new file mode 100644 index 0000000..6b576b3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/cron/CronExpression.java @@ -0,0 +1,1657 @@ +/* + * All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy + * of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + * + */ + +package com.czsj.bigdata.core.cron; + +import java.io.Serializable; +import java.text.ParseException; +import java.util.*; + +/** + * Provides a parser and evaluator for unix-like cron expressions. Cron + * expressions provide the ability to specify complex time combinations such as + * "At 8:00am every Monday through Friday" or "At 1:30am every + * last Friday of the month". + *

+ * Cron expressions are comprised of 6 required fields and one optional field
+ * separated by white space. The fields respectively are described as follows:
+ *
+ * Field Name      | Allowed Values   | Allowed Special Characters
+ * --------------- | ---------------- | ---------------------------
+ * Seconds         | 0-59             | , - * /
+ * Minutes         | 0-59             | , - * /
+ * Hours           | 0-23             | , - * /
+ * Day-of-month    | 1-31             | , - * ? / L W
+ * Month           | 0-11 or JAN-DEC  | , - * /
+ * Day-of-Week     | 1-7 or SUN-SAT   | , - * ? / L #
+ * Year (Optional) | empty, 1970-2199 | , - * /
+ *
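+ * For example, under this layout "0 0 12 * * ?" fires at 12:00pm (noon)
+ * every day, and "0 15 10 ? * MON-FRI" fires at 10:15am every weekday.
+ *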

+ * The '*' character is used to specify all values. For example, "*" + * in the minute field means "every minute". + *

+ * The '?' character is allowed for the day-of-month and day-of-week fields. It + * is used to specify 'no specific value'. This is useful when you need to + * specify something in one of the two fields, but not the other. + *

+ * The '-' character is used to specify ranges. For example, "10-12" in
+ * the hour field means "the hours 10, 11 and 12".
+ *

+ * The ',' character is used to specify additional values. For example + * "MON,WED,FRI" in the day-of-week field means "the days Monday, + * Wednesday, and Friday". + *

+ * The '/' character is used to specify increments. For example "0/15"
+ * in the seconds field means "the seconds 0, 15, 30, and 45". And
+ * "5/15" in the seconds field means "the seconds 5, 20, 35, and
+ * 50". Specifying '*' before the '/' is equivalent to specifying 0 as
+ * the value to start with. Essentially, for each field in the expression, there
+ * is a set of numbers that can be turned on or off. For seconds and minutes,
+ * the numbers range from 0 to 59. For hours 0 to 23, for days of the month 1 to
+ * 31, and for months 0 to 11 (JAN to DEC). The "/" character simply helps you turn
+ * on every "nth" value in the given set. Thus "7/6" in the
+ * month field only turns on month "7"; it does NOT mean every 6th
+ * month - please note that subtlety.
+ *

+ * The 'L' character is allowed for the day-of-month and day-of-week fields. + * This character is short-hand for "last", but it has different + * meaning in each of the two fields. For example, the value "L" in + * the day-of-month field means "the last day of the month" - day 31 + * for January, day 28 for February on non-leap years. If used in the + * day-of-week field by itself, it simply means "7" or + * "SAT". But if used in the day-of-week field after another value, it + * means "the last xxx day of the month" - for example "6L" + * means "the last friday of the month". You can also specify an offset + * from the last day of the month, such as "L-3" which would mean the third-to-last + * day of the calendar month. When using the 'L' option, it is important not to + * specify lists, or ranges of values, as you'll get confusing/unexpected results. + *

+ * The 'W' character is allowed for the day-of-month field. This character + * is used to specify the weekday (Monday-Friday) nearest the given day. As an + * example, if you were to specify "15W" as the value for the + * day-of-month field, the meaning is: "the nearest weekday to the 15th of + * the month". So if the 15th is a Saturday, the trigger will fire on + * Friday the 14th. If the 15th is a Sunday, the trigger will fire on Monday the + * 16th. If the 15th is a Tuesday, then it will fire on Tuesday the 15th. + * However if you specify "1W" as the value for day-of-month, and the + * 1st is a Saturday, the trigger will fire on Monday the 3rd, as it will not + * 'jump' over the boundary of a month's days. The 'W' character can only be + * specified when the day-of-month is a single day, not a range or list of days. + *

+ * The 'L' and 'W' characters can also be combined for the day-of-month + * expression to yield 'LW', which translates to "last weekday of the + * month". + *

+ * The '#' character is allowed for the day-of-week field. This character is
+ * used to specify "the nth" XXX day of the month. For example, the
+ * value of "6#3" in the day-of-week field means the third Friday of
+ * the month (day 6 = Friday and "#3" = the 3rd one in the month).
+ * Other examples: "2#1" = the first Monday of the month and
+ * "4#5" = the fifth Wednesday of the month. Note that if you specify
+ * "#5" and there is no 5th occurrence of the given day-of-week in the
+ * month, then no firing will occur that month. If the '#' character is used,
+ * there can only be one expression in the day-of-week field ("3#1,6#3" is
+ * not valid, since there are two expressions).
+ *

+ * + *

+ * The legal characters and the names of months and days of the week are not + * case sensitive. + * + *

+ * NOTES: + *

    + *
+ * • Support for specifying both a day-of-week and a day-of-month value is
+ *   not complete (you'll need to use the '?' character in one of these fields).
+ * • Overflowing ranges is supported - that is, having a larger number on
+ *   the left hand side than the right. You might do 22-2 to catch 10 o'clock
+ *   at night until 2 o'clock in the morning, or you might have NOV-FEB. It is
+ *   very important to note that overuse of overflowing ranges creates ranges
+ *   that don't make sense and no effort has been made to determine which
+ *   interpretation CronExpression chooses. An example would be
+ *   "0 0 14-6 ? * FRI-MON".
+ *
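+ *
+ * A minimal usage sketch (illustrative only; it relies solely on the public
+ * API declared below):
+ * <pre>
+ * CronExpression cron = new CronExpression("0 0/5 14 * * ?"); // may throw ParseException
+ * Date next = cron.getNextValidTimeAfter(new Date());         // next matching time
+ * boolean hit = cron.isSatisfiedBy(next);                     // true for a returned time
+ * </pre>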

+ * + * + * @author Sharada Jambula, James House + * @author Contributions from Mads Henderson + * @author Refactoring from CronTrigger to CronExpression by Aaron Craven + * + * Borrowed from quartz v2.3.1 + * + */ +public final class CronExpression implements Serializable, Cloneable { + + private static final long serialVersionUID = 12423409423L; + + protected static final int SECOND = 0; + protected static final int MINUTE = 1; + protected static final int HOUR = 2; + protected static final int DAY_OF_MONTH = 3; + protected static final int MONTH = 4; + protected static final int DAY_OF_WEEK = 5; + protected static final int YEAR = 6; + protected static final int ALL_SPEC_INT = 99; // '*' + protected static final int NO_SPEC_INT = 98; // '?' + protected static final Integer ALL_SPEC = ALL_SPEC_INT; + protected static final Integer NO_SPEC = NO_SPEC_INT; + + protected static final Map monthMap = new HashMap(20); + protected static final Map dayMap = new HashMap(60); + static { + monthMap.put("JAN", 0); + monthMap.put("FEB", 1); + monthMap.put("MAR", 2); + monthMap.put("APR", 3); + monthMap.put("MAY", 4); + monthMap.put("JUN", 5); + monthMap.put("JUL", 6); + monthMap.put("AUG", 7); + monthMap.put("SEP", 8); + monthMap.put("OCT", 9); + monthMap.put("NOV", 10); + monthMap.put("DEC", 11); + + dayMap.put("SUN", 1); + dayMap.put("MON", 2); + dayMap.put("TUE", 3); + dayMap.put("WED", 4); + dayMap.put("THU", 5); + dayMap.put("FRI", 6); + dayMap.put("SAT", 7); + } + + private final String cronExpression; + private TimeZone timeZone = null; + protected transient TreeSet seconds; + protected transient TreeSet minutes; + protected transient TreeSet hours; + protected transient TreeSet daysOfMonth; + protected transient TreeSet months; + protected transient TreeSet daysOfWeek; + protected transient TreeSet years; + + protected transient boolean lastdayOfWeek = false; + protected transient int nthdayOfWeek = 0; + protected transient boolean lastdayOfMonth = false; + protected transient boolean nearestWeekday = false; + protected transient int lastdayOffset = 0; + protected transient boolean expressionParsed = false; + + public static final int MAX_YEAR = Calendar.getInstance().get(Calendar.YEAR) + 100; + + /** + * Constructs a new CronExpression based on the specified + * parameter. + * + * @param cronExpression String representation of the cron expression the + * new object should represent + * @throws ParseException + * if the string expression cannot be parsed into a valid + * CronExpression + */ + public CronExpression(String cronExpression) throws ParseException { + if (cronExpression == null) { + throw new IllegalArgumentException("cronExpression cannot be null"); + } + + this.cronExpression = cronExpression.toUpperCase(Locale.US); + + buildExpression(this.cronExpression); + } + + /** + * Constructs a new {@code CronExpression} as a copy of an existing + * instance. + * + * @param expression + * The existing cron expression to be copied + */ + public CronExpression(CronExpression expression) { + /* + * We don't call the other constructor here since we need to swallow the + * ParseException. We also elide some of the sanity checking as it is + * not logically trippable. 
+ */ + this.cronExpression = expression.getCronExpression(); + try { + buildExpression(cronExpression); + } catch (ParseException ex) { + throw new AssertionError(); + } + if (expression.getTimeZone() != null) { + setTimeZone((TimeZone) expression.getTimeZone().clone()); + } + } + + /** + * Indicates whether the given date satisfies the cron expression. Note that + * milliseconds are ignored, so two Dates falling on different milliseconds + * of the same second will always have the same result here. + * + * @param date the date to evaluate + * @return a boolean indicating whether the given date satisfies the cron + * expression + */ + public boolean isSatisfiedBy(Date date) { + Calendar testDateCal = Calendar.getInstance(getTimeZone()); + testDateCal.setTime(date); + testDateCal.set(Calendar.MILLISECOND, 0); + Date originalDate = testDateCal.getTime(); + + testDateCal.add(Calendar.SECOND, -1); + + Date timeAfter = getTimeAfter(testDateCal.getTime()); + + return ((timeAfter != null) && (timeAfter.equals(originalDate))); + } + + /** + * Returns the next date/time after the given date/time which + * satisfies the cron expression. + * + * @param date the date/time at which to begin the search for the next valid + * date/time + * @return the next valid date/time + */ + public Date getNextValidTimeAfter(Date date) { + return getTimeAfter(date); + } + + /** + * Returns the next date/time after the given date/time which does + * not satisfy the expression + * + * @param date the date/time at which to begin the search for the next + * invalid date/time + * @return the next valid date/time + */ + public Date getNextInvalidTimeAfter(Date date) { + long difference = 1000; + + //move back to the nearest second so differences will be accurate + Calendar adjustCal = Calendar.getInstance(getTimeZone()); + adjustCal.setTime(date); + adjustCal.set(Calendar.MILLISECOND, 0); + Date lastDate = adjustCal.getTime(); + + Date newDate; + + //FUTURE_TODO: (QUARTZ-481) IMPROVE THIS! The following is a BAD solution to this problem. Performance will be very bad here, depending on the cron expression. It is, however A solution. + + //keep getting the next included time until it's farther than one second + // apart. At that point, lastDate is the last valid fire time. We return + // the second immediately following it. + while (difference == 1000) { + newDate = getTimeAfter(lastDate); + if(newDate == null) + break; + + difference = newDate.getTime() - lastDate.getTime(); + + if (difference == 1000) { + lastDate = newDate; + } + } + + return new Date(lastDate.getTime() + 1000); + } + + /** + * Returns the time zone for which this CronExpression + * will be resolved. + */ + public TimeZone getTimeZone() { + if (timeZone == null) { + timeZone = TimeZone.getDefault(); + } + + return timeZone; + } + + /** + * Sets the time zone for which this CronExpression + * will be resolved. 
+ */ + public void setTimeZone(TimeZone timeZone) { + this.timeZone = timeZone; + } + + /** + * Returns the string representation of the CronExpression + * + * @return a string representation of the CronExpression + */ + @Override + public String toString() { + return cronExpression; + } + + /** + * Indicates whether the specified cron expression can be parsed into a + * valid cron expression + * + * @param cronExpression the expression to evaluate + * @return a boolean indicating whether the given expression is a valid cron + * expression + */ + public static boolean isValidExpression(String cronExpression) { + + try { + new CronExpression(cronExpression); + } catch (ParseException pe) { + return false; + } + + return true; + } + + public static void validateExpression(String cronExpression) throws ParseException { + + new CronExpression(cronExpression); + } + + + //////////////////////////////////////////////////////////////////////////// + // + // Expression Parsing Functions + // + //////////////////////////////////////////////////////////////////////////// + + protected void buildExpression(String expression) throws ParseException { + expressionParsed = true; + + try { + + if (seconds == null) { + seconds = new TreeSet(); + } + if (minutes == null) { + minutes = new TreeSet(); + } + if (hours == null) { + hours = new TreeSet(); + } + if (daysOfMonth == null) { + daysOfMonth = new TreeSet(); + } + if (months == null) { + months = new TreeSet(); + } + if (daysOfWeek == null) { + daysOfWeek = new TreeSet(); + } + if (years == null) { + years = new TreeSet(); + } + + int exprOn = SECOND; + + StringTokenizer exprsTok = new StringTokenizer(expression, " \t", + false); + + while (exprsTok.hasMoreTokens() && exprOn <= YEAR) { + String expr = exprsTok.nextToken().trim(); + + // throw an exception if L is used with other days of the month + if(exprOn == DAY_OF_MONTH && expr.indexOf('L') != -1 && expr.length() > 1 && expr.contains(",")) { + throw new ParseException("Support for specifying 'L' and 'LW' with other days of the month is not implemented", -1); + } + // throw an exception if L is used with other days of the week + if(exprOn == DAY_OF_WEEK && expr.indexOf('L') != -1 && expr.length() > 1 && expr.contains(",")) { + throw new ParseException("Support for specifying 'L' with other days of the week is not implemented", -1); + } + if(exprOn == DAY_OF_WEEK && expr.indexOf('#') != -1 && expr.indexOf('#', expr.indexOf('#') +1) != -1) { + throw new ParseException("Support for specifying multiple \"nth\" days is not implemented.", -1); + } + + StringTokenizer vTok = new StringTokenizer(expr, ","); + while (vTok.hasMoreTokens()) { + String v = vTok.nextToken(); + storeExpressionVals(0, v, exprOn); + } + + exprOn++; + } + + if (exprOn <= DAY_OF_WEEK) { + throw new ParseException("Unexpected end of expression.", + expression.length()); + } + + if (exprOn <= YEAR) { + storeExpressionVals(0, "*", YEAR); + } + + TreeSet dow = getSet(DAY_OF_WEEK); + TreeSet dom = getSet(DAY_OF_MONTH); + + // Copying the logic from the UnsupportedOperationException below + boolean dayOfMSpec = !dom.contains(NO_SPEC); + boolean dayOfWSpec = !dow.contains(NO_SPEC); + + if (!dayOfMSpec || dayOfWSpec) { + if (!dayOfWSpec || dayOfMSpec) { + throw new ParseException( + "Support for specifying both a day-of-week AND a day-of-month parameter is not implemented.", 0); + } + } + } catch (ParseException pe) { + throw pe; + } catch (Exception e) { + throw new ParseException("Illegal cron expression format (" + + e.toString() + ")", 
0); + } + } + + protected int storeExpressionVals(int pos, String s, int type) + throws ParseException { + + int incr = 0; + int i = skipWhiteSpace(pos, s); + if (i >= s.length()) { + return i; + } + char c = s.charAt(i); + if ((c >= 'A') && (c <= 'Z') && (!s.equals("L")) && (!s.equals("LW")) && (!s.matches("^L-[0-9]*[W]?"))) { + String sub = s.substring(i, i + 3); + int sval = -1; + int eval = -1; + if (type == MONTH) { + sval = getMonthNumber(sub) + 1; + if (sval <= 0) { + throw new ParseException("Invalid Month value: '" + sub + "'", i); + } + if (s.length() > i + 3) { + c = s.charAt(i + 3); + if (c == '-') { + i += 4; + sub = s.substring(i, i + 3); + eval = getMonthNumber(sub) + 1; + if (eval <= 0) { + throw new ParseException("Invalid Month value: '" + sub + "'", i); + } + } + } + } else if (type == DAY_OF_WEEK) { + sval = getDayOfWeekNumber(sub); + if (sval < 0) { + throw new ParseException("Invalid Day-of-Week value: '" + + sub + "'", i); + } + if (s.length() > i + 3) { + c = s.charAt(i + 3); + if (c == '-') { + i += 4; + sub = s.substring(i, i + 3); + eval = getDayOfWeekNumber(sub); + if (eval < 0) { + throw new ParseException( + "Invalid Day-of-Week value: '" + sub + + "'", i); + } + } else if (c == '#') { + try { + i += 4; + nthdayOfWeek = Integer.parseInt(s.substring(i)); + if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { + throw new Exception(); + } + } catch (Exception e) { + throw new ParseException( + "A numeric value between 1 and 5 must follow the '#' option", + i); + } + } else if (c == 'L') { + lastdayOfWeek = true; + i++; + } + } + + } else { + throw new ParseException( + "Illegal characters for this position: '" + sub + "'", + i); + } + if (eval != -1) { + incr = 1; + } + addToSet(sval, eval, incr, type); + return (i + 3); + } + + if (c == '?') { + i++; + if ((i + 1) < s.length() + && (s.charAt(i) != ' ' && s.charAt(i + 1) != '\t')) { + throw new ParseException("Illegal character after '?': " + + s.charAt(i), i); + } + if (type != DAY_OF_WEEK && type != DAY_OF_MONTH) { + throw new ParseException( + "'?' can only be specified for Day-of-Month or Day-of-Week.", + i); + } + if (type == DAY_OF_WEEK && !lastdayOfMonth) { + int val = daysOfMonth.last(); + if (val == NO_SPEC_INT) { + throw new ParseException( + "'?' can only be specified for Day-of-Month -OR- Day-of-Week.", + i); + } + } + + addToSet(NO_SPEC_INT, -1, 0, type); + return i; + } + + if (c == '*' || c == '/') { + if (c == '*' && (i + 1) >= s.length()) { + addToSet(ALL_SPEC_INT, -1, incr, type); + return i + 1; + } else if (c == '/' + && ((i + 1) >= s.length() || s.charAt(i + 1) == ' ' || s + .charAt(i + 1) == '\t')) { + throw new ParseException("'/' must be followed by an integer.", i); + } else if (c == '*') { + i++; + } + c = s.charAt(i); + if (c == '/') { // is an increment specified? 
+ i++; + if (i >= s.length()) { + throw new ParseException("Unexpected end of string.", i); + } + + incr = getNumericValue(s, i); + + i++; + if (incr > 10) { + i++; + } + checkIncrementRange(incr, type, i); + } else { + incr = 1; + } + + addToSet(ALL_SPEC_INT, -1, incr, type); + return i; + } else if (c == 'L') { + i++; + if (type == DAY_OF_MONTH) { + lastdayOfMonth = true; + } + if (type == DAY_OF_WEEK) { + addToSet(7, 7, 0, type); + } + if(type == DAY_OF_MONTH && s.length() > i) { + c = s.charAt(i); + if(c == '-') { + ValueSet vs = getValue(0, s, i+1); + lastdayOffset = vs.value; + if(lastdayOffset > 30) + throw new ParseException("Offset from last day must be <= 30", i+1); + i = vs.pos; + } + if(s.length() > i) { + c = s.charAt(i); + if(c == 'W') { + nearestWeekday = true; + i++; + } + } + } + return i; + } else if (c >= '0' && c <= '9') { + int val = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + addToSet(val, -1, -1, type); + } else { + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(val, s, i); + val = vs.value; + i = vs.pos; + } + i = checkNext(i, s, val, type); + return i; + } + } else { + throw new ParseException("Unexpected character: " + c, i); + } + + return i; + } + + private void checkIncrementRange(int incr, int type, int idxPos) throws ParseException { + if (incr > 59 && (type == SECOND || type == MINUTE)) { + throw new ParseException("Increment > 60 : " + incr, idxPos); + } else if (incr > 23 && (type == HOUR)) { + throw new ParseException("Increment > 24 : " + incr, idxPos); + } else if (incr > 31 && (type == DAY_OF_MONTH)) { + throw new ParseException("Increment > 31 : " + incr, idxPos); + } else if (incr > 7 && (type == DAY_OF_WEEK)) { + throw new ParseException("Increment > 7 : " + incr, idxPos); + } else if (incr > 12 && (type == MONTH)) { + throw new ParseException("Increment > 12 : " + incr, idxPos); + } + } + + protected int checkNext(int pos, String s, int val, int type) + throws ParseException { + + int end = -1; + int i = pos; + + if (i >= s.length()) { + addToSet(val, end, -1, type); + return i; + } + + char c = s.charAt(pos); + + if (c == 'L') { + if (type == DAY_OF_WEEK) { + if(val < 1 || val > 7) + throw new ParseException("Day-of-Week values must be between 1 and 7", -1); + lastdayOfWeek = true; + } else { + throw new ParseException("'L' option is not valid here. (pos=" + i + ")", i); + } + TreeSet set = getSet(type); + set.add(val); + i++; + return i; + } + + if (c == 'W') { + if (type == DAY_OF_MONTH) { + nearestWeekday = true; + } else { + throw new ParseException("'W' option is not valid here. (pos=" + i + ")", i); + } + if(val > 31) + throw new ParseException("The 'W' option does not make sense with values larger than 31 (max number of days in a month)", i); + TreeSet set = getSet(type); + set.add(val); + i++; + return i; + } + + if (c == '#') { + if (type != DAY_OF_WEEK) { + throw new ParseException("'#' option is not valid here. 
(pos=" + i + ")", i); + } + i++; + try { + nthdayOfWeek = Integer.parseInt(s.substring(i)); + if (nthdayOfWeek < 1 || nthdayOfWeek > 5) { + throw new Exception(); + } + } catch (Exception e) { + throw new ParseException( + "A numeric value between 1 and 5 must follow the '#' option", + i); + } + + TreeSet set = getSet(type); + set.add(val); + i++; + return i; + } + + if (c == '-') { + i++; + c = s.charAt(i); + int v = Integer.parseInt(String.valueOf(c)); + end = v; + i++; + if (i >= s.length()) { + addToSet(val, end, 1, type); + return i; + } + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v, s, i); + end = vs.value; + i = vs.pos; + } + if (i < s.length() && ((c = s.charAt(i)) == '/')) { + i++; + c = s.charAt(i); + int v2 = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + addToSet(val, end, v2, type); + return i; + } + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v2, s, i); + int v3 = vs.value; + addToSet(val, end, v3, type); + i = vs.pos; + return i; + } else { + addToSet(val, end, v2, type); + return i; + } + } else { + addToSet(val, end, 1, type); + return i; + } + } + + if (c == '/') { + if ((i + 1) >= s.length() || s.charAt(i + 1) == ' ' || s.charAt(i + 1) == '\t') { + throw new ParseException("'/' must be followed by an integer.", i); + } + + i++; + c = s.charAt(i); + int v2 = Integer.parseInt(String.valueOf(c)); + i++; + if (i >= s.length()) { + checkIncrementRange(v2, type, i); + addToSet(val, end, v2, type); + return i; + } + c = s.charAt(i); + if (c >= '0' && c <= '9') { + ValueSet vs = getValue(v2, s, i); + int v3 = vs.value; + checkIncrementRange(v3, type, i); + addToSet(val, end, v3, type); + i = vs.pos; + return i; + } else { + throw new ParseException("Unexpected character '" + c + "' after '/'", i); + } + } + + addToSet(val, end, 0, type); + i++; + return i; + } + + public String getCronExpression() { + return cronExpression; + } + + public String getExpressionSummary() { + StringBuilder buf = new StringBuilder(); + + buf.append("seconds: "); + buf.append(getExpressionSetSummary(seconds)); + buf.append("\n"); + buf.append("minutes: "); + buf.append(getExpressionSetSummary(minutes)); + buf.append("\n"); + buf.append("hours: "); + buf.append(getExpressionSetSummary(hours)); + buf.append("\n"); + buf.append("daysOfMonth: "); + buf.append(getExpressionSetSummary(daysOfMonth)); + buf.append("\n"); + buf.append("months: "); + buf.append(getExpressionSetSummary(months)); + buf.append("\n"); + buf.append("daysOfWeek: "); + buf.append(getExpressionSetSummary(daysOfWeek)); + buf.append("\n"); + buf.append("lastdayOfWeek: "); + buf.append(lastdayOfWeek); + buf.append("\n"); + buf.append("nearestWeekday: "); + buf.append(nearestWeekday); + buf.append("\n"); + buf.append("NthDayOfWeek: "); + buf.append(nthdayOfWeek); + buf.append("\n"); + buf.append("lastdayOfMonth: "); + buf.append(lastdayOfMonth); + buf.append("\n"); + buf.append("years: "); + buf.append(getExpressionSetSummary(years)); + buf.append("\n"); + + return buf.toString(); + } + + protected String getExpressionSetSummary(Set set) { + + if (set.contains(NO_SPEC)) { + return "?"; + } + if (set.contains(ALL_SPEC)) { + return "*"; + } + + StringBuilder buf = new StringBuilder(); + + Iterator itr = set.iterator(); + boolean first = true; + while (itr.hasNext()) { + Integer iVal = itr.next(); + String val = iVal.toString(); + if (!first) { + buf.append(","); + } + buf.append(val); + first = false; + } + + return buf.toString(); + } + + protected String 
getExpressionSetSummary(ArrayList list) { + + if (list.contains(NO_SPEC)) { + return "?"; + } + if (list.contains(ALL_SPEC)) { + return "*"; + } + + StringBuilder buf = new StringBuilder(); + + Iterator itr = list.iterator(); + boolean first = true; + while (itr.hasNext()) { + Integer iVal = itr.next(); + String val = iVal.toString(); + if (!first) { + buf.append(","); + } + buf.append(val); + first = false; + } + + return buf.toString(); + } + + protected int skipWhiteSpace(int i, String s) { + for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) { + } + + return i; + } + + protected int findNextWhiteSpace(int i, String s) { + for (; i < s.length() && (s.charAt(i) != ' ' || s.charAt(i) != '\t'); i++) { + } + + return i; + } + + protected void addToSet(int val, int end, int incr, int type) + throws ParseException { + + TreeSet set = getSet(type); + + if (type == SECOND || type == MINUTE) { + if ((val < 0 || val > 59 || end > 59) && (val != ALL_SPEC_INT)) { + throw new ParseException( + "Minute and Second values must be between 0 and 59", + -1); + } + } else if (type == HOUR) { + if ((val < 0 || val > 23 || end > 23) && (val != ALL_SPEC_INT)) { + throw new ParseException( + "Hour values must be between 0 and 23", -1); + } + } else if (type == DAY_OF_MONTH) { + if ((val < 1 || val > 31 || end > 31) && (val != ALL_SPEC_INT) + && (val != NO_SPEC_INT)) { + throw new ParseException( + "Day of month values must be between 1 and 31", -1); + } + } else if (type == MONTH) { + if ((val < 1 || val > 12 || end > 12) && (val != ALL_SPEC_INT)) { + throw new ParseException( + "Month values must be between 1 and 12", -1); + } + } else if (type == DAY_OF_WEEK) { + if ((val == 0 || val > 7 || end > 7) && (val != ALL_SPEC_INT) + && (val != NO_SPEC_INT)) { + throw new ParseException( + "Day-of-Week values must be between 1 and 7", -1); + } + } + + if ((incr == 0 || incr == -1) && val != ALL_SPEC_INT) { + if (val != -1) { + set.add(val); + } else { + set.add(NO_SPEC); + } + + return; + } + + int startAt = val; + int stopAt = end; + + if (val == ALL_SPEC_INT && incr <= 0) { + incr = 1; + set.add(ALL_SPEC); // put in a marker, but also fill values + } + + if (type == SECOND || type == MINUTE) { + if (stopAt == -1) { + stopAt = 59; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 0; + } + } else if (type == HOUR) { + if (stopAt == -1) { + stopAt = 23; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 0; + } + } else if (type == DAY_OF_MONTH) { + if (stopAt == -1) { + stopAt = 31; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == MONTH) { + if (stopAt == -1) { + stopAt = 12; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == DAY_OF_WEEK) { + if (stopAt == -1) { + stopAt = 7; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1; + } + } else if (type == YEAR) { + if (stopAt == -1) { + stopAt = MAX_YEAR; + } + if (startAt == -1 || startAt == ALL_SPEC_INT) { + startAt = 1970; + } + } + + // if the end of the range is before the start, then we need to overflow into + // the next day, month etc. This is done by adding the maximum amount for that + // type, and using modulus max to determine the value being added. 
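+        // Hypothetical walkthrough of the overflow handling: a SECOND range
+        // of 50-10 becomes startAt=50, stopAt=10+60=70; the modulus below
+        // then stores {50..59, 0..10}, wrapping across the minute boundary.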
+ int max = -1; + if (stopAt < startAt) { + switch (type) { + case SECOND : max = 60; break; + case MINUTE : max = 60; break; + case HOUR : max = 24; break; + case MONTH : max = 12; break; + case DAY_OF_WEEK : max = 7; break; + case DAY_OF_MONTH : max = 31; break; + case YEAR : throw new IllegalArgumentException("Start year must be less than stop year"); + default : throw new IllegalArgumentException("Unexpected type encountered"); + } + stopAt += max; + } + + for (int i = startAt; i <= stopAt; i += incr) { + if (max == -1) { + // ie: there's no max to overflow over + set.add(i); + } else { + // take the modulus to get the real value + int i2 = i % max; + + // 1-indexed ranges should not include 0, and should include their max + if (i2 == 0 && (type == MONTH || type == DAY_OF_WEEK || type == DAY_OF_MONTH) ) { + i2 = max; + } + + set.add(i2); + } + } + } + + TreeSet getSet(int type) { + switch (type) { + case SECOND: + return seconds; + case MINUTE: + return minutes; + case HOUR: + return hours; + case DAY_OF_MONTH: + return daysOfMonth; + case MONTH: + return months; + case DAY_OF_WEEK: + return daysOfWeek; + case YEAR: + return years; + default: + return null; + } + } + + protected ValueSet getValue(int v, String s, int i) { + char c = s.charAt(i); + StringBuilder s1 = new StringBuilder(String.valueOf(v)); + while (c >= '0' && c <= '9') { + s1.append(c); + i++; + if (i >= s.length()) { + break; + } + c = s.charAt(i); + } + ValueSet val = new ValueSet(); + + val.pos = (i < s.length()) ? i : i + 1; + val.value = Integer.parseInt(s1.toString()); + return val; + } + + protected int getNumericValue(String s, int i) { + int endOfVal = findNextWhiteSpace(i, s); + String val = s.substring(i, endOfVal); + return Integer.parseInt(val); + } + + protected int getMonthNumber(String s) { + Integer integer = monthMap.get(s); + + if (integer == null) { + return -1; + } + + return integer; + } + + protected int getDayOfWeekNumber(String s) { + Integer integer = dayMap.get(s); + + if (integer == null) { + return -1; + } + + return integer; + } + + //////////////////////////////////////////////////////////////////////////// + // + // Computation Functions + // + //////////////////////////////////////////////////////////////////////////// + + public Date getTimeAfter(Date afterTime) { + + // Computation is based on Gregorian year only. + Calendar cl = new GregorianCalendar(getTimeZone()); + + // move ahead one second, since we're computing the time *after* the + // given time + afterTime = new Date(afterTime.getTime() + 1000); + // CronTrigger does not deal with milliseconds + cl.setTime(afterTime); + cl.set(Calendar.MILLISECOND, 0); + + boolean gotOne = false; + // loop until we've computed the next time, or we've past the endTime + while (!gotOne) { + + //if (endTime != null && cl.getTime().after(endTime)) return null; + if(cl.get(Calendar.YEAR) > 2999) { // prevent endless loop... + return null; + } + + SortedSet st = null; + int t = 0; + + int sec = cl.get(Calendar.SECOND); + int min = cl.get(Calendar.MINUTE); + + // get second................................................. + st = seconds.tailSet(sec); + if (st != null && st.size() != 0) { + sec = st.first(); + } else { + sec = seconds.first(); + min++; + cl.set(Calendar.MINUTE, min); + } + cl.set(Calendar.SECOND, sec); + + min = cl.get(Calendar.MINUTE); + int hr = cl.get(Calendar.HOUR_OF_DAY); + t = -1; + + // get minute................................................. 
+ st = minutes.tailSet(min); + if (st != null && st.size() != 0) { + t = min; + min = st.first(); + } else { + min = minutes.first(); + hr++; + } + if (min != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, min); + setCalendarHour(cl, hr); + continue; + } + cl.set(Calendar.MINUTE, min); + + hr = cl.get(Calendar.HOUR_OF_DAY); + int day = cl.get(Calendar.DAY_OF_MONTH); + t = -1; + + // get hour................................................... + st = hours.tailSet(hr); + if (st != null && st.size() != 0) { + t = hr; + hr = st.first(); + } else { + hr = hours.first(); + day++; + } + if (hr != t) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + setCalendarHour(cl, hr); + continue; + } + cl.set(Calendar.HOUR_OF_DAY, hr); + + day = cl.get(Calendar.DAY_OF_MONTH); + int mon = cl.get(Calendar.MONTH) + 1; + // '+ 1' because calendar is 0-based for this field, and we are + // 1-based + t = -1; + int tmon = mon; + + // get day................................................... + boolean dayOfMSpec = !daysOfMonth.contains(NO_SPEC); + boolean dayOfWSpec = !daysOfWeek.contains(NO_SPEC); + if (dayOfMSpec && !dayOfWSpec) { // get day by day of month rule + st = daysOfMonth.tailSet(day); + if (lastdayOfMonth) { + if(!nearestWeekday) { + t = day; + day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + day -= lastdayOffset; + if(t > day) { + mon++; + if(mon > 12) { + mon = 1; + tmon = 3333; // ensure test of mon != tmon further below fails + cl.add(Calendar.YEAR, 1); + } + day = 1; + } + } else { + t = day; + day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + day -= lastdayOffset; + + Calendar tcal = Calendar.getInstance(getTimeZone()); + tcal.set(Calendar.SECOND, 0); + tcal.set(Calendar.MINUTE, 0); + tcal.set(Calendar.HOUR_OF_DAY, 0); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + + int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + int dow = tcal.get(Calendar.DAY_OF_WEEK); + + if(dow == Calendar.SATURDAY && day == 1) { + day += 2; + } else if(dow == Calendar.SATURDAY) { + day -= 1; + } else if(dow == Calendar.SUNDAY && day == ldom) { + day -= 2; + } else if(dow == Calendar.SUNDAY) { + day += 1; + } + + tcal.set(Calendar.SECOND, sec); + tcal.set(Calendar.MINUTE, min); + tcal.set(Calendar.HOUR_OF_DAY, hr); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + Date nTime = tcal.getTime(); + if(nTime.before(afterTime)) { + day = 1; + mon++; + } + } + } else if(nearestWeekday) { + t = day; + day = daysOfMonth.first(); + + Calendar tcal = Calendar.getInstance(getTimeZone()); + tcal.set(Calendar.SECOND, 0); + tcal.set(Calendar.MINUTE, 0); + tcal.set(Calendar.HOUR_OF_DAY, 0); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + + int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + int dow = tcal.get(Calendar.DAY_OF_WEEK); + + if(dow == Calendar.SATURDAY && day == 1) { + day += 2; + } else if(dow == Calendar.SATURDAY) { + day -= 1; + } else if(dow == Calendar.SUNDAY && day == ldom) { + day -= 2; + } else if(dow == Calendar.SUNDAY) { + day += 1; + } + + + tcal.set(Calendar.SECOND, sec); + tcal.set(Calendar.MINUTE, min); + tcal.set(Calendar.HOUR_OF_DAY, hr); + tcal.set(Calendar.DAY_OF_MONTH, day); + tcal.set(Calendar.MONTH, mon - 1); + Date nTime = tcal.getTime(); + if(nTime.before(afterTime)) { + day = daysOfMonth.first(); + mon++; + } + } else if 
(st != null && st.size() != 0) { + t = day; + day = st.first(); + // make sure we don't over-run a short month, such as february + int lastDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + if (day > lastDay) { + day = daysOfMonth.first(); + mon++; + } + } else { + day = daysOfMonth.first(); + mon++; + } + + if (day != t || mon != tmon) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' because calendar is 0-based for this field, and we + // are 1-based + continue; + } + } else if (dayOfWSpec && !dayOfMSpec) { // get day by day of week rule + if (lastdayOfWeek) { // are we looking for the last XXX day of + // the month? + int dow = daysOfWeek.first(); // desired + // d-o-w + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } + if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + + if (day + daysToAdd > lDay) { // did we already miss the + // last one? + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon); + // no '- 1' here because we are promoting the month + continue; + } + + // find date of last occurrence of this day in this month... + while ((day + daysToAdd + 7) <= lDay) { + daysToAdd += 7; + } + + day += daysToAdd; + + if (daysToAdd > 0) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' here because we are not promoting the month + continue; + } + + } else if (nthdayOfWeek != 0) { + // are we looking for the Nth XXX day in the month? + int dow = daysOfWeek.first(); // desired + // d-o-w + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } else if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + boolean dayShifted = false; + if (daysToAdd > 0) { + dayShifted = true; + } + + day += daysToAdd; + int weekOfMonth = day / 7; + if (day % 7 > 0) { + weekOfMonth++; + } + + daysToAdd = (nthdayOfWeek - weekOfMonth) * 7; + day += daysToAdd; + if (daysToAdd < 0 + || day > getLastDayOfMonth(mon, cl + .get(Calendar.YEAR))) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, 1); + cl.set(Calendar.MONTH, mon); + // no '- 1' here because we are promoting the month + continue; + } else if (daysToAdd > 0 || dayShifted) { + cl.set(Calendar.SECOND, 0); + cl.set(Calendar.MINUTE, 0); + cl.set(Calendar.HOUR_OF_DAY, 0); + cl.set(Calendar.DAY_OF_MONTH, day); + cl.set(Calendar.MONTH, mon - 1); + // '- 1' here because we are NOT promoting the month + continue; + } + } else { + int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w + int dow = daysOfWeek.first(); // desired + // d-o-w + st = daysOfWeek.tailSet(cDow); + if (st != null && st.size() > 0) { + dow = st.first(); + } + + int daysToAdd = 0; + if (cDow < dow) { + daysToAdd = dow - cDow; + } + if (cDow > dow) { + daysToAdd = dow + (7 - cDow); + } + + int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + + if (day + daysToAdd > lDay) { // will we pass the end of + // the month? 
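+                        // Editor's worked example (added in review, not in the original
+                        // patch): with Calendar's 1=SUNDAY..7=SATURDAY numbering, if today
+                        // is Friday (cDow=6) and the rule wants Monday (dow=2), then
+                        // cDow > dow, so daysToAdd = dow + (7 - cDow) = 2 + (7 - 6) = 3,
+                        // i.e. Friday + 3 days = Monday. When day + daysToAdd overruns the
+                        // month (this branch), the calendar is rolled into the next month
+                        // and the outer loop retries.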
+                        cl.set(Calendar.SECOND, 0);
+                        cl.set(Calendar.MINUTE, 0);
+                        cl.set(Calendar.HOUR_OF_DAY, 0);
+                        cl.set(Calendar.DAY_OF_MONTH, 1);
+                        cl.set(Calendar.MONTH, mon);
+                        // no '- 1' here because we are promoting the month
+                        continue;
+                    } else if (daysToAdd > 0) { // are we switching days?
+                        cl.set(Calendar.SECOND, 0);
+                        cl.set(Calendar.MINUTE, 0);
+                        cl.set(Calendar.HOUR_OF_DAY, 0);
+                        cl.set(Calendar.DAY_OF_MONTH, day + daysToAdd);
+                        cl.set(Calendar.MONTH, mon - 1);
+                        // '- 1' because calendar is 0-based for this field,
+                        // and we are 1-based
+                        continue;
+                    }
+                }
+            } else { // both day-of-week AND day-of-month were specified
+                throw new UnsupportedOperationException(
+                        "Support for specifying both a day-of-week AND a day-of-month parameter is not implemented.");
+            }
+            cl.set(Calendar.DAY_OF_MONTH, day);
+
+            mon = cl.get(Calendar.MONTH) + 1;
+            // '+ 1' because calendar is 0-based for this field, and we are
+            // 1-based
+            int year = cl.get(Calendar.YEAR);
+            t = -1;
+
+            // test for expressions that never generate a valid fire date,
+            // but keep looping...
+            if (year > MAX_YEAR) {
+                return null;
+            }
+
+            // get month...................................................
+            st = months.tailSet(mon);
+            if (st != null && st.size() != 0) {
+                t = mon;
+                mon = st.first();
+            } else {
+                mon = months.first();
+                year++;
+            }
+            if (mon != t) {
+                cl.set(Calendar.SECOND, 0);
+                cl.set(Calendar.MINUTE, 0);
+                cl.set(Calendar.HOUR_OF_DAY, 0);
+                cl.set(Calendar.DAY_OF_MONTH, 1);
+                cl.set(Calendar.MONTH, mon - 1);
+                // '- 1' because calendar is 0-based for this field, and we are
+                // 1-based
+                cl.set(Calendar.YEAR, year);
+                continue;
+            }
+            cl.set(Calendar.MONTH, mon - 1);
+            // '- 1' because calendar is 0-based for this field, and we are
+            // 1-based
+
+            year = cl.get(Calendar.YEAR);
+            t = -1;
+
+            // get year...................................................
+            st = years.tailSet(year);
+            if (st != null && st.size() != 0) {
+                t = year;
+                year = st.first();
+            } else {
+                return null; // ran out of years...
+            }
+
+            if (year != t) {
+                cl.set(Calendar.SECOND, 0);
+                cl.set(Calendar.MINUTE, 0);
+                cl.set(Calendar.HOUR_OF_DAY, 0);
+                cl.set(Calendar.DAY_OF_MONTH, 1);
+                cl.set(Calendar.MONTH, 0); // January; this field is 0-based
+                cl.set(Calendar.YEAR, year);
+                continue;
+            }
+            cl.set(Calendar.YEAR, year);
+
+            gotOne = true;
+        } // while (!gotOne)
+
+        return cl.getTime();
+    }
+
+    /**
+     * Advance the calendar to the particular hour paying particular attention
+     * to daylight saving problems.
+     *
+     * @param cal the calendar to operate on
+     * @param hour the hour to set
+     */
+    protected void setCalendarHour(Calendar cal, int hour) {
+        cal.set(Calendar.HOUR_OF_DAY, hour);
+        if (cal.get(Calendar.HOUR_OF_DAY) != hour && hour != 24) {
+            cal.set(Calendar.HOUR_OF_DAY, hour + 1);
+        }
+    }
+
+    /**
+     * NOT YET IMPLEMENTED: Returns the time before the given time
+     * that the CronExpression matches.
+     */
+    public Date getTimeBefore(Date endTime) {
+        // FUTURE_TODO: implement QUARTZ-423
+        return null;
+    }
+
+    /**
+     * NOT YET IMPLEMENTED: Returns the final time that the
+     * CronExpression will match.
+     */
+    public Date getFinalFireTime() {
+        // FUTURE_TODO: implement QUARTZ-423
+        return null;
+    }
+
+    protected boolean isLeapYear(int year) {
+        return ((year % 4 == 0 && year % 100 != 0) || (year % 400 == 0));
+    }
+
+    protected int getLastDayOfMonth(int monthNum, int year) {
+
+        switch (monthNum) {
+            case 1:
+                return 31;
+            case 2:
+                return (isLeapYear(year)) ?
29 : 28; + case 3: + return 31; + case 4: + return 30; + case 5: + return 31; + case 6: + return 30; + case 7: + return 31; + case 8: + return 31; + case 9: + return 30; + case 10: + return 31; + case 11: + return 30; + case 12: + return 31; + default: + throw new IllegalArgumentException("Illegal month number: " + + monthNum); + } + } + + + private void readObject(java.io.ObjectInputStream stream) + throws java.io.IOException, ClassNotFoundException { + + stream.defaultReadObject(); + try { + buildExpression(cronExpression); + } catch (Exception ignore) { + } // never happens + } + + @Override + @Deprecated + public Object clone() { + return new CronExpression(this); + } +} + +class ValueSet { + public int value; + + public int pos; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/handler/AESEncryptHandler.java b/czsj-system/src/main/java/com/czsj/bigdata/core/handler/AESEncryptHandler.java new file mode 100644 index 0000000..4cf6bb6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/handler/AESEncryptHandler.java @@ -0,0 +1,50 @@ +package com.czsj.bigdata.core.handler; + + +import com.czsj.bigdata.util.AESUtil; +import org.apache.commons.lang3.StringUtils; +import org.apache.ibatis.type.BaseTypeHandler; +import org.apache.ibatis.type.JdbcType; +import org.apache.ibatis.type.MappedTypes; + +import java.sql.CallableStatement; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +/** + * @author water + * @date 20-03-17 下午5:38 + */ +@MappedTypes({String.class}) +public class AESEncryptHandler extends BaseTypeHandler { + + + @Override + public void setNonNullParameter(PreparedStatement ps, int i, String parameter, JdbcType jdbcType) throws SQLException { + if(StringUtils.isNotBlank(parameter)){ + ps.setString(i, AESUtil.encrypt(parameter)); + }else{ + ps.setString(i, null); + } + } + + @Override + public String getNullableResult(ResultSet rs, String columnName) throws SQLException { + String columnValue = rs.getString(columnName); + return AESUtil.decrypt(columnValue); + } + + @Override + public String getNullableResult(ResultSet rs, int columnIndex) throws SQLException { + String columnValue = rs.getString(columnIndex); + return AESUtil.decrypt(columnValue); + } + + @Override + public String getNullableResult(CallableStatement cs, int columnIndex) + throws SQLException { + String columnValue = cs.getString(columnIndex); + return AESUtil.decrypt(columnValue); + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/kill/KillJob.java b/czsj-system/src/main/java/com/czsj/bigdata/core/kill/KillJob.java new file mode 100644 index 0000000..3a9cdc3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/kill/KillJob.java @@ -0,0 +1,42 @@ +package com.czsj.bigdata.core.kill; + + + +import com.czsj.bigdata.core.trigger.JobTrigger; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; +import com.czsj.core.enums.ExecutorBlockStrategyEnum; +import com.czsj.core.glue.GlueTypeEnum; + +import java.util.Date; + +/** + * datax-job trigger + * Created by jingwk on 2019/12/15. 
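+ *
+ * Editor's usage sketch (added in review; the log id, executor address and
+ * process id below are made-up values):
+ *
+ *   ReturnT<String> ret = KillJob.trigger(123L, new Date(), "http://127.0.0.1:9999/", "4711");
+ *   // ret.getCode() == ReturnT.SUCCESS_CODE means the kill request reached the executor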
+ */ +public class KillJob { + + /** + * @param logId + * @param address + * @param processId + */ + public static ReturnT trigger(long logId, Date triggerTime, String address, String processId) { + ReturnT triggerResult; + TriggerParam triggerParam = new TriggerParam(); + triggerParam.setJobId(-1); + triggerParam.setExecutorHandler("killJobHandler"); + triggerParam.setProcessId(processId); + triggerParam.setLogId(logId); + triggerParam.setGlueType(GlueTypeEnum.BEAN.getDesc()); + triggerParam.setExecutorBlockStrategy(ExecutorBlockStrategyEnum.SERIAL_EXECUTION.getTitle()); + triggerParam.setLogDateTime(triggerTime.getTime()); + if (address != null) { + triggerResult = JobTrigger.runExecutor(triggerParam, address); + } else { + triggerResult = new ReturnT<>(ReturnT.FAIL_CODE, null); + } + return triggerResult; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouteStrategyEnum.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouteStrategyEnum.java new file mode 100644 index 0000000..62d1954 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouteStrategyEnum.java @@ -0,0 +1,49 @@ +package com.czsj.bigdata.core.route; + + +import com.czsj.bigdata.core.route.strategy.*; +import com.czsj.bigdata.core.util.I18nUtil; + +/** + * Created by xuxueli on 17/3/10. + */ +public enum ExecutorRouteStrategyEnum { + + FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()), + LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()), + ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()), + RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()), + CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()), + LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()), + LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()), + FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()), + BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()), + SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null); + + ExecutorRouteStrategyEnum(String title, ExecutorRouter router) { + this.title = title; + this.router = router; + } + + private String title; + private ExecutorRouter router; + + public String getTitle() { + return title; + } + public ExecutorRouter getRouter() { + return router; + } + + public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem){ + if (name != null) { + for (ExecutorRouteStrategyEnum item: ExecutorRouteStrategyEnum.values()) { + if (item.name().equals(name)) { + return item; + } + } + } + return defaultItem; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouter.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouter.java new file mode 100644 index 0000000..e41138a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/ExecutorRouter.java @@ -0,0 +1,24 @@ +package com.czsj.bigdata.core.route; + +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; + +/** + * Created by xuxueli on 17/3/10. 
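+ *
+ * Editor's sketch (added in review, not part of the patch): a custom strategy
+ * only needs to subclass this type and return the chosen address in ReturnT.content:
+ *
+ *   public class PickFirstRouter extends ExecutorRouter {
+ *       @Override
+ *       public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+ *           return new ReturnT<String>(addressList.get(0)); // naive: always the first executor
+ *       }
+ *   }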
+ */ +public abstract class ExecutorRouter { + protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class); + + /** + * route address + * + * @param addressList + * @return ReturnT.content=address + */ + public abstract ReturnT route(TriggerParam triggerParam, List addressList); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteBusyover.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteBusyover.java new file mode 100644 index 0000000..d747542 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteBusyover.java @@ -0,0 +1,49 @@ +package com.czsj.bigdata.core.route.strategy; + + + +import com.czsj.bigdata.core.route.ExecutorRouter; +import com.czsj.bigdata.core.scheduler.JobScheduler; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.core.biz.ExecutorBiz; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + +import java.util.List; + +/** + * Created by xuxueli on 17/3/10. + */ +public class ExecutorRouteBusyover extends ExecutorRouter { + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList) { + StringBuilder idleBeatResultSB = new StringBuilder(); + for (String address : addressList) { + // beat + ReturnT idleBeatResult = null; + try { + ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address); + idleBeatResult = executorBiz.idleBeat(triggerParam.getJobId()); + } catch (Exception e) { + logger.error(e.getMessage(), e); + idleBeatResult = new ReturnT(ReturnT.FAIL_CODE, ""+e ); + } + idleBeatResultSB.append( (idleBeatResultSB.length()>0)?"
<br><br>":"")
+                    .append(I18nUtil.getString("jobconf_idleBeat") + ":")
+                    .append("<br>address:").append(address)
+                    .append("<br>code:").append(idleBeatResult.getCode())
+                    .append("<br>
msg:").append(idleBeatResult.getMsg()); + + // beat success + if (idleBeatResult.getCode() == ReturnT.SUCCESS_CODE) { + idleBeatResult.setMsg(idleBeatResultSB.toString()); + idleBeatResult.setContent(address); + return idleBeatResult; + } + } + + return new ReturnT(ReturnT.FAIL_CODE, idleBeatResultSB.toString()); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteConsistentHash.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteConsistentHash.java new file mode 100644 index 0000000..9d0d027 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteConsistentHash.java @@ -0,0 +1,86 @@ +package com.czsj.bigdata.core.route.strategy; + +import com.czsj.bigdata.core.route.ExecutorRouter; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + + +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; +import java.util.SortedMap; +import java.util.TreeMap; + +/** + * 分组下机器地址相同,不同JOB均匀散列在不同机器上,保证分组下机器分配JOB平均;且每个JOB固定调度其中一台机器; + * a、virtual node:解决不均衡问题 + * b、hash method replace hashCode:String的hashCode可能重复,需要进一步扩大hashCode的取值范围 + * Created by xuxueli on 17/3/10. + */ +public class ExecutorRouteConsistentHash extends ExecutorRouter { + + private static int VIRTUAL_NODE_NUM = 100; + + /** + * get hash code on 2^32 ring (md5散列的方式计算hash值) + * @param key + * @return + */ + private static long hash(String key) { + + // md5 byte + MessageDigest md5; + try { + md5 = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("MD5 not supported", e); + } + md5.reset(); + byte[] keyBytes = null; + try { + keyBytes = key.getBytes("UTF-8"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("Unknown string :" + key, e); + } + + md5.update(keyBytes); + byte[] digest = md5.digest(); + + // hash code, Truncate to 32-bits + long hashCode = ((long) (digest[3] & 0xFF) << 24) + | ((long) (digest[2] & 0xFF) << 16) + | ((long) (digest[1] & 0xFF) << 8) + | (digest[0] & 0xFF); + + long truncateHashCode = hashCode & 0xffffffffL; + return truncateHashCode; + } + + public String hashJob(int jobId, List addressList) { + + // ------A1------A2-------A3------ + // -----------J1------------------ + TreeMap addressRing = new TreeMap(); + for (String address: addressList) { + for (int i = 0; i < VIRTUAL_NODE_NUM; i++) { + long addressHash = hash("SHARD-" + address + "-NODE-" + i); + addressRing.put(addressHash, address); + } + } + + long jobHash = hash(String.valueOf(jobId)); + SortedMap lastRing = addressRing.tailMap(jobHash); + if (!lastRing.isEmpty()) { + return lastRing.get(lastRing.firstKey()); + } + return addressRing.firstEntry().getValue(); + } + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList) { + String address = hashJob(triggerParam.getJobId(), addressList); + return new ReturnT(address); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFailover.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFailover.java new file mode 100644 index 0000000..dca7603 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFailover.java @@ -0,0 +1,50 @@ +package com.czsj.bigdata.core.route.strategy; + + + +import com.czsj.bigdata.core.route.ExecutorRouter; +import 
com.czsj.bigdata.core.scheduler.JobScheduler; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.core.biz.ExecutorBiz; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + +import java.util.List; + +/** + * Created by xuxueli on 17/3/10. + */ +public class ExecutorRouteFailover extends ExecutorRouter { + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList) { + + StringBuilder beatResultSB = new StringBuilder(); + for (String address : addressList) { + // beat + ReturnT beatResult = null; + try { + ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address); + beatResult = executorBiz.beat(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + beatResult = new ReturnT(ReturnT.FAIL_CODE, ""+e ); + } + beatResultSB.append( (beatResultSB.length()>0)?"
<br><br>":"")
+                    .append(I18nUtil.getString("jobconf_beat") + ":")
+                    .append("<br>address:").append(address)
+                    .append("<br>code:").append(beatResult.getCode())
+                    .append("<br>
msg:").append(beatResult.getMsg()); + + // beat success + if (beatResult.getCode() == ReturnT.SUCCESS_CODE) { + + beatResult.setMsg(beatResultSB.toString()); + beatResult.setContent(address); + return beatResult; + } + } + return new ReturnT(ReturnT.FAIL_CODE, beatResultSB.toString()); + + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFirst.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFirst.java new file mode 100644 index 0000000..b3e5425 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteFirst.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.core.route.strategy; + + + +import com.czsj.bigdata.core.route.ExecutorRouter; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + +import java.util.List; + +/** + * Created by xuxueli on 17/3/10. + */ +public class ExecutorRouteFirst extends ExecutorRouter { + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList){ + return new ReturnT(addressList.get(0)); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLFU.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLFU.java new file mode 100644 index 0000000..e47a40f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLFU.java @@ -0,0 +1,79 @@ +package com.czsj.bigdata.core.route.strategy; + +import com.czsj.bigdata.core.route.ExecutorRouter; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +/** + * 单个JOB对应的每个执行器,使用频率最低的优先被选举 + * a(*)、LFU(Least Frequently Used):最不经常使用,频率/次数 + * b、LRU(Least Recently Used):最近最久未使用,时间 + * + * Created by xuxueli on 17/3/10. 
+ */
+public class ExecutorRouteLFU extends ExecutorRouter {
+
+    private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
+    private static long CACHE_VALID_TIME = 0;
+
+    public String route(int jobId, List<String> addressList) {
+
+        // cache clear
+        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
+            jobLfuMap.clear();
+            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
+        }
+
+        // lfu item init
+        HashMap<String, Integer> lfuItemMap = jobLfuMap.get(jobId);     // ordering by key could use a TreeMap with a Comparator; ordering by value has to go through an ArrayList for now
+        if (lfuItemMap == null) {
+            lfuItemMap = new HashMap<String, Integer>();
+            jobLfuMap.putIfAbsent(jobId, lfuItemMap);   // putIfAbsent, so a map registered by another thread is not overwritten
+        }
+
+        // put new
+        for (String address: addressList) {
+            if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) > 1000000) {
+                lfuItemMap.put(address, new Random().nextInt(addressList.size()));  // seed with a random count so fresh addresses do not take all the first hits
+            }
+        }
+        // remove old
+        List<String> delKeys = new ArrayList<>();
+        for (String existKey: lfuItemMap.keySet()) {
+            if (!addressList.contains(existKey)) {
+                delKeys.add(existKey);
+            }
+        }
+        if (delKeys.size() > 0) {
+            for (String delKey: delKeys) {
+                lfuItemMap.remove(delKey);
+            }
+        }
+
+        // pick the address with the lowest use count
+        List<Map.Entry<String, Integer>> lfuItemList = new ArrayList<Map.Entry<String, Integer>>(lfuItemMap.entrySet());
+        Collections.sort(lfuItemList, new Comparator<Map.Entry<String, Integer>>() {
+            @Override
+            public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
+                return o1.getValue().compareTo(o2.getValue());
+            }
+        });
+
+        Map.Entry<String, Integer> addressItem = lfuItemList.get(0);
+        addressItem.setValue(addressItem.getValue() + 1);
+
+        return addressItem.getKey();
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = route(triggerParam.getJobId(), addressList);
+        return new ReturnT<String>(address);
+    }
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLRU.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLRU.java
new file mode 100644
index 0000000..93f458b
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteLRU.java
@@ -0,0 +1,76 @@
+package com.czsj.bigdata.core.route.strategy;
+
+import com.czsj.bigdata.core.route.ExecutorRouter;
+import com.czsj.core.biz.model.ReturnT;
+import com.czsj.core.biz.model.TriggerParam;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Per job, the executor that has gone unused for the longest time is elected first.
+ * a. LFU (Least Frequently Used): lowest use frequency / count.
+ * b(*). LRU (Least Recently Used): longest time since last use (the strategy implemented here).
+ *
+ * Created by xuxueli on 17/3/10.
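+ *
+ * Editor's illustration (added in review) of the access-ordered LinkedHashMap
+ * this strategy relies on:
+ *
+ *   LinkedHashMap<String, String> m = new LinkedHashMap<>(16, 0.75f, true);
+ *   m.put("A", "A"); m.put("B", "B");
+ *   m.get("A");                                // access moves "A" to the tail
+ *   m.entrySet().iterator().next().getKey();   // head (eldest) is now "B"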
+ */
+public class ExecutorRouteLRU extends ExecutorRouter {
+
+    private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
+    private static long CACHE_VALID_TIME = 0;
+
+    public String route(int jobId, List<String> addressList) {
+
+        // cache clear
+        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
+            jobLRUMap.clear();
+            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
+        }
+
+        // init lru
+        LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
+        if (lruItem == null) {
+            /*
+             * LinkedHashMap:
+             * a. accessOrder: true = iteration in access order (entries re-ordered on get/put); false = insertion order.
+             * b. removeEldestEntry: invoked on insert; returning true evicts the eldest entry. Wrapping LinkedHashMap and
+             *    overriding it (e.g. against a maximum capacity) yields a fixed-size LRU cache.
+             */
+            lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
+            jobLRUMap.putIfAbsent(jobId, lruItem);
+        }
+
+        // put new
+        for (String address: addressList) {
+            if (!lruItem.containsKey(address)) {
+                lruItem.put(address, address);
+            }
+        }
+        // remove old
+        List<String> delKeys = new ArrayList<>();
+        for (String existKey: lruItem.keySet()) {
+            if (!addressList.contains(existKey)) {
+                delKeys.add(existKey);
+            }
+        }
+        if (delKeys.size() > 0) {
+            for (String delKey: delKeys) {
+                lruItem.remove(delKey);
+            }
+        }
+
+        // load: the head of the access-ordered map is the least recently used address;
+        // the get() also refreshes its recency, since it is about to be used
+        String eldestKey = lruItem.entrySet().iterator().next().getKey();
+        String eldestValue = lruItem.get(eldestKey);
+        return eldestValue;
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = route(triggerParam.getJobId(), addressList);
+        return new ReturnT<String>(address);
+    }
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRandom.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRandom.java
new file mode 100644
index 0000000..5104d43
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRandom.java
@@ -0,0 +1,23 @@
+package com.czsj.bigdata.core.route.strategy;
+
+import com.czsj.bigdata.core.route.ExecutorRouter;
+import com.czsj.core.biz.model.ReturnT;
+import com.czsj.core.biz.model.TriggerParam;
+
+import java.util.List;
+import java.util.Random;
+
+/**
+ * Created by xuxueli on 17/3/10.
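+ *
+ * Editor's note (added in review): the shared java.util.Random below is
+ * thread-safe but can contend under heavy trigger load; an equivalent pick on
+ * Java 7+ would be
+ *   addressList.get(ThreadLocalRandom.current().nextInt(addressList.size()));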
+ */ +public class ExecutorRouteRandom extends ExecutorRouter { + + private static Random localRandom = new Random(); + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList) { + String address = addressList.get(localRandom.nextInt(addressList.size())); + return new ReturnT(address); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRound.java b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRound.java new file mode 100644 index 0000000..b4c91a1 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/route/strategy/ExecutorRouteRound.java @@ -0,0 +1,40 @@ +package com.czsj.bigdata.core.route.strategy; + +import com.czsj.bigdata.core.route.ExecutorRouter; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.biz.model.TriggerParam; + + +import java.util.List; +import java.util.Random; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +/** + * Created by xuxueli on 17/3/10. + */ +public class ExecutorRouteRound extends ExecutorRouter { + + private static ConcurrentMap routeCountEachJob = new ConcurrentHashMap(); + private static long CACHE_VALID_TIME = 0; + private static int count(int jobId) { + // cache clear + if (System.currentTimeMillis() > CACHE_VALID_TIME) { + routeCountEachJob.clear(); + CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24; + } + + // count++ + Integer count = routeCountEachJob.get(jobId); + count = (count==null || count>1000000)?(new Random().nextInt(100)):++count; // 初始化时主动Random一次,缓解首次压力 + routeCountEachJob.put(jobId, count); + return count; + } + + @Override + public ReturnT route(TriggerParam triggerParam, List addressList) { + String address = addressList.get(count(triggerParam.getJobId())%addressList.size()); + return new ReturnT(address); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/scheduler/JobScheduler.java b/czsj-system/src/main/java/com/czsj/bigdata/core/scheduler/JobScheduler.java new file mode 100644 index 0000000..9706ada --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/scheduler/JobScheduler.java @@ -0,0 +1,114 @@ +package com.czsj.bigdata.core.scheduler; + + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.core.thread.*; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.core.biz.ExecutorBiz; +import com.czsj.core.enums.ExecutorBlockStrategyEnum; +import com.czsj.rpc.remoting.invoker.call.CallType; +import com.czsj.rpc.remoting.invoker.reference.XxlRpcReferenceBean; +import com.czsj.rpc.remoting.invoker.route.LoadBalance; +import com.czsj.rpc.remoting.net.impl.netty_http.client.NettyHttpClient; +import com.czsj.rpc.serialize.impl.HessianSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +/** + * @author xuxueli 2018-10-28 00:18:17 + */ + +public class JobScheduler { + private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class); + + + public void init() throws Exception { + // init i18n + initI18n(); + + // admin registry monitor run + JobRegistryMonitorHelper.getInstance().start(); + + // admin monitor run + JobFailMonitorHelper.getInstance().start(); + + // admin trigger pool start + JobTriggerPoolHelper.toStart(); + + // admin log report start + JobLogReportHelper.getInstance().start(); + + // start-schedule + JobScheduleHelper.getInstance().start(); + + 
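+        /*
+         * Editor's sketch (added in review, not part of the original patch):
+         * how this class is typically wired; the bean method name is illustrative.
+         *
+         *   @Bean(initMethod = "init", destroyMethod = "destroy")
+         *   public JobScheduler jobScheduler() {
+         *       return new JobScheduler();
+         *   }
+         *
+         * init() starts the helper threads in dependency order above, and
+         * destroy() below stops them in the reverse order.
+         */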
logger.info(">>>>>>>>> init czsj-ground admin success."); + } + + + public void destroy() throws Exception { + + // stop-schedule + JobScheduleHelper.getInstance().toStop(); + + // admin log report stop + JobLogReportHelper.getInstance().toStop(); + + // admin trigger pool stop + JobTriggerPoolHelper.toStop(); + + // admin monitor stop + JobFailMonitorHelper.getInstance().toStop(); + + // admin registry stop + JobRegistryMonitorHelper.getInstance().toStop(); + + } + + // ---------------------- I18n ---------------------- + + private void initI18n() { + for (ExecutorBlockStrategyEnum item : ExecutorBlockStrategyEnum.values()) { + item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name()))); + } + } + + // ---------------------- executor-client ---------------------- + private static ConcurrentMap executorBizRepository = new ConcurrentHashMap<>(); + + public static ExecutorBiz getExecutorBiz(String address) throws Exception { + // valid + if (address == null || address.trim().length() == 0) { + return null; + } + + // load-cache + address = address.trim(); + ExecutorBiz executorBiz = executorBizRepository.get(address); + if (executorBiz != null) { + return executorBiz; + } + + // set-cache + XxlRpcReferenceBean referenceBean = new XxlRpcReferenceBean(); + referenceBean.setClient(NettyHttpClient.class); + referenceBean.setSerializer(HessianSerializer.class); + referenceBean.setCallType(CallType.SYNC); + referenceBean.setLoadBalance(LoadBalance.ROUND); + referenceBean.setIface(ExecutorBiz.class); + referenceBean.setVersion(null); + referenceBean.setTimeout(3000); + referenceBean.setAddress(address); + referenceBean.setAccessToken(JobAdminConfig.getAdminConfig().getAccessToken()); + referenceBean.setInvokeCallback(null); + referenceBean.setInvokerFactory(null); + + executorBiz = (ExecutorBiz) referenceBean.getObject(); + + executorBizRepository.put(address, executorBiz); + return executorBiz; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobFailMonitorHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobFailMonitorHelper.java new file mode 100644 index 0000000..36a30f8 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobFailMonitorHelper.java @@ -0,0 +1,201 @@ +package com.czsj.bigdata.core.thread; + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.core.trigger.TriggerTypeEnum; +import com.czsj.bigdata.core.util.EmailUtil; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.bigdata.entity.JobGroup; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.bigdata.entity.JobLog; +import com.czsj.core.biz.model.ReturnT; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.mail.internet.MimeMessage; +import java.text.MessageFormat; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +/** + * job monitor instance + * + * @author xuxueli 2015-9-1 18:05:56 + */ +public class JobFailMonitorHelper { + private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class); + + private static JobFailMonitorHelper instance = new JobFailMonitorHelper(); + public static JobFailMonitorHelper getInstance(){ + return instance; + } + + // ---------------------- monitor ---------------------- + + private Thread monitorThread; + private volatile boolean toStop = false; + public void start(){ + monitorThread = new Thread(new Runnable() { + + @Override + public void run() 
{ + + // monitor + while (!toStop) { + try { + + List failLogIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findFailJobLogIds(1000); + if (failLogIds!=null && !failLogIds.isEmpty()) { + for (long failLogId: failLogIds) { + + // lock log + int lockRet = JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, 0, -1); + if (lockRet < 1) { + continue; + } + JobLog log = JobAdminConfig.getAdminConfig().getJobLogMapper().load(failLogId); + JobInfo info = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(log.getJobId()); + + // 1、fail retry monitor + if (log.getExecutorFailRetryCount() > 0) { + JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam()); + String retryMsg = "
<br><br>>>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<<<br>
"; + log.setTriggerMsg(log.getTriggerMsg() + retryMsg); + JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(log); + } + + // 2、fail alarm monitor + int newAlarmStatus = 0; // 告警状态:0-默认、-1=锁定状态、1-无需告警、2-告警成功、3-告警失败 + if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) { + boolean alarmResult = true; + try { + alarmResult = failAlarm(info, log); + } catch (Exception e) { + alarmResult = false; + logger.error(e.getMessage(), e); + } + newAlarmStatus = alarmResult?2:3; + } else { + newAlarmStatus = 1; + } + + JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, -1, newAlarmStatus); + } + } + + } catch (Exception e) { + if (!toStop) { + logger.error(">>>>>>>>>>> czsj-ground, job fail monitor thread error:{0}", e); + } + } + + try { + TimeUnit.SECONDS.sleep(10); + } catch (Exception e) { + if (!toStop) { + logger.error(e.getMessage(), e); + } + } + + } + + logger.info(">>>>>>>>>>> czsj-ground, job fail monitor thread stop"); + + } + }); + monitorThread.setDaemon(true); + monitorThread.setName("czsj-ground, admin JobFailMonitorHelper"); + monitorThread.start(); + } + + public void toStop(){ + toStop = true; + // interrupt and wait + monitorThread.interrupt(); + try { + monitorThread.join(); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + + + // ---------------------- alarm ---------------------- + + // email alarm template + private static final String mailBodyTemplate = "
" + I18nUtil.getString("jobconf_monitor_detail") + ":" + + "\n" + + " " + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + "
"+ I18nUtil.getString("jobinfo_field_jobgroup") +""+ I18nUtil.getString("jobinfo_field_id") +""+ I18nUtil.getString("jobinfo_field_jobdesc") +""+ I18nUtil.getString("jobconf_monitor_alarm_title") +""+ I18nUtil.getString("jobconf_monitor_alarm_content") +"
{0}{1}{2}"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"{3}
"; + + /** + * fail alarm + * + * @param jobLog + */ + private boolean failAlarm(JobInfo info, JobLog jobLog){ + boolean alarmResult = true; + + // send monitor email + if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) { + + // alarmContent + String alarmContent = "Alarm Job LogId=" + jobLog.getId(); + if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) { + alarmContent += "
<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
+            }
+            if (jobLog.getHandleCode()>0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
+                alarmContent += "<br>
HandleCode=" + jobLog.getHandleMsg(); + } + + // email info + JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(Integer.valueOf(info.getJobGroup())); + String personal = I18nUtil.getString("admin_name_full"); + String title = I18nUtil.getString("jobconf_monitor"); + String content = MessageFormat.format(mailBodyTemplate, + group!=null?group.getTitle():"null", + info.getId(), + info.getJobDesc(), + alarmContent); + + Set emailSet = new HashSet(Arrays.asList(info.getAlarmEmail().split(","))); + for (String email: emailSet) { + + // make mail + try { + EmailUtil.send(JobAdminConfig.getAdminConfig().getEmailUserName(), JobAdminConfig.getAdminConfig().getEmailPassword(),JobAdminConfig.getAdminConfig().getEmailAuthorization(),email,title,content); + } catch (Exception e) { + logger.error(">>>>>>>>>>> czsj-ground, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e); + + alarmResult = false; + } + + } + } + + // do something, custom alarm strategy, such as sms + + + return alarmResult; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobLogReportHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobLogReportHelper.java new file mode 100644 index 0000000..a52c3cb --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobLogReportHelper.java @@ -0,0 +1,153 @@ +package com.czsj.bigdata.core.thread; + + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.entity.JobLogReport; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +/** + * job log report helper + * + * @author xuxueli 2019-11-22 + */ +public class JobLogReportHelper { + private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class); + + private static JobLogReportHelper instance = new JobLogReportHelper(); + public static JobLogReportHelper getInstance(){ + return instance; + } + + + private Thread logrThread; + private volatile boolean toStop = false; + public void start(){ + logrThread = new Thread(new Runnable() { + + @Override + public void run() { + + // last clean log time + long lastCleanLogTime = 0; + + + while (!toStop) { + + // 1、log-report refresh: refresh log report in 3 days + try { + + for (int i = 0; i < 3; i++) { + + // today + Calendar itemDay = Calendar.getInstance(); + itemDay.add(Calendar.DAY_OF_MONTH, -i); + itemDay.set(Calendar.HOUR_OF_DAY, 0); + itemDay.set(Calendar.MINUTE, 0); + itemDay.set(Calendar.SECOND, 0); + itemDay.set(Calendar.MILLISECOND, 0); + + Date todayFrom = itemDay.getTime(); + + itemDay.set(Calendar.HOUR_OF_DAY, 23); + itemDay.set(Calendar.MINUTE, 59); + itemDay.set(Calendar.SECOND, 59); + itemDay.set(Calendar.MILLISECOND, 999); + + Date todayTo = itemDay.getTime(); + + // refresh log-report every minute + JobLogReport xxlJobLogReport = new JobLogReport(); + xxlJobLogReport.setTriggerDay(todayFrom); + xxlJobLogReport.setRunningCount(0); + xxlJobLogReport.setSucCount(0); + xxlJobLogReport.setFailCount(0); + + Map triggerCountMap = JobAdminConfig.getAdminConfig().getJobLogMapper().findLogReport(todayFrom, todayTo); + if (triggerCountMap!=null && triggerCountMap.size()>0) { + int triggerDayCount = triggerCountMap.containsKey("triggerDayCount")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))):0; + int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning")? 
Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))):0; + int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))):0; + int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc; + + xxlJobLogReport.setRunningCount(triggerDayCountRunning); + xxlJobLogReport.setSucCount(triggerDayCountSuc); + xxlJobLogReport.setFailCount(triggerDayCountFail); + } + + // do refresh + int ret = JobAdminConfig.getAdminConfig().getJobLogReportMapper().update(xxlJobLogReport); + if (ret < 1) { + JobAdminConfig.getAdminConfig().getJobLogReportMapper().save(xxlJobLogReport); + } + } + + } catch (Exception e) { + if (!toStop) { + logger.error(">>>>>>>>>>> czsj-ground, job log report thread error:{}", e); + } + } + + // 2、log-clean: switch open & once each day + if (JobAdminConfig.getAdminConfig().getLogretentiondays()>0 + && System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) { + + // expire-time + Calendar expiredDay = Calendar.getInstance(); + expiredDay.add(Calendar.DAY_OF_MONTH, -1 * JobAdminConfig.getAdminConfig().getLogretentiondays()); + expiredDay.set(Calendar.HOUR_OF_DAY, 0); + expiredDay.set(Calendar.MINUTE, 0); + expiredDay.set(Calendar.SECOND, 0); + expiredDay.set(Calendar.MILLISECOND, 0); + Date clearBeforeTime = expiredDay.getTime(); + + // clean expired log + List logIds = null; + do { + logIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findClearLogIds(0, 0, clearBeforeTime, 0, 1000); + if (logIds!=null && logIds.size()>0) { + JobAdminConfig.getAdminConfig().getJobLogMapper().clearLog(logIds); + } + } while (logIds!=null && logIds.size()>0); + + // update clean time + lastCleanLogTime = System.currentTimeMillis(); + } + + try { + TimeUnit.MINUTES.sleep(1); + } catch (Exception e) { + if (!toStop) { + logger.error(e.getMessage(), e); + } + } + + } + + logger.info(">>>>>>>>>>> czsj-ground, job log report thread stop"); + + } + }); + logrThread.setDaemon(true); + logrThread.setName("czsj-ground, admin JobLogReportHelper"); + logrThread.start(); + } + + public void toStop(){ + toStop = true; + // interrupt and wait + logrThread.interrupt(); + try { + logrThread.join(); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobRegistryMonitorHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobRegistryMonitorHelper.java new file mode 100644 index 0000000..dc0d675 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobRegistryMonitorHelper.java @@ -0,0 +1,109 @@ +package com.czsj.bigdata.core.thread; + + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.entity.JobGroup; +import com.czsj.bigdata.entity.JobRegistry; +import com.czsj.core.enums.RegistryConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.TimeUnit; + +/** + * job registry instance + * @author xuxueli 2016-10-02 19:10:24 + */ +public class JobRegistryMonitorHelper { + private static Logger logger = LoggerFactory.getLogger(JobRegistryMonitorHelper.class); + + private static JobRegistryMonitorHelper instance = new JobRegistryMonitorHelper(); + public static JobRegistryMonitorHelper getInstance(){ + return instance; + } + + private Thread registryThread; + private volatile boolean toStop = false; + public void start(){ + registryThread = new Thread(() 
-> { + while (!toStop) { + try { + // auto registry group + List groupList = JobAdminConfig.getAdminConfig().getJobGroupMapper().findByAddressType(0); + if (groupList!=null && !groupList.isEmpty()) { + + // remove dead address (admin/executor) + List ids = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findDead(RegistryConfig.DEAD_TIMEOUT, new Date()); + if (ids!=null && ids.size()>0) { + JobAdminConfig.getAdminConfig().getJobRegistryMapper().removeDead(ids); + } + + // fresh online address (admin/executor) + HashMap> appAddressMap = new HashMap<>(); + List list = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findAll(RegistryConfig.DEAD_TIMEOUT, new Date()); + if (list != null) { + for (JobRegistry item: list) { + if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) { + String appName = item.getRegistryKey(); + List registryList = appAddressMap.get(appName); + if (registryList == null) { + registryList = new ArrayList<>(); + } + + if (!registryList.contains(item.getRegistryValue())) { + registryList.add(item.getRegistryValue()); + } + appAddressMap.put(appName, registryList); + } + } + } + + // fresh group address + for (JobGroup group: groupList) { + List registryList = appAddressMap.get(group.getAppName()); + String addressListStr = null; + if (registryList!=null && !registryList.isEmpty()) { + Collections.sort(registryList); + addressListStr = ""; + for (String item:registryList) { + addressListStr += item + ","; + } + addressListStr = addressListStr.substring(0, addressListStr.length()-1); + } + group.setAddressList(addressListStr); + JobAdminConfig.getAdminConfig().getJobGroupMapper().update(group); + } + } + } catch (Exception e) { + if (!toStop) { + logger.error(">>>>>>>>>>> czsj-ground, job registry monitor thread error:{}", e); + } + } + try { + TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT); + } catch (InterruptedException e) { + if (!toStop) { + logger.error(">>>>>>>>>>> czsj-ground, job registry monitor thread error:{}", e); + } + } + } + logger.info(">>>>>>>>>>> czsj-ground, job registry monitor thread stop"); + }); + registryThread.setDaemon(true); + registryThread.setName("czsj-ground, admin JobRegistryMonitorHelper"); + registryThread.start(); + } + + public void toStop(){ + toStop = true; + // interrupt and wait + registryThread.interrupt(); + try { + registryThread.join(); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobScheduleHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobScheduleHelper.java new file mode 100644 index 0000000..18dc931 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobScheduleHelper.java @@ -0,0 +1,350 @@ +package com.czsj.bigdata.core.thread; + + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.core.cron.CronExpression; +import com.czsj.bigdata.core.trigger.TriggerTypeEnum; +import com.czsj.bigdata.entity.JobInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.text.ParseException; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; + +/** + * @author xuxueli 2019-05-21 + */ +public class JobScheduleHelper { + private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class); + + private static JobScheduleHelper instance = new 
JobScheduleHelper(); + + public static JobScheduleHelper getInstance() { + return instance; + } + + public static final long PRE_READ_MS = 5000; // pre read + + private Thread scheduleThread; + private Thread ringThread; + private volatile boolean scheduleThreadToStop = false; + private volatile boolean ringThreadToStop = false; + private volatile static Map> ringData = new ConcurrentHashMap<>(); + + public void start() { + + // schedule thread + scheduleThread = new Thread(new Runnable() { + @Override + public void run() { + + try { + TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis() % 1000); + } catch (InterruptedException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + logger.info(">>>>>>>>> init czsj-ground admin scheduler success."); + + // pre-read count: treadpool-size * trigger-qps (each trigger cost 50ms, qps = 1000/50 = 20) + int preReadCount = (JobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20; + + while (!scheduleThreadToStop) { + + // Scan Job + long start = System.currentTimeMillis(); + + Connection conn = null; + Boolean connAutoCommit = null; + PreparedStatement preparedStatement = null; + + boolean preReadSuc = true; + try { + + conn = JobAdminConfig.getAdminConfig().getDataSource().getConnection(); + connAutoCommit = conn.getAutoCommit(); + conn.setAutoCommit(false); + preparedStatement = conn.prepareStatement("select * from job_lock where lock_name = 'schedule_lock' for update"); + preparedStatement.execute(); + + // tx start + + // 1、pre read + long nowTime = System.currentTimeMillis(); + List scheduleList = JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount); + if (scheduleList != null && scheduleList.size() > 0) { + // 2、push time-ring + for (JobInfo jobInfo : scheduleList) { + + // time-ring jump + if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) { + // 2.1、trigger-expire > 5s:pass && make next-trigger-time + logger.warn(">>>>>>>>>>> czsj-ground, schedule misfire, jobId = " + jobInfo.getId()); + // fresh next + refreshNextValidTime(jobInfo, new Date()); + } else if (nowTime > jobInfo.getTriggerNextTime()) { + // 2.2、trigger-expire < 5s:direct-trigger && make next-trigger-time + + // 1、trigger + JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null); + logger.debug(">>>>>>>>>>> czsj-ground, schedule push trigger : jobId = " + jobInfo.getId()); + + // 2、fresh next + refreshNextValidTime(jobInfo, new Date()); + + // next-trigger-time in 5s, pre-read again + if (jobInfo.getTriggerStatus() == 1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) { + + // 1、make ring second + int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60); + + // 2、push time ring + pushTimeRing(ringSecond, jobInfo.getId()); + + // 3、fresh next + refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime())); + + } + + } else { + // 2.3、trigger-pre-read:time-ring trigger && make next-trigger-time + + // 1、make ring second + int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60); + + // 2、push time ring + pushTimeRing(ringSecond, jobInfo.getId()); + + // 3、fresh next + refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime())); + + } + + } + + // 3、update trigger info + for (JobInfo jobInfo : scheduleList) { + JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleUpdate(jobInfo); + } + + } else { + preReadSuc = false; + } + + // tx stop + + + } 
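+                    /*
+                     * Editor's note (added in review): the pre-read above handles each
+                     * due job in one of three windows relative to now and PRE_READ_MS (5s):
+                     *   overdue by more than 5s -> misfire: skip it, only recompute the next time
+                     *   overdue by up to 5s     -> trigger immediately, then recompute
+                     *   due within the next 5s  -> park the job id in the time ring at slot
+                     *                              (triggerNextTime / 1000) % 60 for the ring thread
+                     */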
catch (Exception e) { + if (!scheduleThreadToStop) { + logger.error(">>>>>>>>>>> czsj-ground, JobScheduleHelper#scheduleThread error:{}", e); + } + } finally { + + // commit + if (conn != null) { + try { + conn.commit(); + } catch (SQLException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + try { + conn.setAutoCommit(connAutoCommit); + } catch (SQLException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + try { + conn.close(); + } catch (SQLException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + } + + // close PreparedStatement + if (null != preparedStatement) { + try { + preparedStatement.close(); + } catch (SQLException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + } + } + long cost = System.currentTimeMillis() - start; + + + // Wait seconds, align second + if (cost < 1000) { // scan-overtime, not wait + try { + // pre-read period: success > scan each second; fail > skip this period; + TimeUnit.MILLISECONDS.sleep((preReadSuc ? 1000 : PRE_READ_MS) - System.currentTimeMillis() % 1000); + } catch (InterruptedException e) { + if (!scheduleThreadToStop) { + logger.error(e.getMessage(), e); + } + } + } + + } + + logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper#scheduleThread stop"); + } + }); + scheduleThread.setDaemon(true); + scheduleThread.setName("czsj-ground, admin JobScheduleHelper#scheduleThread"); + scheduleThread.start(); + + + // ring thread + ringThread = new Thread(() -> { + + // align second + try { + TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000); + } catch (InterruptedException e) { + if (!ringThreadToStop) { + logger.error(e.getMessage(), e); + } + } + + while (!ringThreadToStop) { + + try { + // second data + List ringItemData = new ArrayList<>(); + int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // 避免处理耗时太长,跨过刻度,向前校验一个刻度; + for (int i = 0; i < 2; i++) { + List tmpData = ringData.remove((nowSecond + 60 - i) % 60); + if (tmpData != null) { + ringItemData.addAll(tmpData); + } + } + + // ring trigger + logger.debug(">>>>>>>>>>> czsj-ground, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData)); + if (ringItemData.size() > 0) { + // do trigger + for (int jobId : ringItemData) { + // do trigger + JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null); + } + // clear + ringItemData.clear(); + } + } catch (Exception e) { + if (!ringThreadToStop) { + logger.error(">>>>>>>>>>> czsj-ground, JobScheduleHelper#ringThread error:{}", e); + } + } + + // next second, align second + try { + TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000); + } catch (InterruptedException e) { + if (!ringThreadToStop) { + logger.error(e.getMessage(), e); + } + } + } + logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper#ringThread stop"); + }); + ringThread.setDaemon(true); + ringThread.setName("czsj-ground, admin JobScheduleHelper#ringThread"); + ringThread.start(); + } + + private void refreshNextValidTime(JobInfo jobInfo, Date fromTime) throws ParseException { + Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(fromTime); + if (nextValidTime != null) { + jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime()); + jobInfo.setTriggerNextTime(nextValidTime.getTime()); + } else { + jobInfo.setTriggerStatus(0); + jobInfo.setTriggerLastTime(0); + jobInfo.setTriggerNextTime(0); + } + } + + private void pushTimeRing(int ringSecond, int jobId) { + // 
push async ring + List ringItemData = ringData.get(ringSecond); + if (ringItemData == null) { + ringItemData = new ArrayList(); + ringData.put(ringSecond, ringItemData); + } + ringItemData.add(jobId); + + logger.debug(">>>>>>>>>>> czsj-ground, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData)); + } + + public void toStop() { + + // 1、stop schedule + scheduleThreadToStop = true; + try { + TimeUnit.SECONDS.sleep(1); // wait + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + if (scheduleThread.getState() != Thread.State.TERMINATED) { + // interrupt and wait + scheduleThread.interrupt(); + try { + scheduleThread.join(); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + + // if has ring data + boolean hasRingData = false; + if (!ringData.isEmpty()) { + for (int second : ringData.keySet()) { + List tmpData = ringData.get(second); + if (tmpData != null && tmpData.size() > 0) { + hasRingData = true; + break; + } + } + } + if (hasRingData) { + try { + TimeUnit.SECONDS.sleep(8); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + + // stop ring (wait job-in-memory stop) + ringThreadToStop = true; + try { + TimeUnit.SECONDS.sleep(1); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + if (ringThread.getState() != Thread.State.TERMINATED) { + // interrupt and wait + ringThread.interrupt(); + try { + ringThread.join(); + } catch (InterruptedException e) { + logger.error(e.getMessage(), e); + } + } + + logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper stop"); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java new file mode 100644 index 0000000..1e0c3d4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java @@ -0,0 +1,133 @@ +package com.czsj.bigdata.core.thread; + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import com.czsj.bigdata.core.trigger.JobTrigger; +import com.czsj.bigdata.core.trigger.TriggerTypeEnum; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * job trigger thread pool helper + * + * @author xuxueli 2018-07-03 21:08:07 + */ +public class JobTriggerPoolHelper { + private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class); + + + // ---------------------- trigger pool ---------------------- + + // fast/slow thread pool + private ThreadPoolExecutor fastTriggerPool = null; + private ThreadPoolExecutor slowTriggerPool = null; + + public void start() { + fastTriggerPool = new ThreadPoolExecutor( + 10, + JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(), + 60L, + TimeUnit.SECONDS, + new LinkedBlockingQueue(1000), + new ThreadFactory() { + @Override + public Thread newThread(Runnable r) { + return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode()); + } + }); + + slowTriggerPool = new ThreadPoolExecutor( + 10, + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(), + 60L, + TimeUnit.SECONDS, + new LinkedBlockingQueue(2000), + new ThreadFactory() { + @Override + public Thread newThread(Runnable r) { + return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode()); + } + }); + } + + + public void stop() { + //triggerPool.shutdown(); + fastTriggerPool.shutdownNow(); 
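+        // Editor's note (added in review): shutdownNow() discards queued triggers and
+        // interrupts running ones. A gentler stop, if ever wanted, would be e.g.
+        //   fastTriggerPool.shutdown();
+        //   fastTriggerPool.awaitTermination(10, TimeUnit.SECONDS); // illustrative timeout, throws InterruptedException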
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java
new file mode 100644
index 0000000..1e0c3d4
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/thread/JobTriggerPoolHelper.java
@@ -0,0 +1,133 @@
+package com.czsj.bigdata.core.thread;
+
+import com.czsj.bigdata.core.conf.JobAdminConfig;
+import com.czsj.bigdata.core.trigger.JobTrigger;
+import com.czsj.bigdata.core.trigger.TriggerTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * job trigger thread pool helper
+ *
+ * @author xuxueli 2018-07-03 21:08:07
+ */
+public class JobTriggerPoolHelper {
+    private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
+
+
+    // ---------------------- trigger pool ----------------------
+
+    // fast/slow thread pool
+    private ThreadPoolExecutor fastTriggerPool = null;
+    private ThreadPoolExecutor slowTriggerPool = null;
+
+    public void start() {
+        fastTriggerPool = new ThreadPoolExecutor(
+                10,
+                JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
+                60L,
+                TimeUnit.SECONDS,
+                new LinkedBlockingQueue<Runnable>(1000),
+                new ThreadFactory() {
+                    @Override
+                    public Thread newThread(Runnable r) {
+                        return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
+                    }
+                });
+
+        slowTriggerPool = new ThreadPoolExecutor(
+                10,
+                JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
+                60L,
+                TimeUnit.SECONDS,
+                new LinkedBlockingQueue<Runnable>(2000),
+                new ThreadFactory() {
+                    @Override
+                    public Thread newThread(Runnable r) {
+                        return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
+                    }
+                });
+    }
+
+
+    public void stop() {
+        //triggerPool.shutdown();
+        fastTriggerPool.shutdownNow();
+        slowTriggerPool.shutdownNow();
+        logger.info(">>>>>>>>> czsj-ground trigger thread pool shutdown success.");
+    }
+
+
+    // job timeout count
+    private volatile long minTim = System.currentTimeMillis() / 60000;  // ms > min
+    private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
+
+
+    /**
+     * add trigger
+     */
+    public void addTrigger(final int jobId, final TriggerTypeEnum triggerType, final int failRetryCount, final String executorShardingParam, final String executorParam) {
+
+        // choose thread pool
+        ThreadPoolExecutor triggerPool_ = fastTriggerPool;
+        AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
+        if (jobTimeoutCount != null && jobTimeoutCount.get() > 10) {  // job-timeout 10 times in 1 min
+            triggerPool_ = slowTriggerPool;
+        }
+        // trigger
+        triggerPool_.execute(() -> {
+            long start = System.currentTimeMillis();
+            try {
+                // do trigger
+                JobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            } finally {
+                // check timeout-count-map
+                long minTim_now = System.currentTimeMillis() / 60000;
+                if (minTim != minTim_now) {
+                    minTim = minTim_now;
+                    jobTimeoutCountMap.clear();
+                }
+                // incr timeout-count-map
+                long cost = System.currentTimeMillis() - start;
+                if (cost > 500) {  // job-timeout threshold 500ms
+                    AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
+                    if (timeoutCount != null) {
+                        timeoutCount.incrementAndGet();
+                    }
+                }
+            }
+        });
+    }
+
+
+    // ---------------------- helper ----------------------
+
+    private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();
+
+    public static void toStart() {
+        helper.start();
+    }
+
+    public static void toStop() {
+        helper.stop();
+    }
+
+    /**
+     * @param jobId
+     * @param triggerType
+     * @param failRetryCount >=0: use this param
+     *                       <0: use param from job info config
+     * @param executorShardingParam
+     * @param executorParam null: use job param
+     *                      not null: cover job param
+     */
+    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
+        helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
+    }
+
+}
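Callers never touch the two pools directly; everything goes through the static facade, and a job that exceeded the 500 ms threshold more than 10 times within the current minute is demoted to the slow pool so it cannot starve fast jobs. A minimal usage sketch (the job id and lifecycle hooks are hypothetical, not part of the diff):

    JobTriggerPoolHelper.toStart();                                            // typically once on admin startup
    JobTriggerPoolHelper.trigger(42, TriggerTypeEnum.MANUAL, -1, null, null);  // -1: take retry count from job config
    JobTriggerPoolHelper.toStop();                                             // typically on shutdown; shutdownNow() on both pools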
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/JobTrigger.java b/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/JobTrigger.java
new file mode 100644
index 0000000..bb6976e
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/JobTrigger.java
@@ -0,0 +1,263 @@
+package com.czsj.bigdata.core.trigger;
+
+
+import com.czsj.bigdata.core.conf.JobAdminConfig;
+import com.czsj.bigdata.core.route.ExecutorRouteStrategyEnum;
+import com.czsj.bigdata.core.scheduler.JobScheduler;
+import com.czsj.bigdata.core.util.I18nUtil;
+import com.czsj.bigdata.entity.JobDatasource;
+import com.czsj.bigdata.entity.JobGroup;
+import com.czsj.bigdata.entity.JobInfo;
+import com.czsj.bigdata.entity.JobLog;
+import com.czsj.bigdata.tool.query.BaseQueryTool;
+import com.czsj.bigdata.tool.query.QueryToolFactory;
+import com.czsj.bigdata.util.JSONUtils;
+import com.czsj.core.biz.ExecutorBiz;
+import com.czsj.core.biz.model.ReturnT;
+import com.czsj.core.biz.model.TriggerParam;
+import com.czsj.core.enums.ExecutorBlockStrategyEnum;
+import com.czsj.core.enums.IncrementTypeEnum;
+import com.czsj.core.glue.GlueTypeEnum;
+import com.czsj.rpc.util.IpUtil;
+import com.czsj.rpc.util.ThrowableUtil;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Calendar;
+import java.util.Date;
+
+/**
+ * xxl-job trigger
+ * Created by xuxueli on 17/7/13.
+ */
+public class JobTrigger {
+    private static Logger logger = LoggerFactory.getLogger(JobTrigger.class);
+
+    /**
+     * trigger job
+     *
+     * @param jobId
+     * @param triggerType
+     * @param failRetryCount >=0: use this param
+     *                       <0: use param from job info config
+     * @param executorShardingParam
+     * @param executorParam null: use job param
+     *                      not null: cover job param
+     */
+    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
+        JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
+        if (jobInfo == null) {
+            logger.warn(">>>>>>>>>>>> trigger fail, jobId invalid,jobId={}", jobId);
+            return;
+        }
+        if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) {
+            //解密账密
+            String json = JSONUtils.changeJson(jobInfo.getJobJson(), JSONUtils.decrypt);
+            jobInfo.setJobJson(json);
+        }
+        if (StringUtils.isNotBlank(executorParam)) {
+            jobInfo.setExecutorParam(executorParam);
+        }
+        int finalFailRetryCount = failRetryCount >= 0 ? failRetryCount : jobInfo.getExecutorFailRetryCount();
+        JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(jobInfo.getJobGroup());
+
+        // sharding param
+        int[] shardingParam = null;
+        if (executorShardingParam != null) {
+            String[] shardingArr = executorShardingParam.split("/");
+            if (shardingArr.length == 2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
+                shardingParam = new int[2];
+                shardingParam[0] = Integer.valueOf(shardingArr[0]);
+                shardingParam[1] = Integer.valueOf(shardingArr[1]);
+            }
+        }
+        if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
+                && group.getRegistryList() != null && !group.getRegistryList().isEmpty()
+                && shardingParam == null) {
+            for (int i = 0; i < group.getRegistryList().size(); i++) {
+                processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
+            }
+        } else {
+            if (shardingParam == null) {
+                shardingParam = new int[]{0, 1};
+            }
+            processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
+        }
+
+    }
+
+    private static boolean isNumeric(String str) {
+        try {
+            int result = Integer.valueOf(str);
+            return true;
+        } catch (NumberFormatException e) {
+            return false;
+        }
+    }
+
+    /**
+     * @param group job group, registry list may be empty
+     * @param jobInfo
+     * @param finalFailRetryCount
+     * @param triggerType
+     * @param index sharding index
+     * @param total sharding total
+     */
+    private static void processTrigger(JobGroup group, JobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total) {
+
+        TriggerParam triggerParam = new TriggerParam();
+
+        // param
+        ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION);  // block strategy
+        ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null);    // route strategy
+        String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) ? String.valueOf(index).concat("/").concat(String.valueOf(total)) : null;
+
+        // 1、save log-id
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(new Date());
+        calendar.set(Calendar.MILLISECOND, 0);
+        Date triggerTime = calendar.getTime();
+        JobLog jobLog = new JobLog();
+        jobLog.setJobGroup(jobInfo.getJobGroup());
+        jobLog.setJobId(jobInfo.getId());
+        jobLog.setTriggerTime(triggerTime);
+        jobLog.setJobDesc(jobInfo.getJobDesc());
+
+        JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
+        logger.debug(">>>>>>>>>>> czsj-ground trigger start, jobId:{}", jobLog.getId());
+
+        // 2、init trigger-param
+        triggerParam.setJobId(jobInfo.getId());
+        triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
+        triggerParam.setExecutorParams(jobInfo.getExecutorParam());
+        triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
+        triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
+        triggerParam.setLogId(jobLog.getId());
+        triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
+        triggerParam.setGlueType(jobInfo.getGlueType());
+        triggerParam.setGlueSource(jobInfo.getGlueSource());
+        triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
+        triggerParam.setBroadcastIndex(index);
+        triggerParam.setBroadcastTotal(total);
+        triggerParam.setJobJson(jobInfo.getJobJson());
+
+        //increment parameter
+        Integer incrementType = jobInfo.getIncrementType();
+        if (incrementType != null) {
+            triggerParam.setIncrementType(incrementType);
+            if (IncrementTypeEnum.ID.getCode() == incrementType) {
+                long maxId = getMaxId(jobInfo);
+                jobLog.setMaxId(maxId);
+                triggerParam.setEndId(maxId);
+                if(maxId != 0){
+                    triggerParam.setStartId(maxId);
+                    jobInfo.setIncStartId(maxId);
+                }else{
+                    triggerParam.setStartId(jobInfo.getIncStartId());
+                }
+            } else if (IncrementTypeEnum.TIME.getCode() == incrementType) {
+                triggerParam.setStartTime(jobInfo.getIncStartTime());
+                triggerParam.setTriggerTime(triggerTime);
+                triggerParam.setReplaceParamType(jobInfo.getReplaceParamType());
+            } else if (IncrementTypeEnum.PARTITION.getCode() == incrementType) {
+                triggerParam.setPartitionInfo(jobInfo.getPartitionInfo());
+            }
+            triggerParam.setReplaceParam(jobInfo.getReplaceParam());
+        }
+        //jvm parameter
+        triggerParam.setJvmParam(jobInfo.getJvmParam());
+
+        // 3、init address
+        String address = null;
+        ReturnT<String> routeAddressResult = null;
+        if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) {
+            if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
+                if (index < group.getRegistryList().size()) {
+                    address = group.getRegistryList().get(index);
+                } else {
+                    address = group.getRegistryList().get(0);
+                }
+            } else {
+                routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
+                if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
+                    address = routeAddressResult.getContent();
+                }
+            }
+        } else {
+            routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
+        }
+
+        // 4、trigger remote executor
+        ReturnT<String> triggerResult = null;
+        if (address != null) {
+            triggerResult = runExecutor(triggerParam, address);
+        } else {
+            triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
+        }
+
+        // 5、collection trigger info
+        StringBuilder triggerMsgSb = new StringBuilder();
+        triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append(":").append(triggerType.getTitle());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append(":").append(IpUtil.getIp());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append(":")
+                .append((group.getAddressType() == 0) ? I18nUtil.getString("jobgroup_field_addressType_0") : I18nUtil.getString("jobgroup_field_addressType_1"));
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append(":").append(group.getRegistryList());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append(":").append(executorRouteStrategyEnum.getTitle());
+        if (shardingParam != null) {
+            triggerMsgSb.append("(" + shardingParam + ")");
+        }
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append(":").append(blockStrategy.getTitle());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append(":").append(jobInfo.getExecutorTimeout());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append(":").append(finalFailRetryCount);
+
+        triggerMsgSb.append("<br><br>>>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_run") + "<<<<<<<<<<<<br>")
+                .append((routeAddressResult != null && routeAddressResult.getMsg() != null) ? routeAddressResult.getMsg() + "<br><br>" : "").append(triggerResult.getMsg() != null ? triggerResult.getMsg() : "");
+
+        // 6、save log trigger-info
+        jobLog.setExecutorAddress(address);
+        jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
+        jobLog.setExecutorParam(jobInfo.getExecutorParam());
+        jobLog.setExecutorShardingParam(shardingParam);
+        jobLog.setExecutorFailRetryCount(finalFailRetryCount);
+        jobLog.setTriggerCode(triggerResult.getCode());
+        jobLog.setTriggerMsg(triggerMsgSb.toString());
+        JobAdminConfig.getAdminConfig().getJobInfoMapper().update(jobInfo);
+        JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(jobLog);
+
+        logger.debug(">>>>>>>>>>> czsj-ground trigger end, jobId:{}", jobLog.getId());
+    }
+
+    private static long getMaxId(JobInfo jobInfo) {
+        JobDatasource datasource = JobAdminConfig.getAdminConfig().getJobDatasourceMapper().selectById(jobInfo.getDatasourceId());
+        BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource);
+        return qTool.getMaxIdVal(jobInfo.getReaderTable(), jobInfo.getPrimaryKey());
+    }
+
+    /**
+     * run executor
+     *
+     * @param triggerParam
+     * @param address
+     * @return
+     */
+    public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address) {
+        ReturnT<String> runResult = null;
+        try {
+            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
+            runResult = executorBiz.run(triggerParam);
+        } catch (Exception e) {
+            logger.error(">>>>>>>>>>> czsj-ground trigger error, please check if the executor[{}] is running.", address, e);
+            runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.toString(e));
+        }
+
+        StringBuilder runResultSB = new StringBuilder(I18nUtil.getString("jobconf_trigger_run") + ":");
+        runResultSB.append("<br>address:").append(address);
+        runResultSB.append("<br>code:").append(runResult.getCode());
+        runResultSB.append("<br>msg:").append(runResult.getMsg());
+
+        runResult.setMsg(runResultSB.toString());
+        return runResult;
+    }
+
+}
address:").append(address); + runResultSB.append("
code:").append(runResult.getCode()); + runResultSB.append("
msg:").append(runResult.getMsg()); + + runResult.setMsg(runResultSB.toString()); + return runResult; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/TriggerTypeEnum.java b/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/TriggerTypeEnum.java new file mode 100644 index 0000000..18bbf6a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/trigger/TriggerTypeEnum.java @@ -0,0 +1,27 @@ +package com.czsj.bigdata.core.trigger; + + +import com.czsj.bigdata.core.util.I18nUtil; + +/** + * trigger type enum + * + * @author xuxueli 2018-09-16 04:56:41 + */ +public enum TriggerTypeEnum { + + MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")), + CRON(I18nUtil.getString("jobconf_trigger_type_cron")), + RETRY(I18nUtil.getString("jobconf_trigger_type_retry")), + PARENT(I18nUtil.getString("jobconf_trigger_type_parent")), + API(I18nUtil.getString("jobconf_trigger_type_api")); + + private TriggerTypeEnum(String title){ + this.title = title; + } + private String title; + public String getTitle() { + return title; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/util/EmailUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/core/util/EmailUtil.java new file mode 100644 index 0000000..d9c518c --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/core/util/EmailUtil.java @@ -0,0 +1,56 @@ +package com.czsj.bigdata.core.util; + +import com.sun.mail.util.MailSSLSocketFactory; + +import javax.mail.internet.InternetAddress; +import javax.mail.internet.MimeMessage; +import java.util.Properties; + + + +public class EmailUtil { + + public static void send(String emailUserName,String emailPassword,String emailAuthorization,String SJemailUserName,String title,String msg) throws Exception{ + Properties properties = new Properties(); + //设置QQ邮件服务器 + properties.setProperty("mail.host","smtp.qq.com"); + //邮件发送协议 + properties.setProperty("mail.transport.protocol","smtp"); + //需要验证用户名密码 + properties.setProperty("mail.smtp.auth","true"); + //还要设置SSL加密,加上以下代码即可 + MailSSLSocketFactory mailSSLSocketFactory = new MailSSLSocketFactory(); + mailSSLSocketFactory.setTrustAllHosts(true); + properties.put("mail.smtp.ssl.enable","true"); + properties.put("mail.smtp.ssl.socketFactory",mailSSLSocketFactory); + //使用JavaMail发送邮件的5个步骤 + //1、创建定义整个应用程序所需环境信息的 Session 对象 + Session session = Session.getDefaultInstance(properties, new Authenticator() { + @Override + public PasswordAuthentication getPasswordAuthentication() { + //发件人用户名,授权码 + return new PasswordAuthentication(emailUserName,emailAuthorization); + } + }); + //开启Session的debug模式,这样就可以查看程序发送Email的运行状态 + session.setDebug(true); + //2、通过session得到transport对象 + Transport transport = session.getTransport(); + //3、使用用户名和授权码连上邮件服务器 + transport.connect("smtp.qq.com",emailUserName,emailPassword); + //4、创建邮件:写邮件 + //注意需要传递Session + MimeMessage message = new MimeMessage(session); + //指明邮件的发件人 + message.setFrom(new InternetAddress(emailUserName)); + //指明邮件的收件人,现在发件人和收件人是一样的,就是自己给自己发 + message.setRecipient(Message.RecipientType.TO , new InternetAddress(SJemailUserName)); + message.setSubject(title); + message.setContent(msg,"text/html;charset=UTF-8"); + //5、发送邮件 + transport.sendMessage(message,message.getAllRecipients()); + + //6、关闭连接 + transport.close(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/util/I18nUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/core/util/I18nUtil.java new file mode 100644 index 0000000..fd4268c --- /dev/null +++ 
+package com.czsj.bigdata.core.util;
+
+import com.czsj.bigdata.core.conf.JobAdminConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.EncodedResource;
+import org.springframework.core.io.support.PropertiesLoaderUtils;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * i18n util
+ *
+ * @author xuxueli 2018-01-17 20:39:06
+ */
+public class I18nUtil {
+    private static Logger logger = LoggerFactory.getLogger(I18nUtil.class);
+
+    private static Properties prop = null;
+    public static Properties loadI18nProp(){
+        if (prop != null) {
+            return prop;
+        }
+        try {
+            // build i18n prop
+            String i18n = JobAdminConfig.getAdminConfig().getI18n();
+            i18n = (i18n!=null && i18n.trim().length()>0)?("_"+i18n):i18n;
+            String i18nFile = MessageFormat.format("i18n/message{0}.properties", i18n);
+
+            // load prop
+            Resource resource = new ClassPathResource(i18nFile);
+            EncodedResource encodedResource = new EncodedResource(resource,"UTF-8");
+            prop = PropertiesLoaderUtils.loadProperties(encodedResource);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        return prop;
+    }
+
+    /**
+     * get val of i18n key
+     *
+     * @param key
+     * @return
+     */
+    public static String getString(String key) {
+        return loadI18nProp().getProperty(key);
+    }
+
+    /**
+     * get mult val of i18n mult key, as json
+     *
+     * @param keys
+     * @return
+     */
+    public static String getMultString(String... keys) {
+        Map<String, String> map = new HashMap<String, String>();
+
+        Properties prop = loadI18nProp();
+        if (keys!=null && keys.length>0) {
+            for (String key: keys) {
+                map.put(key, prop.getProperty(key));
+            }
+        } else {
+            for (String key: prop.stringPropertyNames()) {
+                map.put(key, prop.getProperty(key));
+            }
+        }
+
+        String json = JacksonUtil.writeValueAsString(map);
+        return json;
+    }
+
+}
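Keys resolve against an i18n/message{_lang}.properties file on the classpath, with the language suffix taken from the admin config; a missing key simply yields null from Properties.getProperty rather than throwing. Illustrative lookups (the keys shown are ones referenced elsewhere in this diff):

    String typeLabel = I18nUtil.getString("jobconf_trigger_type_manual");
    String asJson    = I18nUtil.getMultString("jobconf_trigger_type_manual", "jobconf_trigger_type_cron");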
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/util/JacksonUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/core/util/JacksonUtil.java
new file mode 100644
index 0000000..ab02e53
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/util/JacksonUtil.java
@@ -0,0 +1,92 @@
+package com.czsj.bigdata.core.util;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JavaType;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * Jackson util
+ *
+ * 1、obj need private and set/get;
+ * 2、do not support inner class;
+ *
+ * @author xuxueli 2015-9-25 18:02:56
+ */
+public class JacksonUtil {
+    private static Logger logger = LoggerFactory.getLogger(JacksonUtil.class);
+
+    private final static ObjectMapper objectMapper = new ObjectMapper();
+    public static ObjectMapper getInstance() {
+        return objectMapper;
+    }
+
+    /**
+     * bean、array、List、Map --> json
+     *
+     * @param obj
+     * @return json string
+     * @throws Exception
+     */
+    public static String writeValueAsString(Object obj) {
+        try {
+            return getInstance().writeValueAsString(obj);
+        } catch (JsonGenerationException e) {
+            logger.error(e.getMessage(), e);
+        } catch (JsonMappingException e) {
+            logger.error(e.getMessage(), e);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        return null;
+    }
+
+    /**
+     * string --> bean、Map、List(array)
+     *
+     * @param jsonStr
+     * @param clazz
+     * @return obj
+     * @throws Exception
+     */
+    public static <T> T readValue(String jsonStr, Class<T> clazz) {
+        try {
+            return getInstance().readValue(jsonStr, clazz);
+        } catch (JsonParseException e) {
+            logger.error(e.getMessage(), e);
+        } catch (JsonMappingException e) {
+            logger.error(e.getMessage(), e);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        return null;
+    }
+
+    /**
+     * string --> List<Bean>...
+     *
+     * @param jsonStr
+     * @param parametrized
+     * @param parameterClasses
+     * @param <T>
+     * @return
+     */
+    public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
+        try {
+            JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
+            return getInstance().readValue(jsonStr, javaType);
+        } catch (JsonParseException e) {
+            logger.error(e.getMessage(), e);
+        } catch (JsonMappingException e) {
+            logger.error(e.getMessage(), e);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        return null;
+    }
+}
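JacksonUtil funnels everything through one shared ObjectMapper and swallows parse errors into a null return, so callers must null-check. An illustrative round trip (jobInfo and jsonArray are hypothetical values in scope, not part of the diff):

    String json = JacksonUtil.writeValueAsString(jobInfo);                              // bean -> json
    JobInfo one = JacksonUtil.readValue(json, JobInfo.class);                           // json -> bean, null on error
    List<JobInfo> many = JacksonUtil.readValue(jsonArray, List.class, JobInfo.class);   // parameterized variant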
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/core/util/LocalCacheUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/core/util/LocalCacheUtil.java
new file mode 100644
index 0000000..8711a9f
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/core/util/LocalCacheUtil.java
@@ -0,0 +1,133 @@
+package com.czsj.bigdata.core.util;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * local cache tool
+ *
+ * @author xuxueli 2018-01-22 21:37:34
+ */
+public class LocalCacheUtil {
+
+    private static ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();   // 类型建议用抽象父类,兼容性更好;
+    private static class LocalCacheData{
+        private String key;
+        private Object val;
+        private long timeoutTime;
+
+        public LocalCacheData() {
+        }
+
+        public LocalCacheData(String key, Object val, long timeoutTime) {
+            this.key = key;
+            this.val = val;
+            this.timeoutTime = timeoutTime;
+        }
+
+        public String getKey() {
+            return key;
+        }
+
+        public void setKey(String key) {
+            this.key = key;
+        }
+
+        public Object getVal() {
+            return val;
+        }
+
+        public void setVal(Object val) {
+            this.val = val;
+        }
+
+        public long getTimeoutTime() {
+            return timeoutTime;
+        }
+
+        public void setTimeoutTime(long timeoutTime) {
+            this.timeoutTime = timeoutTime;
+        }
+    }
+
+
+    /**
+     * set cache
+     *
+     * @param key
+     * @param val
+     * @param cacheTime
+     * @return
+     */
+    public static boolean set(String key, Object val, long cacheTime){
+
+        // clean timeout cache, before set new cache (avoid cache too much)
+        cleanTimeoutCache();
+
+        // set new cache
+        if (key==null || key.trim().length()==0) {
+            return false;
+        }
+        if (val == null) {
+            remove(key);
+            return false;
+        }
+        if (cacheTime <= 0) {
+            remove(key);
+            return false;
+        }
+        long timeoutTime = System.currentTimeMillis() + cacheTime;
+        LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
+        cacheRepository.put(localCacheData.getKey(), localCacheData);
+        return true;
+    }
+
+    /**
+     * remove cache
+     *
+     * @param key
+     * @return
+     */
+    public static boolean remove(String key){
+        if (key==null || key.trim().length()==0) {
+            return false;
+        }
+        cacheRepository.remove(key);
+        return true;
+    }
+
+    /**
+     * get cache
+     *
+     * @param key
+     * @return
+     */
+    public static Object get(String key){
+        if (key==null || key.trim().length()==0) {
+            return null;
+        }
+        LocalCacheData localCacheData = cacheRepository.get(key);
+        if (localCacheData!=null && System.currentTimeMillis()<localCacheData.getTimeoutTime()) {
+            return localCacheData.getVal();
+        } else {
+            remove(key);
+            return null;
+        }
+    }
+
+    /**
+     * clean timeout cache
+     *
+     * @return
+     */
+    public static boolean cleanTimeoutCache(){
+        if (!cacheRepository.keySet().isEmpty()) {
+            for (String key: cacheRepository.keySet()) {
+                LocalCacheData localCacheData = cacheRepository.get(key);
+                if (localCacheData!=null && System.currentTimeMillis()>=localCacheData.getTimeoutTime()) {
+                    cacheRepository.remove(key);
+                }
+            }
+        }
+        return true;
+    }
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseReaderDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseReaderDto.java
new file mode 100644
index 0000000..4da257e
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseReaderDto.java
@@ -0,0 +1,16 @@
+package com.czsj.bigdata.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ *
+ * @author gavin
+ * @ClassName clickhouse reader dto
+ * @Version 2.0
+ * @since 2022/9/29
+ */
+@Data
+public class ClickhouseReaderDto implements Serializable {
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseWriterDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseWriterDto.java
new file mode 100644
index 0000000..90e2265
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/ClickhouseWriterDto.java
@@ -0,0 +1,16 @@
+package com.czsj.bigdata.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ *
+ * @author gavin
+ * @ClassName clickhouse write dto
+ * @Version 2.0
+ * @since 2022/9/29
+ */
+@Data
+public class ClickhouseWriterDto implements Serializable {
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXBatchJsonBuildDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXBatchJsonBuildDto.java
new file mode 100644
index 0000000..7aaaf50
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXBatchJsonBuildDto.java
@@ -0,0 +1,32 @@
+package com.czsj.bigdata.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * 构建json dto
+ *
+ * @author jingwk
+ * @ClassName DataXJsonDto
+ * @Version 2.1.2
+ * @since 2022/05/05 17:15
+ */
+@Data
+public class DataXBatchJsonBuildDto implements Serializable {
+
+    private Long readerDatasourceId;
+
+    private List<String> readerTables;
+
+    private Long writerDatasourceId;
+
+    private List<String> writerTables;
+
+    private int templateId;
+
+    private RdbmsReaderDto rdbmsReader;
+
+    private RdbmsWriterDto rdbmsWriter;
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXJsonBuildDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXJsonBuildDto.java
new file mode 100644
index 0000000..731156e
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/DataXJsonBuildDto.java
@@ -0,0 +1,46 @@
+package com.czsj.bigdata.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * 构建json dto
+ *
+ * @author jingwk
+ * @ClassName DataxJsonDto
+ * @Version 2.1.1
+ * @since 2022/03/14 07:15
+ */
+@Data
+public class DataXJsonBuildDto implements Serializable {
+
+    private Long readerDatasourceId;
+
+    private List<String> readerTables;
+
+    private List<String> readerColumns;
+
+    private Long writerDatasourceId;
+
+    private List<String> writerTables;
+
+    private List<String> writerColumns;
+
+    private HiveReaderDto hiveReader;
+
+    private HiveWriterDto hiveWriter;
+
+    private HbaseReaderDto hbaseReader;
+
+    private HbaseWriterDto hbaseWriter;
+
+    private RdbmsReaderDto rdbmsReader;
+
+    private RdbmsWriterDto rdbmsWriter;
+
+    private MongoDBReaderDto mongoDBReader;
+
+    private MongoDBWriterDto mongoDBWriter;
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkSqlDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkSqlDto.java
new file mode 100644
index
0000000..cca07b4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkSqlDto.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import javax.validation.constraints.NotBlank; +import java.io.Serializable; + +/** + * + * @author fei + * @date 2022-01-27 + * + **/ +@Data +public class FlinkSqlDto implements Serializable{ + private static final long serialVersionUID = 1L; + + @NotBlank(message = "SQL 字符串必传!") + private String sqlStr; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkXBatchJsonBuildDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkXBatchJsonBuildDto.java new file mode 100644 index 0000000..1c3e68d --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/FlinkXBatchJsonBuildDto.java @@ -0,0 +1,32 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; +import java.util.List; + +/** + * 构建json dto + * + * @author jingwk + * @ClassName FlinkXJsonDto + * @Version 2.1.2 + * @since 2022/05/05 17:15 + */ +@Data +public class FlinkXBatchJsonBuildDto implements Serializable { + + private Long readerDatasourceId; + + private List readerTables; + + private Long writerDatasourceId; + + private List writerTables; + + private int templateId; + + private RdbmsReaderDto rdbmsReader; + + private RdbmsWriterDto rdbmsWriter; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseReaderDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseReaderDto.java new file mode 100644 index 0000000..566f3a3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseReaderDto.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class HbaseReaderDto implements Serializable { + + private String readerMaxVersion; + + private String readerMode; + + private Range readerRange; + +} + diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseWriterDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseWriterDto.java new file mode 100644 index 0000000..f4b9ac4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/HbaseWriterDto.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class HbaseWriterDto implements Serializable { + + private String writeNullMode; + + private String writerMode; + + private String writerRowkeyColumn; + + private VersionColumn writerVersionColumn; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveReaderDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveReaderDto.java new file mode 100644 index 0000000..5548198 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveReaderDto.java @@ -0,0 +1,28 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建hive reader dto + * + * @author jingwk + * @ClassName hive reader + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class HiveReaderDto implements Serializable { + + private String readerPath; + + private String readerDefaultFS; + + private String readerFileType; + + private String readerFieldDelimiter; + + private Boolean readerSkipHeader; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveWriterDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveWriterDto.java new file mode 100644 index 0000000..19d422a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/HiveWriterDto.java @@ 
-0,0 +1,29 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建hive write dto + * + * @author jingwk + * @ClassName hive write dto + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class HiveWriterDto implements Serializable { + + private String writerDefaultFS; + + private String writerFileType; + + private String writerPath; + + private String writerFileName; + + private String writeMode; + + private String writeFieldDelimiter; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/JsonBuildDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/JsonBuildDto.java new file mode 100644 index 0000000..9bbdb1a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/JsonBuildDto.java @@ -0,0 +1,52 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; +import java.util.List; + +/** + * 构建json dto + * + * @author jingwk + * @ClassName FlinkxJsonDto + * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +@Data +public class JsonBuildDto implements Serializable { + + private Long readerDatasourceId; + + private List readerTables; + + private List readerColumns; + + private Long writerDatasourceId; + + private List writerTables; + + private List writerColumns; + + private HiveReaderDto hiveReader; + + private HiveWriterDto hiveWriter; + + private HbaseReaderDto hbaseReader; + + private HbaseWriterDto hbaseWriter; + + private RdbmsReaderDto rdbmsReader; + + private RdbmsWriterDto rdbmsWriter; + + private MongoDBReaderDto mongoDBReader; + + private MongoDBWriterDto mongoDBWriter; + + private ClickhouseReaderDto clickhouseReader; + + private ClickhouseWriterDto clickhouseWriter; + + private String type; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBReaderDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBReaderDto.java new file mode 100644 index 0000000..b62b063 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBReaderDto.java @@ -0,0 +1,19 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建mongodb reader dto + * + * @author jingwk + * @ClassName mongodb reader + * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +@Data +public class MongoDBReaderDto implements Serializable { + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBWriterDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBWriterDto.java new file mode 100644 index 0000000..23b5b29 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/MongoDBWriterDto.java @@ -0,0 +1,20 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建mongodb write dto + * + * @author jingwk + * @ClassName mongodb write dto + * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +@Data +public class MongoDBWriterDto implements Serializable { + + private UpsertInfo upsertInfo; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/Range.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/Range.java new file mode 100644 index 0000000..d3ed9cd --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/Range.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class Range implements Serializable { + + private String startRowkey; + + private String endRowkey; + + private Boolean isBinaryRowkey; +} diff --git 
a/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsReaderDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsReaderDto.java new file mode 100644 index 0000000..8fa96ca --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsReaderDto.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建json dto + * + * @author jingwk + * @ClassName RdbmsReaderDto + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class RdbmsReaderDto implements Serializable { + + private String readerSplitPk; + + private String whereParams; + + private String querySql; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsWriterDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsWriterDto.java new file mode 100644 index 0000000..54d7537 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/RdbmsWriterDto.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 构建json dto + * + * @author jingwk + * @ClassName RdbmsWriteDto + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class RdbmsWriterDto implements Serializable { + + private String preSql; + + private String postSql; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/TaskScheduleDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/TaskScheduleDto.java new file mode 100644 index 0000000..18b6db8 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/TaskScheduleDto.java @@ -0,0 +1,86 @@ +package com.czsj.bigdata.dto; + +public class TaskScheduleDto { + + + /** + * 所选作业类型: + * 1 -> 每天 + * 2 -> 每月 + * 3 -> 每周 + */ + Integer jobType; + + /** + * 一周的哪几天 + */ + Integer[] dayOfWeeks; + + /** + * 一个月的哪几天 + */ + Integer[] dayOfMonths; + + /** + * 秒 + */ + Integer second; + + /** + * 分 + */ + Integer minute; + + /** + * 时 + */ + Integer hour; + + public Integer getJobType() { + return jobType; + } + + public void setJobType(Integer jobType) { + this.jobType = jobType; + } + + public Integer[] getDayOfWeeks() { + return dayOfWeeks; + } + + public void setDayOfWeeks(Integer[] dayOfWeeks) { + this.dayOfWeeks = dayOfWeeks; + } + + public Integer[] getDayOfMonths() { + return dayOfMonths; + } + + public void setDayOfMonths(Integer[] dayOfMonths) { + this.dayOfMonths = dayOfMonths; + } + + public Integer getSecond() { + return second; + } + + public void setSecond(Integer second) { + this.second = second; + } + + public Integer getMinute() { + return minute; + } + + public void setMinute(Integer minute) { + this.minute = minute; + } + + public Integer getHour() { + return hour; + } + + public void setHour(Integer hour) { + this.hour = hour; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/TriggerJobDto.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/TriggerJobDto.java new file mode 100644 index 0000000..a0474e2 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/TriggerJobDto.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +import java.io.Serializable; + +/** + * 用于启动任务接收的实体 + * + * @author jingwk + * @ClassName TriggerJobDto + * @Version 1.0 + * @since 2019/12/01 16:12 + */ +@Data +public class TriggerJobDto implements Serializable { + + private String executorParam; + + private int jobId; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/UpsertInfo.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/UpsertInfo.java new file mode 100644 index 
0000000..146c916 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/UpsertInfo.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +/** + * Created by mac on 2022/3/16. + */ +@Data +public class UpsertInfo { + /** + * 当设置为true时,表示针对相同的upsertKey做更新操作 + */ + private Boolean isUpsert; + /** + * upsertKey指定了没行记录的业务主键。用来做更新时使用。 + */ + private String upsertKey; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/dto/VersionColumn.java b/czsj-system/src/main/java/com/czsj/bigdata/dto/VersionColumn.java new file mode 100644 index 0000000..3a160a5 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/dto/VersionColumn.java @@ -0,0 +1,11 @@ +package com.czsj.bigdata.dto; + +import lombok.Data; + +@Data +public class VersionColumn { + + private Integer index; + + private String value; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuth.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuth.java new file mode 100644 index 0000000..fd47e3a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuth.java @@ -0,0 +1,24 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class APIAuth { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("请求token的ID") + private String token_id; + + @ApiModelProperty("请求分组的ID") + private String group_id; + + @ApiModelProperty("更新时间") + private String update_time; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuthConfig.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuthConfig.java new file mode 100644 index 0000000..136e267 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIAuthConfig.java @@ -0,0 +1,20 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class APIAuthConfig { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("权限的id") + private String auth_id; + + @ApiModelProperty("配置的id") + private String config_id; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APIConfig.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIConfig.java new file mode 100644 index 0000000..c62ce22 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIConfig.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class APIConfig { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("请求路径") + private String path; + + @ApiModelProperty("名称") + private String name; + + @ApiModelProperty("API分组") + private String group_id; + + @ApiModelProperty("描述") + private String describe; + + @ApiModelProperty("数据源ID") + private Long datasource_id; + + @ApiModelProperty("请求参数") + private String params; + + @ApiModelProperty("创建时间") + private String create_time; + + @ApiModelProperty("更新时间") + private String update_time; + + @ApiModelProperty("执行的SQL语句") + private String sql_text; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APIGroup.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIGroup.java new file mode 100644 index 0000000..784f5a6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIGroup.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; 
+ +/** + * + */ +@Data +public class APIGroup { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("分组名称") + private String name; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APISQL.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APISQL.java new file mode 100644 index 0000000..8e570ec --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APISQL.java @@ -0,0 +1,20 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class APISQL { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("api的Id") + private String api_id; + + @ApiModelProperty("api的执行SQL") + private String sql_text; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/APIToken.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIToken.java new file mode 100644 index 0000000..fd6d9dd --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/APIToken.java @@ -0,0 +1,27 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class APIToken { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("请求Token") + private String token; + + @ApiModelProperty("描述") + private String describe; + + @ApiModelProperty("过期时间") + private String expire; + + @ApiModelProperty("创建时间") + private String create_time; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseForm.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseForm.java new file mode 100644 index 0000000..30b0740 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseForm.java @@ -0,0 +1,252 @@ +package com.czsj.bigdata.entity; + +import cn.hutool.core.util.BooleanUtil; +import cn.hutool.core.util.NumberUtil; +import cn.hutool.core.util.ObjectUtil; +import cn.hutool.core.util.StrUtil; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.czsj.bigdata.util.PageUtils; +import com.czsj.bigdata.util.ServletUtils; +import lombok.extern.slf4j.Slf4j; + +import javax.servlet.http.HttpServletRequest; +import java.net.URLDecoder; +import java.util.Enumeration; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * + * @Author: czsj + * @Date: 2022/9/16 11:14 + * @Description: 基础参数辅助类 + **/ +@Slf4j +public class BaseForm { + /** + * 查询参数对象 + */ + protected Map values = new LinkedHashMap<>(); + + /** + * 当前页码 + */ + private Long current = 1L; + + /** + * 页大小 + */ + private Long size = 10L; + + /** + * 构造方法 + */ + public BaseForm() { + try { + HttpServletRequest request = ServletUtils.getRequest(); + Enumeration params = request.getParameterNames(); + while (params.hasMoreElements()) { + String name = params.nextElement(); + String value = StrUtil.trim(request.getParameter(name)); + this.set(name, URLDecoder.decode(value, "UTF-8")); + } + this.parsePagingQueryParams(); + } catch (Exception e) { + e.printStackTrace(); + log.error("BaseControlForm initialize parameters setting error:" + e); + } + } + + /** + * 获取页码 + * + * @return + */ + public Long getPageNo() { + String pageNum = StrUtil.toString(this.get("current")); + if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) { + this.current = Long.parseLong(pageNum); + } + return this.current; + } + + /** + * 获取页大小 + * + * @return + */ + public Long getPageSize() { + String pageSize = 
StrUtil.toString(this.get("size")); + + if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) { + this.size = Long.parseLong(pageSize); + } + return this.size; + } + + /** + * 获得参数信息对象 + * + * @return + */ + public Map getParameters() { + return values; + } + + /** + * 根据key获取values中的值 + * + * @param name + * @return + */ + public Object get(String name) { + if (values == null) { + values = new LinkedHashMap<>(); + return null; + } + return this.values.get(name); + } + + /** + * 根据key获取values中String类型值 + * + * @param key + * @return String + */ + public String getString(String key) { + return StrUtil.toString(get(key)); + } + + /** + * 获取排序字段 + * + * @return + */ + public String getSort() { + return StrUtil.toString(this.values.get("sort")); + } + + /** + * 获取排序 + * + * @return + */ + public String getOrder() { + return StrUtil.toString(this.values.get("order")); + } + + /** + * 获取排序 + * + * @return + */ + public String getOrderby() { + return StrUtil.toString(this.values.get("orderby")); + } + + /** + * 解析出mybatis plus分页查询参数 + */ + public Page getPlusPagingQueryEntity() { + Page page = new Page(); + //如果无current,默认返回1000条数据 + page.setCurrent(this.getPageNo()); + page.setSize(this.getPageSize()); + if (ObjectUtil.isNotNull(this.get("ifCount"))) { + page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount"))); + } else { + //默认给true + page.setSearchCount(true); + } + return page; + } + + /** + * 解析分页排序参数(pageHelper) + */ + public void parsePagingQueryParams() { + // 排序字段解析 + String orderBy = StrUtil.toString(this.get("orderby")).trim(); + String sortName = StrUtil.toString(this.get("sort")).trim(); + String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase(); + + if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) { + if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) { + sortOrder = "asc"; + } + this.set("orderby", sortName + " " + sortOrder); + } + } + + + /** + * 设置参数 + * + * @param name 参数名称 + * @param value 参数值 + */ + public void set(String name, Object value) { + if (ObjectUtil.isNotNull(value)) { + this.values.put(name, value); + } + } + + /** + * 移除参数 + * + * @param name + */ + public void remove(String name) { + this.values.remove(name); + } + + /** + * 清除所有参数 + */ + public void clear() { + if (values != null) { + values.clear(); + } + } + + + /** + * 自定义查询组装 + * + * @param map + * @return + */ + public QueryWrapper pageQueryWrapperCustom(Map map, QueryWrapper queryWrapper) { + // mybatis plus 分页相关的参数 + Map pageParams = PageUtils.filterPageParams(map); + //过滤空值,分页查询相关的参数 + Map colQueryMap = PageUtils.filterColumnQueryParams(map); + //排序 操作 + pageParams.forEach((k, v) -> { + switch (k) { + case "ascs": + queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + case "descs": + queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + } + }); + + //遍历进行字段查询条件组装 + colQueryMap.forEach((k, v) -> { + switch (k) { + case "pluginName": + case "datasourceName": + queryWrapper.like(StrUtil.toUnderlineCase(k), v); + break; + default: + queryWrapper.eq(StrUtil.toUnderlineCase(k), v); + } + }); + + return queryWrapper; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseResource.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseResource.java new file mode 100644 index 0000000..1eda8e4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/BaseResource.java @@ -0,0 +1,37 @@ +package com.czsj.bigdata.entity; 
+ +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class BaseResource { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("资源名称") + private String name; + + @ApiModelProperty("资源地址") + private String resource_address; + + @ApiModelProperty("更新时间") + private String update_time; + + @ApiModelProperty("服务器IP") + private String serverIp; + + @ApiModelProperty("服务器用户名") + private String serverUser; + + @ApiModelProperty("服务器密码") + private String serverPassword; + + @ApiModelProperty("资源类型") + private String type; + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/ColumnClass.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/ColumnClass.java new file mode 100644 index 0000000..0770b13 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/ColumnClass.java @@ -0,0 +1,14 @@ +package com.czsj.bigdata.entity; + +/** + * + * + * @Date: 2022/4/4 9:09 + * @Description: + **/ +public class ColumnClass { + private String columnType; + private String columnName; + private String tableName; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/Common.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/Common.java new file mode 100644 index 0000000..69bbd9a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/Common.java @@ -0,0 +1,14 @@ +package com.czsj.bigdata.entity; + +/** + * + * @Author: czsj + * @Date: 2022/10/7 11:21 + * @Description: 常量描述类 + **/ +public class Common { + public static final String DOCPAGE = "/doc.html"; + public static String PORT ="8080"; + public static String SERVERPORT ="server.port"; + public static String SERVERCONTEXTPATH ="server.contextPath"; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/DeployTask.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/DeployTask.java new file mode 100644 index 0000000..de2e8ff --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/DeployTask.java @@ -0,0 +1,32 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class DeployTask { + + @ApiModelProperty("作业ID") + private String jid; + + @ApiModelProperty("作业名称") + private String name; + + @ApiModelProperty("开始时间") + private String begintime; + + @ApiModelProperty("持续时间") + private String duration; + + @ApiModelProperty("结束时间") + private String endtime; + + @ApiModelProperty("任务数") + private String tasknumber; + + @ApiModelProperty("状态") + private String status; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/DevEnvSetting.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/DevEnvSetting.java new file mode 100644 index 0000000..2eb3c24 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/DevEnvSetting.java @@ -0,0 +1,55 @@ +package com.czsj.bigdata.entity; + +import com.baomidou.mybatisplus.annotation.TableField; +import com.fasterxml.jackson.annotation.JsonFormat; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * + */ +@Data +public class DevEnvSetting { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("属性名称") + private String name; + + @ApiModelProperty("属性值") + private String propValue; + + @ApiModelProperty("属性描述") + private String description; + + @ApiModelProperty("用户Id") + private Long userId; + + @ApiModelProperty("标记") + private Boolean flag; + + @ApiModelProperty("上传的URL") + private Boolean 
uploadurl; + + @ApiModelProperty("部署的URL") + private Boolean deployurl; + + @ApiModelProperty("展示的URL") + private Boolean showurl; + + @ApiModelProperty("下线的URL") + private Boolean offlineurl; + + @ApiModelProperty("创建时间") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + + @ApiModelProperty("更新时间") + private Date updateTime; + + @TableField(exist=false) + private String userName; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/DevTask.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/DevTask.java new file mode 100644 index 0000000..8322e27 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/DevTask.java @@ -0,0 +1,44 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +/** + * + */ +@Data +public class DevTask { + + @ApiModelProperty("属性Id") + private int id; + + @ApiModelProperty("任务名称") + private String name; + + @ApiModelProperty("任务类型") + private String tasktype; + + @ApiModelProperty("运行类型") + private String runtype; + + @ApiModelProperty("运行参数") + private String run_param; + + @ApiModelProperty("JAR包路径") + private String jarpath; + + @ApiModelProperty("任务的SQL") + private String sql_text; + + @ApiModelProperty("任务描述") + private String task_describe; + + @ApiModelProperty("创建时间") + private String create_time; + + @ApiModelProperty("更新时间") + private String update_time; + + @ApiModelProperty("类型") + private String type; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/InfoReport.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/InfoReport.java new file mode 100644 index 0000000..1fd971a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/InfoReport.java @@ -0,0 +1,9 @@ +package com.czsj.bigdata.entity; + +import lombok.Data; + +@Data +public class InfoReport { + private int resultCount; + private String countType; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobDatasource.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobDatasource.java new file mode 100644 index 0000000..b586b9f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobDatasource.java @@ -0,0 +1,149 @@ +package com.czsj.bigdata.entity; + +import com.alibaba.fastjson.annotation.JSONField; +import com.baomidou.mybatisplus.annotation.*; +import com.baomidou.mybatisplus.extension.activerecord.Model; +import com.czsj.bigdata.core.handler.AESEncryptHandler; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.io.Serializable; +import java.util.Date; + +/** + * jdbc数据源配置实体类(job_jdbc_datasource) + * + * @author zhouhongfa@gz-yibo.com + * @version v1.0 + * @since 2019-07-30 + */ + +@Data +@ApiModel +@TableName("job_jdbc_datasource") +public class JobDatasource extends Model { + + /** + * 自增主键 + */ + @TableId + @ApiModelProperty(value = "自增主键") + private Long id; + + /** + * 数据源名称 + */ + @ApiModelProperty(value = "数据源名称") + private String datasourceName; + + /** + * 数据源 + */ + @ApiModelProperty(value = "数据源") + private String datasource; + + /** + * 数据源分组 + */ + @ApiModelProperty(value = "数据源分组") + private String datasourceGroup; + + /** + * 用户名 + * AESEncryptHandler 加密类 + * MyBatis Plus 3.0.7.1之前版本没有typeHandler属性,需要升级到最低3.1.2 + */ + @ApiModelProperty(value = "用户名") + @TableField(typeHandler = AESEncryptHandler.class) + private String jdbcUsername; + + /** + * 密码 + */ + @TableField(typeHandler = AESEncryptHandler.class) + 
@ApiModelProperty(value = "密码") + private String jdbcPassword; + + /** + * jdbc url + */ + @ApiModelProperty(value = "jdbc url") + private String jdbcUrl; + + /** + * jdbc驱动类 + */ + @ApiModelProperty(value = "jdbc驱动类") + private String jdbcDriverClass; + + /** + * 状态:0删除 1启用 2禁用 + */ + @TableLogic + @ApiModelProperty(value = "状态:0删除 1启用 2禁用") + private Integer status; + + /** + * 创建人 + */ + @TableField(fill = FieldFill.INSERT) + @ApiModelProperty(value = "创建人", hidden = true) + private String createBy; + + /** + * 创建时间 + */ + @TableField(fill = FieldFill.INSERT) + @JSONField(format = "yyyy/MM/dd") + @ApiModelProperty(value = "创建时间", hidden = true) + private Date createDate; + + /** + * 更新人 + */ + @TableField(fill = FieldFill.INSERT_UPDATE) + @ApiModelProperty(value = "更新人", hidden = true) + private String updateBy; + + /** + * 更新时间 + */ + @TableField(fill = FieldFill.INSERT_UPDATE) + @JSONField(format = "yyyy/MM/dd") + @ApiModelProperty(value = "更新时间", hidden = true) + private Date updateDate; + + /** + * 备注 + */ + @ApiModelProperty(value = "备注", hidden = true) + private String comments; + + /** + * zookeeper地址 + */ + @ApiModelProperty(value = "zookeeper地址", hidden = true) + private String zkAdress; + + /** + * 数据库名 + */ + @ApiModelProperty(value = "数据库名", hidden = true) + private String databaseName; + + /** + * 数据库名 + */ + @ApiModelProperty(value = "orc库名", hidden = true) + private String orcschema; + /** + * 获取主键值 + * + * @return 主键值 + */ + @Override + protected Serializable pkVal() { + return this.id; + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobGroup.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobGroup.java new file mode 100644 index 0000000..454355a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobGroup.java @@ -0,0 +1,84 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Created by jingwk on 2019/11/17 + */ +public class JobGroup { + + @ApiModelProperty("执行器Id") + private int id; + @ApiModelProperty("执行器AppName") + private String appName; + @ApiModelProperty("执行器名称") + private String title; + @ApiModelProperty("排序") + private int order; + @ApiModelProperty("执行器地址类型:0=自动注册、1=手动录入") + private int addressType; + @ApiModelProperty("执行器地址列表,多地址逗号分隔(手动录入)") + private String addressList; + + // registry list + private List registryList; // 执行器地址列表(系统注册) + public List getRegistryList() { + if (addressList!=null && addressList.trim().length()>0) { + registryList = new ArrayList<>(Arrays.asList(addressList.split(","))); + } + return registryList; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getAppName() { + return appName; + } + + public void setAppName(String appName) { + this.appName = appName; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public int getOrder() { + return order; + } + + public void setOrder(int order) { + this.order = order; + } + + public int getAddressType() { + return addressType; + } + + public void setAddressType(int addressType) { + this.addressType = addressType; + } + + public String getAddressList() { + return addressList; + } + + public void setAddressList(String addressList) { + this.addressList = addressList; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobInfo.java 
b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobInfo.java new file mode 100644 index 0000000..3613402 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobInfo.java @@ -0,0 +1,125 @@ +package com.czsj.bigdata.entity; + +import com.baomidou.mybatisplus.annotation.TableField; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * xxl-job info + * + * @author jingwk 2019-11-17 14:25:49 + */ +@Data +public class JobInfo { + + @ApiModelProperty("主键ID") + private int id; + + @ApiModelProperty("执行器主键ID") + private int jobGroup; + + @ApiModelProperty("任务执行CRON表达式") + private String jobCron; + + @ApiModelProperty("任务描述") + private String jobDesc; + + private Date addTime; + + private Date updateTime; + + @ApiModelProperty("修改用户") + private Long userId; + + @ApiModelProperty("报警邮件") + private String alarmEmail; + + @ApiModelProperty("执行器路由策略") + private String executorRouteStrategy; + + @ApiModelProperty("执行器,任务Handler名称") + private String executorHandler; + + @ApiModelProperty("执行器,任务参数") + private String executorParam; + + @ApiModelProperty("阻塞处理策略") + private String executorBlockStrategy; + + @ApiModelProperty("任务执行超时时间,单位秒") + private int executorTimeout; + + @ApiModelProperty("失败重试次数") + private int executorFailRetryCount; + + @ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum") + private String glueType; + + @ApiModelProperty("GLUE源代码") + private String glueSource; + + @ApiModelProperty("GLUE备注") + private String glueRemark; + + @ApiModelProperty("GLUE更新时间") + private Date glueUpdatetime; + + @ApiModelProperty("子任务ID") + private String childJobId; + + @ApiModelProperty("调度状态:0-停止,1-运行") + private int triggerStatus; + + @ApiModelProperty("上次调度时间") + private long triggerLastTime; + + @ApiModelProperty("下次调度时间") + private long triggerNextTime; + + @ApiModelProperty("datax运行json") + private String jobJson; + + @ApiModelProperty("脚本动态参数") + private String replaceParam; + + @ApiModelProperty("增量日期格式") + private String replaceParamType; + + @ApiModelProperty("jvm参数") + private String jvmParam; + + @ApiModelProperty("增量初始时间") + private Date incStartTime; + + @ApiModelProperty("分区信息") + private String partitionInfo; + + @ApiModelProperty("最近一次执行状态") + private int lastHandleCode; + + @ApiModelProperty("所属项目Id") + private int projectId; + + @ApiModelProperty("主键字段") + private String primaryKey; + + @ApiModelProperty("增量初始id") + private Long incStartId; + + @ApiModelProperty("增量方式") + private int incrementType; + + @ApiModelProperty("datax的读表") + private String readerTable; + + @ApiModelProperty("数据源id") + private int datasourceId; + + @TableField(exist=false) + private String projectName; + + @TableField(exist=false) + private String userName; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLog.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLog.java new file mode 100644 index 0000000..d7cef1d --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLog.java @@ -0,0 +1,66 @@ +package com.czsj.bigdata.entity; + +import com.fasterxml.jackson.annotation.JsonFormat; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * czsj-ground log, used to track trigger process + * + * @author jingwk 2019-11-17 22:08:11 + */ +@Data +public class JobLog { + + private long id; + + // job info + @ApiModelProperty("执行器主键ID") + private int jobGroup; + @ApiModelProperty("任务,主键ID") + private int jobId; + @ApiModelProperty("任务描述") + 
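+ // NOTE (assumption, mirroring upstream xxl-job conventions): the triggerCode/handleCode
+ // fields further down appear to reuse ReturnT result codes, i.e. 200 = success,
+ // 500 = failure, 0 = not yet handled; a success callback would then be persisted roughly as:
+ //
+ //   jobLog.setHandleCode(200);   // ReturnT.SUCCESS_CODE (assumed, not shown in this patch)
+ //   jobLog.setHandleMsg("execute success");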
private String jobDesc; + + // execute info + @ApiModelProperty("执行器地址,本次执行的地址") + private String executorAddress; + @ApiModelProperty("执行器任务handler") + private String executorHandler; + @ApiModelProperty("执行器任务参数") + private String executorParam; + @ApiModelProperty("执行器任务分片参数,格式如 1/2") + private String executorShardingParam; + @ApiModelProperty("失败重试次数") + private int executorFailRetryCount; + + // trigger info + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @ApiModelProperty("调度-时间") + private Date triggerTime; + @ApiModelProperty("调度-结果") + private int triggerCode; + @ApiModelProperty("调度-日志") + private String triggerMsg; + + // handle info + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @ApiModelProperty("执行-时间") + private Date handleTime; + @ApiModelProperty("执行-状态") + private int handleCode; + @ApiModelProperty("执行-日志") + private String handleMsg; + + // alarm info + @ApiModelProperty("告警状态:0-默认、1-无需告警、2-告警成功、3-告警失败") + private int alarmStatus; + + @ApiModelProperty("DataX进程Id") + private String processId; + + @ApiModelProperty("增量最大id") + private Long maxId; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogGlue.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogGlue.java new file mode 100644 index 0000000..24f94b8 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogGlue.java @@ -0,0 +1,34 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * xxl-job log for glue, used to track job code process + * + * @author xuxueli 2016-5-19 17:57:46 + */ +@Data +public class JobLogGlue { + + private int id; + + @ApiModelProperty("任务主键ID") + private int jobId; + + @ApiModelProperty("GLUE类型\t#com.xxl.job.core.glue.GlueTypeEnum") + private String glueType; + + @ApiModelProperty("GLUE源代码") + private String glueSource; + + @ApiModelProperty("GLUE备注") + private String glueRemark; + + private Date addTime; + + private Date updateTime; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogReport.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogReport.java new file mode 100644 index 0000000..ca44167 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobLogReport.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.entity; + +import lombok.Data; + +import java.util.Date; + +@Data +public class JobLogReport { + + private int id; + + private Date triggerDay; + + private int runningCount; + private int sucCount; + private int failCount; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobPermission.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobPermission.java new file mode 100644 index 0000000..965af4b --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobPermission.java @@ -0,0 +1,57 @@ +package com.czsj.bigdata.entity; + +public class JobPermission { + + private int id; + //权限名称 + private String name; + + //权限描述 + private String description; + + //授权链接 + private String url; + + //父节点id + private int pid; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getUrl() { + return url; + } + + public void 
setUrl(String url) { + this.url = url; + } + + public int getPid() { + return pid; + } + + public void setPid(int pid) { + this.pid = pid; + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobProject.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobProject.java new file mode 100644 index 0000000..88ab912 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobProject.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.entity; + +import com.baomidou.mybatisplus.annotation.TableField; +import com.fasterxml.jackson.annotation.JsonFormat; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * Created by jingwk on 2022/05/24 + */ +@Data +public class JobProject { + + @ApiModelProperty("项目Id") + private int id; + + @ApiModelProperty("项目名称") + private String name; + + @ApiModelProperty("项目描述") + private String description; + + @ApiModelProperty("用户Id") + private Long userId; + + @ApiModelProperty("标记") + private Boolean flag; + + @ApiModelProperty("创建时间") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + + @ApiModelProperty("更新时间") + private Date updateTime; + + @TableField(exist=false) + private String userName; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRegistry.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRegistry.java new file mode 100644 index 0000000..62f9c73 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRegistry.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.entity; + +import com.fasterxml.jackson.annotation.JsonFormat; +import lombok.Data; + +import java.util.Date; + +/** + * Created by xuxueli on 16/9/30. + */ +@Data +public class JobRegistry { + + private int id; + private String registryGroup; + private String registryKey; + private String registryValue; + private double cpuUsage; + private double memoryUsage; + private double loadAverage; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRole.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRole.java new file mode 100644 index 0000000..cbde977 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobRole.java @@ -0,0 +1,30 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; + +/** + * @author xuxueli 2019-05-04 16:43:12 + */ +public class JobRole { + + private int id; + @ApiModelProperty("账号") + private String name; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobTemplate.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobTemplate.java new file mode 100644 index 0000000..eab5d25 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobTemplate.java @@ -0,0 +1,92 @@ +package com.czsj.bigdata.entity; + +import com.baomidou.mybatisplus.annotation.TableField; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.util.Date; + +/** + * xxl-job info + * + * @author jingwk 2019-11-17 14:25:49 + */ +@Data +public class JobTemplate { + + @ApiModelProperty("主键ID") + private int id; + + @ApiModelProperty("执行器主键ID") + private int jobGroup; + + 
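+ // NOTE: jobCron below is assumed to be a Quartz-style cron expression, as in JobInfo and
+ // SysJob (which validates through CronUtils later in this patch); a template firing every
+ // 30 minutes would therefore carry something like:
+ //
+ //   template.setJobCron("0 0/30 * * * ?");   // hypothetical usage, not in the patch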
@ApiModelProperty("任务执行CRON表达式") + private String jobCron; + + @ApiModelProperty("排序") + private String jobDesc; + + private Date addTime; + + private Date updateTime; + + @ApiModelProperty("修改用户") + private Long userId; + + @ApiModelProperty("报警邮件") + private String alarmEmail; + + @ApiModelProperty("执行器路由策略") + private String executorRouteStrategy; + + @ApiModelProperty("执行器,任务Handler名称") + private String executorHandler; + + @ApiModelProperty("执行器,任务参数") + private String executorParam; + + @ApiModelProperty("阻塞处理策略") + private String executorBlockStrategy; + + @ApiModelProperty("任务执行超时时间,单位秒") + private int executorTimeout; + + @ApiModelProperty("失败重试次数") + private int executorFailRetryCount; + + @ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum") + private String glueType; + + @ApiModelProperty("GLUE源代码") + private String glueSource; + + @ApiModelProperty("GLUE备注") + private String glueRemark; + + @ApiModelProperty("GLUE更新时间") + private Date glueUpdatetime; + + @ApiModelProperty("子任务ID") + private String childJobId; + + @ApiModelProperty("上次调度时间") + private long triggerLastTime; + + @ApiModelProperty("下次调度时间") + private long triggerNextTime; + + @ApiModelProperty("datax运行json") + private String jobJson; + + @ApiModelProperty("jvm参数") + private String jvmParam; + + @ApiModelProperty("所属项目") + private int projectId; + + @TableField(exist=false) + private String projectName; + + @TableField(exist=false) + private String userName; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JobUser.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobUser.java new file mode 100644 index 0000000..4d25317 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JobUser.java @@ -0,0 +1,78 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import org.springframework.util.StringUtils; + +/** + * @author xuxueli 2019-05-04 16:43:12 + */ +public class JobUser { + + private int id; + @ApiModelProperty("账号") + private String username; + @ApiModelProperty("密码") + private String password; + @ApiModelProperty("角色:0-普通用户、1-管理员") + private String role; + @ApiModelProperty("权限:执行器ID列表,多个逗号分割") + private String permission; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public String getRole() { + return role; + } + + public void setRole(String role) { + this.role = role; + } + + public String getPermission() { + return permission; + } + + public void setPermission(String permission) { + this.permission = permission; + } + + // plugin + public boolean validPermission(int jobGroup){ + if ("1".equals(this.role)) { + return true; + } else { + if (StringUtils.hasText(this.permission)) { + for (String permissionItem : this.permission.split(",")) { + if (String.valueOf(jobGroup).equals(permissionItem)) { + return true; + } + } + } + return false; + } + + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/JwtUser.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/JwtUser.java new file mode 100644 index 0000000..0e8a6b3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/JwtUser.java @@ -0,0 +1,84 @@ +package com.czsj.bigdata.entity; + +import 
org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.core.userdetails.UserDetails; + +import java.util.Collection; +import java.util.Collections; + +/** + * Created by jingwk on 2019/11/17 + */ +public class JwtUser implements UserDetails { + + private Integer id; + private String username; + private String password; + private Collection authorities; + + public JwtUser() { + } + + // 写一个能直接使用user创建jwtUser的构造器 + public JwtUser(JobUser user) { + id = user.getId(); + username = user.getUsername(); + password = user.getPassword(); + authorities = Collections.singleton(new SimpleGrantedAuthority(user.getRole())); + } + + @Override + public Collection getAuthorities() { + return authorities; + } + + @Override + public String getPassword() { + return password; + } + + @Override + public String getUsername() { + return username; + } + + @Override + public boolean isAccountNonExpired() { + return true; + } + + @Override + public boolean isAccountNonLocked() { + return true; + } + + @Override + public boolean isCredentialsNonExpired() { + return true; + } + + @Override + public boolean isEnabled() { + return true; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + @Override + public String toString() { + return "JwtUser{" + + "id=" + id + + ", username='" + username + '\'' + + ", password='" + password + '\'' + + ", authorities=" + authorities + + '}'; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/LoginUser.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/LoginUser.java new file mode 100644 index 0000000..7f375ef --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/LoginUser.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.entity; + +import lombok.Data; + +/** + * Created by jingwk on 2019/11/17 + */ +@Data +public class LoginUser { + + private String username; + private String password; + private Integer rememberMe; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/OperLog.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/OperLog.java new file mode 100644 index 0000000..980e6bb --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/OperLog.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.entity; + +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +@Data +public class OperLog { + private long id; + + @ApiModelProperty("操作") + private String operate; + @ApiModelProperty("用户名") + private String user; + @ApiModelProperty("地址") + private String address; + @ApiModelProperty("创建时间") + private String createtime; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/ResponseData.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/ResponseData.java new file mode 100644 index 0000000..7435cab --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/ResponseData.java @@ -0,0 +1,73 @@ +package com.czsj.bigdata.entity; + +import com.alibaba.fastjson.annotation.JSONField; +import com.alibaba.fastjson.serializer.SerializerFeature; + +/** + * + * + * @Date: 2022/2/1 12:34 + * @Description: + **/ +public class ResponseData { + String msg; + boolean success; + + @JSONField(serialzeFeatures = {SerializerFeature.WriteMapNullValue}) + Object data; + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public Object getData() { + return data; + } + + public 
void setData(Object data) { + this.data = data; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public static ResponseData apiSuccess(Object data) { + ResponseData dto = new ResponseData(); + dto.setData(data); + dto.setSuccess(true); + // dto.setMsg("Api access succeeded"); + return dto; + + } + + public static ResponseData successWithMsg(String msg) { + ResponseData dto = new ResponseData(); + dto.setData(null); + dto.setSuccess(true); + dto.setMsg(msg); + return dto; + } + + public static ResponseData successWithData(Object data) { + ResponseData dto = new ResponseData(); + dto.setData(data); + dto.setSuccess(true); + return dto; + } + + public static ResponseData fail(String msg) { + ResponseData dto = new ResponseData(); + dto.setSuccess(false); + dto.setMsg(msg); + return dto; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJob.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJob.java new file mode 100644 index 0000000..c0b7444 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJob.java @@ -0,0 +1,172 @@ +package com.czsj.bigdata.entity; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.czsj.bigdata.util.CronUtils; +import com.czsj.common.annotation.Excel; +import com.czsj.common.annotation.Excel.ColumnType; +import com.czsj.common.constant.ScheduleConstants; +import com.czsj.common.core.domain.BaseEntity; +import com.czsj.common.utils.StringUtils; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; + +import javax.validation.constraints.NotBlank; +import javax.validation.constraints.Size; +import java.util.Date; + +/** + * 定时任务调度表 sys_job + * + * @author czsj + */ +public class SysJob extends BaseEntity +{ + private static final long serialVersionUID = 1L; + + /** 任务ID */ + @Excel(name = "任务序号", cellType = ColumnType.NUMERIC) + private Long jobId; + + /** 任务名称 */ + @Excel(name = "任务名称") + private String jobName; + + /** 任务组名 */ + @Excel(name = "任务组名") + private String jobGroup; + + /** 调用目标字符串 */ + @Excel(name = "调用目标字符串") + private String invokeTarget; + + /** cron执行表达式 */ + @Excel(name = "执行表达式 ") + private String cronExpression; + + /** cron计划策略 */ + @Excel(name = "计划策略 ", readConverterExp = "0=默认,1=立即触发执行,2=触发一次执行,3=不触发立即执行") + private String misfirePolicy = ScheduleConstants.MISFIRE_DEFAULT; + + /** 是否并发执行(0允许 1禁止) */ + @Excel(name = "并发执行", readConverterExp = "0=允许,1=禁止") + private String concurrent; + + /** 任务状态(0正常 1暂停) */ + @Excel(name = "任务状态", readConverterExp = "0=正常,1=暂停") + private String status; + + public Long getJobId() + { + return jobId; + } + + public void setJobId(Long jobId) + { + this.jobId = jobId; + } + + @NotBlank(message = "任务名称不能为空") + @Size(min = 0, max = 64, message = "任务名称不能超过64个字符") + public String getJobName() + { + return jobName; + } + + public void setJobName(String jobName) + { + this.jobName = jobName; + } + + public String getJobGroup() + { + return jobGroup; + } + + public void setJobGroup(String jobGroup) + { + this.jobGroup = jobGroup; + } + + @NotBlank(message = "调用目标字符串不能为空") + @Size(min = 0, max = 500, message = "调用目标字符串长度不能超过500个字符") + public String getInvokeTarget() + { + return invokeTarget; + } + + public void setInvokeTarget(String invokeTarget) + { + this.invokeTarget = invokeTarget; + } + + @NotBlank(message = "Cron执行表达式不能为空") + @Size(min = 0, max = 255, message = "Cron执行表达式不能超过255个字符") + public String 
getCronExpression() + { + return cronExpression; + } + + public void setCronExpression(String cronExpression) + { + this.cronExpression = cronExpression; + } + + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss") + public Date getNextValidTime() + { + if (StringUtils.isNotEmpty(cronExpression)) + { + return CronUtils.getNextExecution(cronExpression); + } + return null; + } + + public String getMisfirePolicy() + { + return misfirePolicy; + } + + public void setMisfirePolicy(String misfirePolicy) + { + this.misfirePolicy = misfirePolicy; + } + + public String getConcurrent() + { + return concurrent; + } + + public void setConcurrent(String concurrent) + { + this.concurrent = concurrent; + } + + public String getStatus() + { + return status; + } + + public void setStatus(String status) + { + this.status = status; + } + + @Override + public String toString() { + return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE) + .append("jobId", getJobId()) + .append("jobName", getJobName()) + .append("jobGroup", getJobGroup()) + .append("cronExpression", getCronExpression()) + .append("nextValidTime", getNextValidTime()) + .append("misfirePolicy", getMisfirePolicy()) + .append("concurrent", getConcurrent()) + .append("status", getStatus()) + .append("createBy", getCreateBy()) + .append("createTime", getCreateTime()) + .append("updateBy", getUpdateBy()) + .append("updateTime", getUpdateTime()) + .append("remark", getRemark()) + .toString(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJobLog.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJobLog.java new file mode 100644 index 0000000..65ff280 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysJobLog.java @@ -0,0 +1,156 @@ +package com.czsj.bigdata.entity; + +import com.czsj.common.annotation.Excel; +import com.czsj.common.core.domain.BaseEntity; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; + +import java.util.Date; + +/** + * 定时任务调度日志表 sys_job_log + * + * @author czsj + */ +public class SysJobLog extends BaseEntity +{ + private static final long serialVersionUID = 1L; + + /** ID */ + @Excel(name = "日志序号") + private Long jobLogId; + + /** 任务名称 */ + @Excel(name = "任务名称") + private String jobName; + + /** 任务组名 */ + @Excel(name = "任务组名") + private String jobGroup; + + /** 调用目标字符串 */ + @Excel(name = "调用目标字符串") + private String invokeTarget; + + /** 日志信息 */ + @Excel(name = "日志信息") + private String jobMessage; + + /** 执行状态(0正常 1失败) */ + @Excel(name = "执行状态", readConverterExp = "0=正常,1=失败") + private String status; + + /** 异常信息 */ + @Excel(name = "异常信息") + private String exceptionInfo; + + /** 开始时间 */ + private Date startTime; + + /** 停止时间 */ + private Date stopTime; + + public Long getJobLogId() + { + return jobLogId; + } + + public void setJobLogId(Long jobLogId) + { + this.jobLogId = jobLogId; + } + + public String getJobName() + { + return jobName; + } + + public void setJobName(String jobName) + { + this.jobName = jobName; + } + + public String getJobGroup() + { + return jobGroup; + } + + public void setJobGroup(String jobGroup) + { + this.jobGroup = jobGroup; + } + + public String getInvokeTarget() + { + return invokeTarget; + } + + public void setInvokeTarget(String invokeTarget) + { + this.invokeTarget = invokeTarget; + } + + public String getJobMessage() + { + return jobMessage; + } + + public void setJobMessage(String jobMessage) + { + this.jobMessage = jobMessage; + } + + public String getStatus() + { + return status; 
+ } + + public void setStatus(String status) + { + this.status = status; + } + + public String getExceptionInfo() + { + return exceptionInfo; + } + + public void setExceptionInfo(String exceptionInfo) + { + this.exceptionInfo = exceptionInfo; + } + + public Date getStartTime() + { + return startTime; + } + + public void setStartTime(Date startTime) + { + this.startTime = startTime; + } + + public Date getStopTime() + { + return stopTime; + } + + public void setStopTime(Date stopTime) + { + this.stopTime = stopTime; + } + + @Override + public String toString() { + return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE) + .append("jobLogId", getJobLogId()) + .append("jobName", getJobName()) + .append("jobGroup", getJobGroup()) + .append("jobMessage", getJobMessage()) + .append("status", getStatus()) + .append("exceptionInfo", getExceptionInfo()) + .append("startTime", getStartTime()) + .append("stopTime", getStopTime()) + .toString(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/SysServers.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysServers.java new file mode 100644 index 0000000..f8faa49 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/SysServers.java @@ -0,0 +1,264 @@ +package com.czsj.bigdata.entity; + +import java.math.BigDecimal; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; +import com.czsj.common.annotation.Excel; +import com.czsj.common.core.domain.BaseEntity; + +/** + * 执行器管理对象 sys_servers + * + * @author czsj + * @date 2022-04-28 + */ +public class SysServers extends BaseEntity +{ + private static final long serialVersionUID = 1L; + + /** $column.columnComment */ + private Long id; + + /** 分组名 */ + @Excel(name = "分组名") + private String groupname; + + /** 分组编码 */ + @Excel(name = "分组编码") + private String groupcode; + + /** 服务器地址 */ + @Excel(name = "服务器地址") + private String serveraddress; + + /** 系统名 */ + @Excel(name = "系统名") + private String osname; + + /** 程序启动时间 */ + @Excel(name = "程序启动时间") + private String starttime; + + /** pid */ + @Excel(name = "pid") + private String pid; + + /** cpu核心数 */ + @Excel(name = "cpu核心数") + private Long cpucores; + + /** cpu使用率 */ + @Excel(name = "cpu使用率") + private BigDecimal cpuutilization; + + /** cpu空闲率 */ + @Excel(name = "cpu空闲率") + private BigDecimal cpurate; + + /** JVM初始内存 */ + @Excel(name = "JVM初始内存") + private BigDecimal jvminitialmemory; + + /** JVM最大内存 */ + @Excel(name = "JVM最大内存") + private BigDecimal jvmmaxmemory; + + /** JVM已用内存 */ + @Excel(name = "JVM已用内存") + private BigDecimal jvmusedmemory; + + /** 总物理内存 */ + @Excel(name = "总物理内存") + private BigDecimal physicalmemory; + + /** 剩余物理内存 */ + @Excel(name = "剩余物理内存") + private BigDecimal surplusmemory; + + /** 已用物理内存 */ + @Excel(name = "已用物理内存") + private BigDecimal usedmemory; + + /** 磁盘状态 */ + @Excel(name = "磁盘状态") + private String diskstatus; + + public void setId(Long id) + { + this.id = id; + } + + public Long getId() + { + return id; + } + public void setGroupname(String groupname) + { + this.groupname = groupname; + } + + public String getGroupname() + { + return groupname; + } + public void setGroupcode(String groupcode) + { + this.groupcode = groupcode; + } + + public String getGroupcode() + { + return groupcode; + } + public void setServeraddress(String serveraddress) + { + this.serveraddress = serveraddress; + } + + public String getServeraddress() + { + return serveraddress; + } + public void setOsname(String osname) + { + this.osname = 
osname; + } + + public String getOsname() + { + return osname; + } + public void setStarttime(String starttime) + { + this.starttime = starttime; + } + + public String getStarttime() + { + return starttime; + } + public void setPid(String pid) + { + this.pid = pid; + } + + public String getPid() + { + return pid; + } + public void setCpucores(Long cpucores) + { + this.cpucores = cpucores; + } + + public Long getCpucores() + { + return cpucores; + } + public void setCpuutilization(BigDecimal cpuutilization) + { + this.cpuutilization = cpuutilization; + } + + public BigDecimal getCpuutilization() + { + return cpuutilization; + } + public void setCpurate(BigDecimal cpurate) + { + this.cpurate = cpurate; + } + + public BigDecimal getCpurate() + { + return cpurate; + } + public void setJvminitialmemory(BigDecimal jvminitialmemory) + { + this.jvminitialmemory = jvminitialmemory; + } + + public BigDecimal getJvminitialmemory() + { + return jvminitialmemory; + } + public void setJvmmaxmemory(BigDecimal jvmmaxmemory) + { + this.jvmmaxmemory = jvmmaxmemory; + } + + public BigDecimal getJvmmaxmemory() + { + return jvmmaxmemory; + } + public void setJvmusedmemory(BigDecimal jvmusedmemory) + { + this.jvmusedmemory = jvmusedmemory; + } + + public BigDecimal getJvmusedmemory() + { + return jvmusedmemory; + } + public void setPhysicalmemory(BigDecimal physicalmemory) + { + this.physicalmemory = physicalmemory; + } + + public BigDecimal getPhysicalmemory() + { + return physicalmemory; + } + public void setSurplusmemory(BigDecimal surplusmemory) + { + this.surplusmemory = surplusmemory; + } + + public BigDecimal getSurplusmemory() + { + return surplusmemory; + } + public void setUsedmemory(BigDecimal usedmemory) + { + this.usedmemory = usedmemory; + } + + public BigDecimal getUsedmemory() + { + return usedmemory; + } + public void setDiskstatus(String diskstatus) + { + this.diskstatus = diskstatus; + } + + public String getDiskstatus() + { + return diskstatus; + } + + @Override + public String toString() { + return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE) + .append("id", getId()) + .append("groupname", getGroupname()) + .append("groupcode", getGroupcode()) + .append("serveraddress", getServeraddress()) + .append("osname", getOsname()) + .append("starttime", getStarttime()) + .append("pid", getPid()) + .append("cpucores", getCpucores()) + .append("cpuutilization", getCpuutilization()) + .append("cpurate", getCpurate()) + .append("jvminitialmemory", getJvminitialmemory()) + .append("jvmmaxmemory", getJvmmaxmemory()) + .append("jvmusedmemory", getJvmusedmemory()) + .append("physicalmemory", getPhysicalmemory()) + .append("surplusmemory", getSurplusmemory()) + .append("usedmemory", getUsedmemory()) + .append("diskstatus", getDiskstatus()) + .append("createTime", getCreateTime()) + .append("createBy", getCreateBy()) + .toString(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/entity/SystemMonitor.java b/czsj-system/src/main/java/com/czsj/bigdata/entity/SystemMonitor.java new file mode 100644 index 0000000..4cf7457 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/entity/SystemMonitor.java @@ -0,0 +1,47 @@ +package com.czsj.bigdata.entity; + +import lombok.Data; + +@Data +public class SystemMonitor { + + //系统名称 + private String osName; + + //程序启动时间 + private String startTime; + + //pid + private String pid; + + //cpu核数 + private Integer cpuCores; + + //cpu使用率 + private Double cpuUtilization; + + //cpu空闲率 + private Double cpuRate; + + //JVM初始内存 + private Double 
jvmInitialMemory; + + //JVM最大内存 + private Double jvmMaxMemory; + + //JVM已用内存 + private Double jvmUsedMemory; + + //总物理内存 + private Double physicalMemory; + + //剩余物理内存 + private Double surplusMemory; + + //已用物理内存 + private Double usedMemory; + + //磁盘状态 + private String diskStatus; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/exception/TokenIsExpiredException.java b/czsj-system/src/main/java/com/czsj/bigdata/exception/TokenIsExpiredException.java new file mode 100644 index 0000000..3dd1120 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/exception/TokenIsExpiredException.java @@ -0,0 +1,27 @@ +package com.czsj.bigdata.exception; + +/** + * @description: 自定义异常 + * @author: jingwk + * @date: 2019/11/17 17:21 + */ +public class TokenIsExpiredException extends Exception{ + public TokenIsExpiredException() { + } + + public TokenIsExpiredException(String message) { + super(message); + } + + public TokenIsExpiredException(String message, Throwable cause) { + super(message, cause); + } + + public TokenIsExpiredException(Throwable cause) { + super(cause); + } + + public TokenIsExpiredException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthenticationFilter.java b/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthenticationFilter.java new file mode 100644 index 0000000..a023a3c --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthenticationFilter.java @@ -0,0 +1,92 @@ +package com.czsj.bigdata.filter; + +import com.alibaba.fastjson.JSON; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.bigdata.entity.JwtUser; +import com.czsj.bigdata.entity.LoginUser; +import com.czsj.bigdata.util.JwtTokenUtils; +import com.czsj.core.biz.model.ReturnT; +import lombok.extern.slf4j.Slf4j; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; + +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static com.czsj.core.util.Constants.SPLIT_COMMA; + +/** + * Created by jingwk on 2019/11/17 + */ +@Slf4j +public class JWTAuthenticationFilter extends UsernamePasswordAuthenticationFilter { + + private ThreadLocal<Integer> rememberMe = new ThreadLocal<>(); + private AuthenticationManager authenticationManager; + + public JWTAuthenticationFilter(AuthenticationManager authenticationManager) { + this.authenticationManager = authenticationManager; + super.setFilterProcessesUrl("/api/auth/login"); + } + + @Override + public Authentication attemptAuthentication(HttpServletRequest request, + HttpServletResponse response) throws AuthenticationException { + + // 从输入流中获取到登录的信息 + try { + LoginUser loginUser = new ObjectMapper().readValue(request.getInputStream(), LoginUser.class); + 
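+ // NOTE (assumption, not part of the original patch): rememberMe arrives as a nullable
+ // Integer from the request body, and successfulAuthentication() unboxes it via
+ // rememberMe.get() == 1, which throws NullPointerException when a client omits the field;
+ // a defensive variant of the next statement would be:
+ //
+ //   Integer remember = loginUser.getRememberMe();
+ //   rememberMe.set(remember == null ? 0 : remember);
+ //
+ // and a finally { rememberMe.remove(); } would keep pooled servlet threads from carrying
+ // over the previous request's value.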
rememberMe.set(loginUser.getRememberMe()); + return authenticationManager.authenticate( + new UsernamePasswordAuthenticationToken(loginUser.getUsername(), loginUser.getPassword(), new ArrayList<>()) + ); + } catch (IOException e) { + logger.error("attemptAuthentication error", e); + return null; + } + } + + // 成功验证后调用的方法 + // 如果验证成功,就生成token并返回 + @Override + protected void successfulAuthentication(HttpServletRequest request, + HttpServletResponse response, + FilterChain chain, + Authentication authResult) throws IOException { + + JwtUser jwtUser = (JwtUser) authResult.getPrincipal(); + boolean isRemember = rememberMe.get() == 1; + + String role = ""; + Collection<? extends GrantedAuthority> authorities = jwtUser.getAuthorities(); + for (GrantedAuthority authority : authorities){ + role = authority.getAuthority(); + } + + String token = JwtTokenUtils.createToken(jwtUser.getId(),jwtUser.getUsername(), role, isRemember); + response.setHeader("token", JwtTokenUtils.TOKEN_PREFIX + token); + response.setCharacterEncoding("UTF-8"); + Map<String, Object> maps = new HashMap<>(); + maps.put("data", JwtTokenUtils.TOKEN_PREFIX + token); + maps.put("roles", role.split(SPLIT_COMMA)); + response.getWriter().write(JSON.toJSONString(new ReturnT<>(maps))); + } + + @Override + protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException, ServletException { + response.setCharacterEncoding("UTF-8"); + response.getWriter().write(JSON.toJSON(new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("login_param_invalid"))).toString()); + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthorizationFilter.java b/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthorizationFilter.java new file mode 100644 index 0000000..d89a896 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/filter/JWTAuthorizationFilter.java @@ -0,0 +1,73 @@ +package com.czsj.bigdata.filter; + +import com.alibaba.fastjson.JSON; +import com.baomidou.mybatisplus.extension.api.R; +import com.czsj.bigdata.exception.TokenIsExpiredException; +import com.czsj.bigdata.util.JwtTokenUtils; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.web.authentication.www.BasicAuthenticationFilter; + +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.Collections; + +/** + * Created by jingwk on 2019/11/17 + */ +public class JWTAuthorizationFilter extends BasicAuthenticationFilter { + + public JWTAuthorizationFilter(AuthenticationManager authenticationManager) { + super(authenticationManager); + } + + @Override + protected void doFilterInternal(HttpServletRequest request, + HttpServletResponse response, + FilterChain chain) throws IOException, ServletException { + + String tokenHeader = request.getHeader(JwtTokenUtils.TOKEN_HEADER); + // 如果请求头中没有Authorization信息则直接放行 + if (tokenHeader == null || !tokenHeader.startsWith(JwtTokenUtils.TOKEN_PREFIX)) { + chain.doFilter(request, response); + return; + } + // 如果请求头中有token,则进行解析,并且设置认证信息 + try { + 
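+ // NOTE: getAuthentication() below returns null when the token carries no username, which
+ // leaves a null Authentication in the security context so the request fails closed; an
+ // explicit guard (a sketch, not in the original patch) would make that path obvious:
+ //
+ //   UsernamePasswordAuthenticationToken auth = getAuthentication(tokenHeader);
+ //   if (auth != null) {
+ //       SecurityContextHolder.getContext().setAuthentication(auth);
+ //   }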
SecurityContextHolder.getContext().setAuthentication(getAuthentication(tokenHeader)); + } catch (TokenIsExpiredException e) { + //返回json形式的错误信息 + response.setCharacterEncoding("UTF-8"); + response.setContentType("application/json; charset=utf-8"); + response.getWriter().write(JSON.toJSONString(R.failed(e.getMessage()))); + response.getWriter().flush(); + return; + } + super.doFilterInternal(request, response, chain); + } + + // 这里从token中获取用户信息并新建一个token + private UsernamePasswordAuthenticationToken getAuthentication(String tokenHeader) throws TokenIsExpiredException { + String token = tokenHeader.replace(JwtTokenUtils.TOKEN_PREFIX, ""); + boolean expiration = JwtTokenUtils.isExpiration(token); + if (expiration) { + throw new TokenIsExpiredException("登录时间过长,请退出重新登录"); + } + else { + String username = JwtTokenUtils.getUsername(token); + String role = JwtTokenUtils.getUserRole(token); + if (username != null) { + return new UsernamePasswordAuthenticationToken(username, null, + Collections.singleton(new SimpleGrantedAuthority(role)) + ); + } + } + return null; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/APIAuthMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/APIAuthMapper.java new file mode 100644 index 0000000..6e6c184 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/APIAuthMapper.java @@ -0,0 +1,24 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.APIAuth; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +@Mapper +public interface APIAuthMapper extends BaseMapper{ + + int delete(@Param("id") int id); + + List findAll(); + + int save(APIAuth apiAuth); + + int update(APIAuth entity); + + APIAuth getById(int id); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/BaseResourceMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/BaseResourceMapper.java new file mode 100644 index 0000000..817af5b --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/BaseResourceMapper.java @@ -0,0 +1,30 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.BaseResource; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +@Mapper +public interface BaseResourceMapper extends BaseMapper{ + + int delete(@Param("id") int id); + + List findList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("name") String name); + + int save(BaseResource apiAuth); + + int update(BaseResource entity); + + BaseResource getById(int id); + + List getResource(); + + + List getFileResource(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevEnvSettingMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevEnvSettingMapper.java new file mode 100644 index 0000000..9697974 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevEnvSettingMapper.java @@ -0,0 +1,13 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.czsj.bigdata.entity.DevEnvSetting; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; +@Mapper +public interface DevEnvSettingMapper extends BaseMapper{ + IPage getDevEnvSettingListPaging(IPage page, + 
@Param("searchName") String searchName); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevJarMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevJarMapper.java new file mode 100644 index 0000000..d36b904 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevJarMapper.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.DevTask; +import org.apache.ibatis.annotations.Mapper; + +@Mapper +public interface DevJarMapper extends BaseMapper{ + + + int save(DevTask devJar); + + int update(DevTask entity); + + DevTask getById(int id); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevSQLMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevSQLMapper.java new file mode 100644 index 0000000..8c268b1 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevSQLMapper.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.DevTask; +import org.apache.ibatis.annotations.Mapper; + +@Mapper +public interface DevSQLMapper extends BaseMapper{ + + + int save(DevTask devJar); + + int update(DevTask entity); + + DevTask getById(int id); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevTaskMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevTaskMapper.java new file mode 100644 index 0000000..11ab3f1 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/DevTaskMapper.java @@ -0,0 +1,28 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.DevEnvSetting; +import com.czsj.bigdata.entity.DevTask; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +@Mapper +public interface DevTaskMapper extends BaseMapper{ + + int delete(@Param("id") int id); + + int save(DevTask devJar); + + int update(DevTask entity); + + DevTask getById(int id); + + List findList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("type") String type); + + String findPath(@Param("tasktype") String tasktype); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobDatasourceMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobDatasourceMapper.java new file mode 100644 index 0000000..8ca0460 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobDatasourceMapper.java @@ -0,0 +1,25 @@ +package com.czsj.bigdata.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.czsj.bigdata.entity.JobDatasource; +import org.apache.ibatis.annotations.Mapper; + +import java.util.List; + +/** + * jdbc数据源配置表数据库访问层 + * + * @author zhouhongfa@gz-yibo.com + * @version v1.0 + * @since 2019-07-30 + */ +@Mapper +public interface JobDatasourceMapper extends BaseMapper { + int update(JobDatasource datasource); + + JobDatasource getDataSourceById(Long id); + + List findDataSourceName(); + + List getdataSourceAll(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobGroupMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobGroupMapper.java new file mode 100644 index 0000000..2952f7b --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobGroupMapper.java @@ -0,0 +1,29 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobGroup; +import 
org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Created by xuxueli on 16/9/30. + */ +@Mapper +public interface JobGroupMapper { + + List findAll(); + + List find(@Param("appName") String appName, + @Param("title") String title, + @Param("addressList") String addressList); + + int save(JobGroup jobGroup); + List findByAddressType(@Param("addressType") int addressType); + + int update(JobGroup jobGroup); + + int remove(@Param("id") int id); + + JobGroup load(@Param("id") int id); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobInfoMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobInfoMapper.java new file mode 100644 index 0000000..a9d808c --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobInfoMapper.java @@ -0,0 +1,60 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobInfo; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; + + +/** + * job info + * + * @author xuxueli 2016-1-12 18:03:45 + */ +@Mapper +public interface JobInfoMapper { + + List pageList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("triggerStatus") int triggerStatus, + @Param("jobDesc") String jobDesc, + @Param("glueType") String glueType, + @Param("userId") int userId, + @Param("projectIds") Integer[] projectIds); + + int pageListCount(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("triggerStatus") int triggerStatus, + @Param("jobDesc") String jobDesc, + @Param("glueType") String glueType, + @Param("userId") int userId, + @Param("projectIds") Integer[] projectIds); + + List findAll(); + + int save(JobInfo info); + + JobInfo loadById(@Param("id") int id); + + int update(JobInfo jobInfo); + + int delete(@Param("id") long id); + + List getJobsByGroup(@Param("jobGroup") int jobGroup); + + int findAllCount(); + + List scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize); + + int scheduleUpdate(JobInfo xxlJobInfo); + + int incrementTimeUpdate(@Param("id") int id, @Param("incStartTime") Date incStartTime); + + public int updateLastHandleCode(@Param("id") int id,@Param("lastHandleCode")int lastHandleCode); + + void incrementIdUpdate(@Param("id") int id, @Param("incStartId")Long incStartId); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogGlueMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogGlueMapper.java new file mode 100644 index 0000000..36f78b2 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogGlueMapper.java @@ -0,0 +1,25 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobLogGlue; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * job log for glue + * + * @author xuxueli 2016-5-19 18:04:56 + */ +@Mapper +public interface JobLogGlueMapper { + + int save(JobLogGlue jobLogGlue); + + List findByJobId(@Param("jobId") int jobId); + + int removeOld(@Param("jobId") int jobId, @Param("limit") int limit); + + int deleteByJobId(@Param("jobId") int jobId); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogMapper.java new file mode 100644 index 0000000..6994d8a --- /dev/null +++ 
b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogMapper.java @@ -0,0 +1,67 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobLog; +import com.czsj.bigdata.entity.OperLog; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; +import java.util.Map; + +/** + * job log + * + * @author xuxueli 2016-1-12 18:03:06 + */ +@Mapper +public interface JobLogMapper { + + // exist jobId not use jobGroup, not exist use jobGroup + List pageList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("jobId") int jobId, + @Param("triggerTimeStart") Date triggerTimeStart, + @Param("triggerTimeEnd") Date triggerTimeEnd, + @Param("logStatus") int logStatus); + + int pageListCount(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("jobId") int jobId, + @Param("triggerTimeStart") Date triggerTimeStart, + @Param("triggerTimeEnd") Date triggerTimeEnd, + @Param("logStatus") int logStatus); + + JobLog load(@Param("id") long id); + + long save(JobLog jobLog); + + int updateTriggerInfo(JobLog jobLog); + + int updateHandleInfo(JobLog jobLog); + + int updateProcessId(@Param("id") long id, + @Param("processId") String processId); + + int delete(@Param("jobId") int jobId); + + Map findLogReport(@Param("from") Date from, + @Param("to") Date to); + + List findClearLogIds(@Param("jobGroup") int jobGroup, + @Param("jobId") int jobId, + @Param("clearBeforeTime") Date clearBeforeTime, + @Param("clearBeforeNum") int clearBeforeNum, + @Param("pagesize") int pagesize); + + int clearLog(@Param("logIds") List logIds); + + List findFailJobLogIds(@Param("pagesize") int pagesize); + + int updateAlarmStatus(@Param("logId") long logId, + @Param("oldAlarmStatus") int oldAlarmStatus, + @Param("newAlarmStatus") int newAlarmStatus); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogReportMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogReportMapper.java new file mode 100644 index 0000000..7cc0afd --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogReportMapper.java @@ -0,0 +1,30 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.InfoReport; +import com.czsj.bigdata.entity.JobLogReport; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; + +/** + * job log + * + * @author xuxueli 2019-11-22 + */ +@Mapper +public interface JobLogReportMapper { + + int save(JobLogReport xxlJobLogReport); + + int update(JobLogReport xxlJobLogReport); + + List queryLogReport(@Param("triggerDayFrom") Date triggerDayFrom, + @Param("triggerDayTo") Date triggerDayTo); + + + List getInfoReportCount(); + + JobLogReport queryLogReportTotal(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogSysMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogSysMapper.java new file mode 100644 index 0000000..bce5511 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobLogSysMapper.java @@ -0,0 +1,66 @@ +package com.czsj.bigdata.mapper; + + +import com.czsj.bigdata.entity.SysJobLog; + +import java.util.List; + +/** + * 调度任务日志信息 数据层 + * + * @author czsj + */ +public interface JobLogSysMapper +{ + /** + * 获取quartz调度器日志的计划任务 + * + * @param jobLog 调度日志信息 + * @return 调度任务日志集合 + */ + public List selectJobLogList(SysJobLog jobLog); 
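+ // NOTE (assumption): JobLogSysMapper, like JobSysMapper further below, carries no @Mapper
+ // annotation, so it presumably relies on a Java-config mapper scan such as:
+ //
+ //   @MapperScan("com.czsj.bigdata.mapper")   // hypothetical package, not shown in this patch
+ //
+ // plus an XML mapper whose namespace matches this interface's fully qualified name.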
+ + /** + * 查询所有调度任务日志 + * + * @return 调度任务日志列表 + */ + public List selectJobLogAll(); + + /** + * 通过调度任务日志ID查询调度信息 + * + * @param jobLogId 调度任务日志ID + * @return 调度任务日志对象信息 + */ + public SysJobLog selectJobLogById(Long jobLogId); + + /** + * 新增任务日志 + * + * @param jobLog 调度日志信息 + * @return 结果 + */ + public int insertJobLog(SysJobLog jobLog); + + /** + * 批量删除调度日志信息 + * + * @param logIds 需要删除的数据ID + * @return 结果 + */ + public int deleteJobLogByIds(Long[] logIds); + + /** + * 删除任务日志 + * + * @param jobId 调度日志ID + * @return 结果 + */ + public int deleteJobLogById(Long jobId); + + /** + * 清空任务日志 + */ + public void cleanJobLog(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobProjectMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobProjectMapper.java new file mode 100644 index 0000000..dece20e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobProjectMapper.java @@ -0,0 +1,26 @@ +package com.czsj.bigdata.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.czsj.bigdata.entity.JobProject; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +/** + * Project + * + * @author jingwk + * @version v2.1.12 + * @since 2022-05-24 + */ +@Mapper +public interface JobProjectMapper extends BaseMapper { + /** + * project page + * @param page + * @param searchName + * @return + */ + IPage getProjectListPaging(IPage page, + @Param("searchName") String searchName); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobRegistryMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobRegistryMapper.java new file mode 100644 index 0000000..e06d534 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobRegistryMapper.java @@ -0,0 +1,50 @@ +package com.czsj.bigdata.mapper; + + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.czsj.bigdata.entity.JobProject; +import com.czsj.bigdata.entity.JobRegistry; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; + +/** + * Created by jingwk on 2019/11/17 + */ +@Mapper +public interface JobRegistryMapper extends BaseMapper { + + public List findDead(@Param("timeout") int timeout, + @Param("nowTime") Date nowTime); + + public int removeDead(@Param("ids") List ids); + + public List findAll(@Param("timeout") int timeout, + @Param("nowTime") Date nowTime); + + public int registryUpdate(@Param("registryGroup") String registryGroup, + @Param("registryKey") String registryKey, + @Param("registryValue") String registryValue, + @Param("cpuUsage") double cpuUsage, + @Param("memoryUsage") double memoryUsage, + @Param("loadAverage") double loadAverage, + @Param("updateTime") Date updateTime); + + public int registrySave(@Param("registryGroup") String registryGroup, + @Param("registryKey") String registryKey, + @Param("registryValue") String registryValue, + @Param("cpuUsage") double cpuUsage, + @Param("memoryUsage") double memoryUsage, + @Param("loadAverage") double loadAverage, + @Param("updateTime") Date updateTime); + + public int registryDelete(@Param("registryGroup") String registryGroup, + @Param("registryKey") String registryKey, + @Param("registryValue") String registryValue); + + IPage selectAll(Page page); +} diff --git 
a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobSysMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobSysMapper.java new file mode 100644 index 0000000..d64d997 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobSysMapper.java @@ -0,0 +1,77 @@ +package com.czsj.bigdata.mapper; + + +import com.czsj.bigdata.entity.SysJob; + +import java.util.List; + +/** + * 调度任务信息 数据层 + * + * @author czsj + */ +public interface JobSysMapper +{ + /** + * 查询调度任务日志集合 + * + * @param job 调度信息 + * @return 操作日志集合 + */ + public List selectJobList(SysJob job); + + /** + * 查询所有调度任务 + * + * @return 调度任务列表 + */ + public List selectJobAll(); + + /** + * 通过调度ID查询调度任务信息 + * + * @param jobId 调度ID + * @return 角色对象信息 + */ + public SysJob selectJobById(Long jobId); + + /** + * 通过调度ID删除调度任务信息 + * + * @param jobId 调度ID + * @return 结果 + */ + public int deleteJobById(Long jobId); + + /** + * 批量删除调度任务信息 + * + * @param ids 需要删除的数据ID + * @return 结果 + */ + public int deleteJobByIds(Long[] ids); + + /** + * 修改调度任务信息 + * + * @param job 调度任务信息 + * @return 结果 + */ + public int updateJob(SysJob job); + + /** + * 新增调度任务信息 + * + * @param job 调度任务信息 + * @return 结果 + */ + public int insertJob(SysJob job); + + /** + * 通过任务字符串查jobid + * + * @param target 调度任务信息 + * @return 结果 + */ + public Integer selectJobBytarget(String target); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobTemplateMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobTemplateMapper.java new file mode 100644 index 0000000..2793720 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobTemplateMapper.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobTemplate; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + + +/** + * job info + * @author xuxueli 2016-1-12 18:03:45 + */ +@Mapper +public interface JobTemplateMapper { + + public List pageList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("jobDesc") String jobDesc, + @Param("executorHandler") String executorHandler, + @Param("userId") int userId, + @Param("projectIds") Integer[] projectIds); + + public int pageListCount(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("jobGroup") int jobGroup, + @Param("jobDesc") String jobDesc, + @Param("executorHandler") String executorHandler, + @Param("userId") int userId, + @Param("projectIds") Integer[] projectIds); + + public int save(JobTemplate info); + + public JobTemplate loadById(@Param("id") int id); + + public int update(JobTemplate jobTemplate); + + public int delete(@Param("id") long id); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobUserMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobUserMapper.java new file mode 100644 index 0000000..402aa7e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/JobUserMapper.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobUser; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; +import org.springframework.stereotype.Repository; + +import java.util.List; + +/** + * @author xuxueli 2019-05-04 16:44:59 + */ +@Mapper +@Repository +public interface JobUserMapper { + + List pageList(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("username") String username); + + List 
findAll(@Param("username") String username); + + int pageListCount(@Param("offset") int offset, + @Param("pagesize") int pagesize, + @Param("username") String username); + + JobUser loadByUserName(@Param("username") String username); + + JobUser getUserById(@Param("id") int id); + + List getUsersByIds(@Param("ids") String[] ids); + + int save(JobUser jobUser); + + int update(JobUser jobUser); + + int delete(@Param("id") int id); + + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/PermissionMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/PermissionMapper.java new file mode 100644 index 0000000..38585b8 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/PermissionMapper.java @@ -0,0 +1,16 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.JobPermission; +import org.apache.ibatis.annotations.Mapper; +import org.springframework.stereotype.Repository; + +import java.util.List; + +@Mapper +@Repository +public interface PermissionMapper { + + List findAll(); + + List findByAdminUserId(int userId); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/mapper/SysServersMapper.java b/czsj-system/src/main/java/com/czsj/bigdata/mapper/SysServersMapper.java new file mode 100644 index 0000000..76aaa99 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/mapper/SysServersMapper.java @@ -0,0 +1,65 @@ +package com.czsj.bigdata.mapper; + +import com.czsj.bigdata.entity.SysServers; +import org.apache.ibatis.annotations.Mapper; + +import java.util.List; + + +/** + * 执行器管理Mapper接口 + * + * @author czsj + * @date 2022-04-28 + */ +@Mapper +public interface SysServersMapper +{ + /** + * 查询执行器管理 + * + * @param id 执行器管理主键 + * @return 执行器管理 + */ + public SysServers selectSysServersById(Long id); + + /** + * 查询执行器管理列表 + * + * @param sysServers 执行器管理 + * @return 执行器管理集合 + */ + public List selectSysServersList(SysServers sysServers); + + /** + * 新增执行器管理 + * + * @param sysServers 执行器管理 + * @return 结果 + */ + public int insertSysServers(SysServers sysServers); + + /** + * 修改执行器管理 + * + * @param sysServers 执行器管理 + * @return 结果 + */ + public int updateSysServers(SysServers sysServers); + + /** + * 删除执行器管理 + * + * @param id 执行器管理主键 + * @return 结果 + */ + public int deleteSysServersById(Long id); + + /** + * 批量删除执行器管理 + * + * @param ids 需要删除的数据主键集合 + * @return 结果 + */ + public int deleteSysServersByIds(Long[] ids); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/APIAuthService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/APIAuthService.java new file mode 100644 index 0000000..3108926 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/APIAuthService.java @@ -0,0 +1,7 @@ +package com.czsj.bigdata.service; +import com.baomidou.mybatisplus.extension.service.IService; +import com.czsj.bigdata.entity.APIAuth; +import com.czsj.bigdata.entity.APIConfig; + +public interface APIAuthService extends IService{ +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/APIConfigService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/APIConfigService.java new file mode 100644 index 0000000..f0eb312 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/APIConfigService.java @@ -0,0 +1,7 @@ +package com.czsj.bigdata.service; +import com.baomidou.mybatisplus.extension.service.IService; +import com.czsj.bigdata.entity.APIConfig; +import com.czsj.bigdata.entity.APIConfig; + +public interface APIConfigService extends IService{ +} diff --git 
a/czsj-system/src/main/java/com/czsj/bigdata/service/DatasourceQueryService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/DatasourceQueryService.java new file mode 100644 index 0000000..0bbed24 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/DatasourceQueryService.java @@ -0,0 +1,62 @@ +package com.czsj.bigdata.service; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; + +/** + * 数据库查询服务 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName JdbcDatasourceQueryService + * @Version 1.0 + * @since 2019/7/31 20:50 + */ +public interface DatasourceQueryService { + + /** + * 获取db列表 + * @param id + * @return + */ + List getDBs(Long id) throws IOException; + + /** + * 根据数据源表id查询出可用的表 + * + * @param id + * @return + */ + List getTables(Long id,String tableSchema) throws IOException; + + /** + * 获取CollectionNames + * @param dbName + * @return + */ + List getCollectionNames(long id,String dbName) throws IOException; + + /** + * 根据数据源id,表名查询出该表所有字段 + * + * @param id + * @return + */ + List getColumns(Long id, String tableName) throws IOException; + + /** + * 根据 sql 语句获取字段 + * + * @param datasourceId + * @param querySql + * @return + */ + List getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException; + + /** + * 获取PG table schema + * @param id + * @return + */ + List getTableSchema(Long id); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/DataxJsonService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/DataxJsonService.java new file mode 100644 index 0000000..c94520d --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/DataxJsonService.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.service; + +import com.czsj.bigdata.dto.DataXJsonBuildDto; + +/** + * com.wugui.datax json构建服务层接口 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/1 + */ +public interface DataxJsonService { + + /** + * build datax json + * + * @param dto + * @return + */ + String buildJobJson(DataXJsonBuildDto dto); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/DevEnvSettingService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/DevEnvSettingService.java new file mode 100644 index 0000000..218d9bb --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/DevEnvSettingService.java @@ -0,0 +1,8 @@ +package com.czsj.bigdata.service; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.service.IService; +import com.czsj.bigdata.entity.DevEnvSetting; +public interface DevEnvSettingService extends IService{ + IPage getDevEnvSettingListPaging(Integer pageSize, Integer pageNo, String searchName); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/DevJarService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/DevJarService.java new file mode 100644 index 0000000..6fcf272 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/DevJarService.java @@ -0,0 +1,6 @@ +package com.czsj.bigdata.service; +import com.baomidou.mybatisplus.extension.service.IService; +import com.czsj.bigdata.entity.DevTask; + +public interface DevJarService extends IService{ +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/ISysServersService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/ISysServersService.java new file mode 100644 index 0000000..9242457 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/ISysServersService.java @@ -0,0 +1,62 
@@
+package com.czsj.bigdata.service;
+
+import java.util.List;
+
+import com.czsj.bigdata.entity.SysServers;
+
+/**
+ * Executor server management service interface
+ *
+ * @author czsj
+ * @date 2022-04-28
+ */
+public interface ISysServersService
+{
+    /**
+     * Query an executor server record
+     *
+     * @param id executor server primary key
+     * @return executor server record
+     */
+    public SysServers selectSysServersById(Long id);
+
+    /**
+     * Query the executor server list
+     *
+     * @param sysServers executor server query condition
+     * @return executor server collection
+     */
+    public List<SysServers> selectSysServersList(SysServers sysServers);
+
+    /**
+     * Add an executor server record
+     *
+     * @param sysServers executor server record
+     * @return result
+     */
+    public int insertSysServers(SysServers sysServers);
+
+    /**
+     * Update an executor server record
+     *
+     * @param sysServers executor server record
+     * @return result
+     */
+    public int updateSysServers(SysServers sysServers);
+
+    /**
+     * Batch-delete executor server records
+     *
+     * @param ids primary keys of the records to delete
+     * @return result
+     */
+    public int deleteSysServersByIds(Long[] ids);
+
+    /**
+     * Delete an executor server record
+     *
+     * @param id executor server primary key
+     * @return result
+     */
+    public int deleteSysServersById(Long id);
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JobDatasourceService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JobDatasourceService.java
new file mode 100644
index 0000000..9b19632
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JobDatasourceService.java
@@ -0,0 +1,41 @@
+package com.czsj.bigdata.service;
+
+import com.baomidou.mybatisplus.extension.service.IService;
+import com.czsj.bigdata.entity.JobDatasource;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * JDBC datasource configuration service interface
+ *
+ * @author jingwk
+ * @version v2.0
+ * @since 2022-01-10
+ */
+public interface JobDatasourceService extends IService<JobDatasource> {
+    /**
+     * Test whether a datasource is reachable
+     * @param jdbcDatasource
+     * @return
+     */
+    Boolean dataSourceTest(JobDatasource jdbcDatasource) throws IOException;
+
+    /**
+     * Update datasource information
+     * @param datasource
+     * @return
+     */
+    int update(JobDatasource datasource);
+
+    /**
+     * Get all datasources
+     * @return
+     */
+    List<JobDatasource> selectAllDatasource();
+
+    List findDataSourceName();
+
+    JobDatasource getDataSourceById(Long datasourceId);
+}
\ No newline at end of file
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JobProjectService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JobProjectService.java
new file mode 100644
index 0000000..54ce4ee
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JobProjectService.java
@@ -0,0 +1,26 @@
+package com.czsj.bigdata.service;
+
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.service.IService;
+import com.czsj.bigdata.entity.JobDatasource;
+import com.czsj.bigdata.entity.JobProject;
+
+/**
+ * Job project
+ *
+ * @author jingwk
+ * @version v2.1.2
+ * @since 2022-05-24
+ */
+public interface JobProjectService extends IService<JobProject> {
+
+    /**
+     * project page
+     * @param pageSize
+     * @param pageNo
+     * @param searchName
+     * @return
+     */
+    IPage<JobProject> getProjectListPaging(Integer pageSize, Integer pageNo, String searchName);
+}
\ No newline at end of file
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JobRegistryService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JobRegistryService.java
new file mode 100644
index 0000000..2f1736b
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JobRegistryService.java
@@ -0,0 +1,14 @@
+package com.czsj.bigdata.service;
+
+import com.baomidou.mybatisplus.extension.service.IService;
+import com.czsj.bigdata.entity.JobRegistry;
+
+/**
+ * Executor registry service interface
+ *
+ * @author jingwk
+ * @version v2.1.1
+ * @since 2022-03-15
+ */
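+// A minimal usage sketch (illustration only, assuming the stock MyBatis-Plus
+// IService API): the otherwise-empty interface below inherits generic CRUD,
+// so a caller with an injected JobRegistryService bean can simply do:
+//
+//     List<JobRegistry> all = jobRegistryService.list();   // all registry rows
+//     JobRegistry row = jobRegistryService.getById(1L);    // load by primary key
+//     jobRegistryService.saveOrUpdate(row);                // insert or update
+//
+// list(), getById() and saveOrUpdate() are standard IService methods; only the
+// bean name jobRegistryService is assumed here.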
+public interface JobRegistryService extends IService { +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JobService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JobService.java new file mode 100644 index 0000000..8ed9ba3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JobService.java @@ -0,0 +1,96 @@ +package com.czsj.bigdata.service; + + +import com.czsj.bigdata.dto.DataXBatchJsonBuildDto; +import com.czsj.bigdata.dto.TaskScheduleDto; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.core.biz.model.ReturnT; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * core job action for czsj-ground + * + * @author xuxueli 2016-5-28 15:30:33 + */ +public interface JobService { + + /** + * page list + * + * @param start + * @param length + * @param jobGroup + * @param jobDesc + * @param glueType + * @param userId + * @return + */ + Map pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String glueType, int userId,Integer[] projectIds); + + List list(); + + /** + * add job + * + * @param jobInfo + * @return + */ + ReturnT add(JobInfo jobInfo); + + /** + * update job + * + * @param jobInfo + * @return + */ + ReturnT update(JobInfo jobInfo); + + /** + * remove job + * * + * + * @param id + * @return + */ + ReturnT remove(int id); + + /** + * start job + * + * @param id + * @return + */ + ReturnT start(int id); + + /** + * stop job + * + * @param id + * @return + */ + ReturnT stop(int id); + + /** + * dashboard info + * + * @return + */ + Map dashboardInfo(); + + /** + * chart info + * + * @return + */ + ReturnT> chartInfo(); + + /** + * batch add + * @param dto + * @return + */ + ReturnT batchAdd(DataXBatchJsonBuildDto dto) throws IOException; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JobTemplateService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JobTemplateService.java new file mode 100644 index 0000000..3413ba8 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JobTemplateService.java @@ -0,0 +1,51 @@ +package com.czsj.bigdata.service; + + +import com.czsj.bigdata.entity.JobTemplate; +import com.czsj.core.biz.model.ReturnT; + +import java.util.Map; + +/** + * core job action for czsj-ground + * + * @author xuxueli 2016-5-28 15:30:33 + */ +public interface JobTemplateService { + + /** + * page list + * + * @param start + * @param length + * @param jobGroup + * @param jobDesc + * @param executorHandler + * @param userId + * @return + */ + Map pageList(int start, int length, int jobGroup, String jobDesc, String executorHandler, int userId,Integer[] projectIds); + /** + * add job + * + * @param jobTemplate + * @return + */ + ReturnT add(JobTemplate jobTemplate); + + /** + * update job + * + * @param jobTemplate + * @return + */ + ReturnT update(JobTemplate jobTemplate); + + /** + * remove job + * * + * @param id + * @return + */ + ReturnT remove(int id); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/JsonService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/JsonService.java new file mode 100644 index 0000000..ff28905 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/JsonService.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.service; + +import com.czsj.bigdata.dto.JsonBuildDto; + +/** + * com.czsj json构建服务层接口 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/1 + */ +public interface JsonService { + + /** + * build 
flinkx json
+     *
+     * @param dto
+     * @return
+     */
+    String buildJobDataxJson(JsonBuildDto dto);
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/RpcService.java b/czsj-system/src/main/java/com/czsj/bigdata/service/RpcService.java
new file mode 100644
index 0000000..720e60c
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/RpcService.java
@@ -0,0 +1,12 @@
+package com.czsj.bigdata.service;
+
+public interface RpcService {
+
+    /** Get the server status of the current machine */
+    String getMonitor();
+
+    String runjob(String jobInfo);
+
+    String getLog(String executorAddress);
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIAuthServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIAuthServiceImpl.java
new file mode 100644
index 0000000..9971324
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIAuthServiceImpl.java
@@ -0,0 +1,16 @@
+package com.czsj.bigdata.service.impl;
+
+import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
+import com.czsj.bigdata.entity.APIAuth;
+import com.czsj.bigdata.mapper.APIAuthMapper;
+import com.czsj.bigdata.service.APIAuthService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+@Service("apiAuthService")
+public class APIAuthServiceImpl extends ServiceImpl<APIAuthMapper, APIAuth> implements APIAuthService {
+
+    @Autowired
+    private APIAuthMapper apiAuthMapper;
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIDBQueryServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIDBQueryServiceImpl.java
new file mode 100644
index 0000000..2c58521
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/APIDBQueryServiceImpl.java
@@ -0,0 +1,49 @@
+package com.czsj.bigdata.service.impl;
+
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Ad-hoc JDBC query demo.
+ *
+ * @Date: 2022/1/27 9:04
+ * @Description: runs a query against a hard-coded local database and returns
+ *               the values of the "name" column; connection details are placeholders.
+ **/
+public class APIDBQueryServiceImpl {
+
+    public static void main(String[] args) throws SQLException {
+        // Assemble the SQL, execute the query and print the result
+        System.out.println(APIDBQueryServiceImpl.getResult("select * from student", "mysql"));
+    }
+
+    public static List<String> getResult(String sql, String type) throws SQLException {
+        // JDBC 4+ drivers register themselves via SPI, so Class.forName() is not needed.
+        String url;
+        String user;
+        String password;
+        switch (type) {
+            case "postgresql":
+                url = "jdbc:postgresql://localhost:5432/postgres";
+                user = "postgres";
+                password = "123456";
+                break;
+            case "mysql":
+            default:
+                url = "jdbc:mysql://localhost:3306/test";
+                user = "root";
+                password = "root";
+        }
+
+        // try-with-resources closes the connection, statement and result set;
+        // rows are copied out first instead of returning an already-closed ResultSet.
+        List<String> names = new ArrayList<>();
+        try (Connection conn = DriverManager.getConnection(url, user, password);
+             Statement stmt = conn.createStatement();
+             ResultSet result = stmt.executeQuery(sql)) {
+            while (result.next()) {
+                names.add(result.getString("name"));
+            }
+        }
+        return names;
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/AdminBizImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/AdminBizImpl.java
new file mode 100644
index 0000000..c66ea74
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/AdminBizImpl.java
@@ -0,0 +1,213 @@
+package com.czsj.bigdata.service.impl;
+
+import com.czsj.bigdata.core.kill.KillJob;
+import
com.czsj.bigdata.core.thread.JobTriggerPoolHelper; +import com.czsj.bigdata.core.trigger.TriggerTypeEnum; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.bigdata.entity.JobLog; +import com.czsj.bigdata.mapper.JobInfoMapper; +import com.czsj.bigdata.mapper.JobLogMapper; +import com.czsj.bigdata.mapper.JobRegistryMapper; +import com.czsj.core.biz.AdminBiz; +import com.czsj.core.biz.model.HandleCallbackParam; +import com.czsj.core.biz.model.HandleProcessCallbackParam; +import com.czsj.core.biz.model.RegistryParam; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.enums.IncrementTypeEnum; +import com.czsj.core.handler.IJobHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; +import org.springframework.util.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import java.text.MessageFormat; +import java.util.Date; +import java.util.List; + +/** + * @author xuxueli 2017-07-27 21:54:20 + */ +@Service +public class AdminBizImpl implements AdminBiz { + private static Logger logger = LoggerFactory.getLogger(AdminBizImpl.class); + + @Autowired + public JobLogMapper jobLogMapper; + @Autowired + private JobInfoMapper jobInfoMapper; + @Autowired + private JobRegistryMapper jobRegistryMapper; + + @Override + public ReturnT callback(List callbackParamList) { + for (HandleCallbackParam handleCallbackParam : callbackParamList) { + ReturnT callbackResult = callback(handleCallbackParam); + logger.debug(">>>>>>>>> JobApiController.callback {}, handleCallbackParam={}, callbackResult={}", + (callbackResult.getCode() == IJobHandler.SUCCESS.getCode() ? "success" : "fail"), handleCallbackParam, callbackResult); + } + + return ReturnT.SUCCESS; + } + + @Override + public ReturnT processCallback(List callbackParamList) { + for (HandleProcessCallbackParam handleProcessCallbackParam : callbackParamList) { + ReturnT callbackResult = processCallback(handleProcessCallbackParam); + logger.debug(">>>>>>>>> JobApiController.processCallback {}, handleCallbackParam={}, callbackResult={}", + (callbackResult.getCode() == IJobHandler.SUCCESS.getCode() ? "success" : "fail"), handleProcessCallbackParam, callbackResult); + } + return ReturnT.SUCCESS; + } + + private ReturnT processCallback(HandleProcessCallbackParam handleProcessCallbackParam) { + int result = jobLogMapper.updateProcessId(handleProcessCallbackParam.getLogId(), handleProcessCallbackParam.getProcessId()); + return result > 0 ? ReturnT.FAIL : ReturnT.SUCCESS; + } + + + private ReturnT callback(HandleCallbackParam handleCallbackParam) { + // valid log item + JobLog log = jobLogMapper.load(handleCallbackParam.getLogId()); + if (log == null) { + return new ReturnT(ReturnT.FAIL_CODE, "log item not found."); + } + if (log.getHandleCode() > 0) { + return new ReturnT(ReturnT.FAIL_CODE, "log repeate callback."); // avoid repeat callback, trigger child job etc + } + + // trigger success, to trigger child job + String callbackMsg = null; + int resultCode = handleCallbackParam.getExecuteResult().getCode(); + + if (IJobHandler.SUCCESS.getCode() == resultCode) { + + JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId()); + + updateIncrementParam(log, jobInfo.getIncrementType()); + + if (jobInfo != null && jobInfo.getChildJobId() != null && jobInfo.getChildJobId().trim().length() > 0) { + callbackMsg = "
>>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_child_run") + "<<<<<<<<<<<";
+
+                String[] childJobIds = jobInfo.getChildJobId().split(",");
+                for (int i = 0; i < childJobIds.length; i++) {
+                    int childJobId = (childJobIds[i] != null && childJobIds[i].trim().length() > 0 && isNumeric(childJobIds[i])) ? Integer.parseInt(childJobIds[i]) : -1;
+                    if (childJobId > 0) {
+
+                        JobTriggerPoolHelper.trigger(childJobId, TriggerTypeEnum.PARENT, -1, null, null);
+                        ReturnT triggerChildResult = ReturnT.SUCCESS;
+
+                        // add msg
+                        callbackMsg += MessageFormat.format(I18nUtil.getString("jobconf_callback_child_msg1"),
+                                (i + 1),
+                                childJobIds.length,
+                                childJobIds[i],
+                                (triggerChildResult.getCode() == ReturnT.SUCCESS_CODE ? I18nUtil.getString("system_success") : I18nUtil.getString("system_fail")),
+                                triggerChildResult.getMsg());
+                    } else {
+                        callbackMsg += MessageFormat.format(I18nUtil.getString("jobconf_callback_child_msg2"),
+                                (i + 1),
+                                childJobIds.length,
+                                childJobIds[i]);
+                    }
+                }
+
+            }
+        }
+
+        // kill execution-timeout DataX process
+        if (!StringUtils.isEmpty(log.getProcessId()) && IJobHandler.FAIL_TIMEOUT.getCode() == resultCode) {
+            KillJob.trigger(log.getId(), log.getTriggerTime(), log.getExecutorAddress(), log.getProcessId());
+        }
+
+        // handle msg
+        StringBuilder handleMsg = new StringBuilder();
+        if (log.getHandleMsg() != null) {
+            handleMsg.append(log.getHandleMsg()).append("
"); + } + if (handleCallbackParam.getExecuteResult().getMsg() != null) { + handleMsg.append(handleCallbackParam.getExecuteResult().getMsg()); + } + if (callbackMsg != null) { + handleMsg.append(callbackMsg); + } + + if (handleMsg.length() > 15000) { + handleMsg = new StringBuilder(handleMsg.substring(0, 15000)); // text最大64kb 避免长度过长 + } + + // success, save log + log.setHandleTime(new Date()); + log.setHandleCode(resultCode); + log.setHandleMsg(handleMsg.toString()); + + jobLogMapper.updateHandleInfo(log); + jobInfoMapper.updateLastHandleCode(log.getJobId(), resultCode); + + return ReturnT.SUCCESS; + } + + private void updateIncrementParam(JobLog log, Integer incrementType) { + if (IncrementTypeEnum.ID.getCode() == incrementType) { + jobInfoMapper.incrementIdUpdate(log.getJobId(),log.getMaxId()); + } else if (IncrementTypeEnum.TIME.getCode() == incrementType) { + jobInfoMapper.incrementTimeUpdate(log.getJobId(), log.getTriggerTime()); + } + } + + private boolean isNumeric(String str) { + try { + Integer.valueOf(str); + return true; + } catch (NumberFormatException e) { + return false; + } + } + + @Override + public ReturnT registry(RegistryParam registryParam) { + + // valid + if (!StringUtils.hasText(registryParam.getRegistryGroup()) + || !StringUtils.hasText(registryParam.getRegistryKey()) + || !StringUtils.hasText(registryParam.getRegistryValue())) { + return new ReturnT(ReturnT.FAIL_CODE, "Illegal Argument."); + } + + int ret = jobRegistryMapper.registryUpdate(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), + registryParam.getRegistryValue(), registryParam.getCpuUsage(), registryParam.getMemoryUsage(), registryParam.getLoadAverage(), new Date()); + if (ret < 1) { + jobRegistryMapper.registrySave(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), + registryParam.getRegistryValue(), registryParam.getCpuUsage(), registryParam.getMemoryUsage(), registryParam.getLoadAverage(), new Date()); + + // fresh + freshGroupRegistryInfo(registryParam); + } + return ReturnT.SUCCESS; + } + + @Override + public ReturnT registryRemove(RegistryParam registryParam) { + + // valid + if (!StringUtils.hasText(registryParam.getRegistryGroup()) + || !StringUtils.hasText(registryParam.getRegistryKey()) + || !StringUtils.hasText(registryParam.getRegistryValue())) { + return new ReturnT(ReturnT.FAIL_CODE, "Illegal Argument."); + } + + int ret = jobRegistryMapper.registryDelete(registryParam.getRegistryGroup(), registryParam.getRegistryKey(), registryParam.getRegistryValue()); + if (ret > 0) { + + // fresh + freshGroupRegistryInfo(registryParam); + } + return ReturnT.SUCCESS; + } + + private void freshGroupRegistryInfo(RegistryParam registryParam) { + // Under consideration, prevent affecting core tables + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DatasourceQueryServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DatasourceQueryServiceImpl.java new file mode 100644 index 0000000..4fe1ed6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DatasourceQueryServiceImpl.java @@ -0,0 +1,115 @@ +package com.czsj.bigdata.service.impl; + +import cn.hutool.core.util.ObjectUtil; +import com.google.common.collect.Lists; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.service.DatasourceQueryService; +import com.czsj.bigdata.service.JobDatasourceService; +import com.czsj.bigdata.tool.query.*; +import com.czsj.bigdata.util.JdbcConstants; +import org.apache.commons.lang3.StringUtils; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; + +/** + * datasource query + * + * @author zhouhongfa@gz-yibo.com + * @ClassName JdbcDatasourceQueryServiceImpl + * @Version 1.0 + * @since 2019/7/31 20:51 + */ +@Service +public class DatasourceQueryServiceImpl implements DatasourceQueryService { + + @Autowired + private JobDatasourceService jobDatasourceService; + + @Override + public List getDBs(Long id) throws IOException { + //获取数据源对象 + JobDatasource datasource = jobDatasourceService.getById(id); + return new MongoDBQueryTool(datasource).getDBNames(); + } + + + @Override + public List getTables(Long id, String tableSchema) throws IOException { + //获取数据源对象 + JobDatasource datasource = jobDatasourceService.getById(id); + //queryTool组装 + if (ObjectUtil.isNull(datasource)) { + return Lists.newArrayList(); + } + if (JdbcConstants.HBASE.equals(datasource.getDatasource())) { + return new HBaseQueryTool(datasource).getTableNames(); + } else if (JdbcConstants.MONGODB.equals(datasource.getDatasource())) { + return new MongoDBQueryTool(datasource).getCollectionNames(datasource.getDatabaseName()); + } else { + BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource); + if(StringUtils.isBlank(tableSchema)){ + return qTool.getTableNames(); + }else{ + return qTool.getTableNames(tableSchema); + } + } + } + + @Override + public List getTableSchema(Long id) { + //获取数据源对象 + JobDatasource datasource = jobDatasourceService.getById(id); + //queryTool组装 + if (ObjectUtil.isNull(datasource)) { + return Lists.newArrayList(); + } + BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource); + return qTool.getTableSchema(); + } + + @Override + public List getCollectionNames(long id, String dbName) throws IOException { + //获取数据源对象 + JobDatasource datasource = jobDatasourceService.getById(id); + //queryTool组装 + if (ObjectUtil.isNull(datasource)) { + return Lists.newArrayList(); + } + return new MongoDBQueryTool(datasource).getCollectionNames(dbName); + } + + + @Override + public List getColumns(Long id, String tableName) throws IOException { + //获取数据源对象 + JobDatasource datasource = jobDatasourceService.getById(id); + //queryTool组装 + if (ObjectUtil.isNull(datasource)) { + return Lists.newArrayList(); + } + if (JdbcConstants.HBASE.equals(datasource.getDatasource())) { + return new HBaseQueryTool(datasource).getColumns(tableName); + } else if (JdbcConstants.MONGODB.equals(datasource.getDatasource())) { + return new MongoDBQueryTool(datasource).getColumns(tableName); + } else { + BaseQueryTool queryTool = QueryToolFactory.getByDbType(datasource); + return queryTool.getColumnNames(tableName, datasource.getDatasource()); + } + } + + @Override + public List getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException { + //获取数据源对象 + JobDatasource jdbcDatasource = jobDatasourceService.getById(datasourceId); + //queryTool组装 + if (ObjectUtil.isNull(jdbcDatasource)) { + return Lists.newArrayList(); + } + BaseQueryTool queryTool = QueryToolFactory.getByDbType(jdbcDatasource); + return queryTool.getColumnsByQuerySql(querySql); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DataxJsonServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DataxJsonServiceImpl.java new file mode 100644 index 0000000..e6b65e7 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DataxJsonServiceImpl.java @@ -0,0 
+1,44 @@ +package com.czsj.bigdata.service.impl; + +import com.alibaba.fastjson.JSON; +import com.czsj.bigdata.dto.DataXBatchJsonBuildDto; +import com.czsj.bigdata.dto.DataXJsonBuildDto; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.bigdata.entity.JobTemplate; +import com.czsj.bigdata.mapper.JobInfoMapper; +import com.czsj.bigdata.mapper.JobTemplateMapper; +import com.czsj.bigdata.service.DatasourceQueryService; +import com.czsj.bigdata.service.DataxJsonService; +import com.czsj.bigdata.service.JobDatasourceService; +import com.czsj.bigdata.tool.datax.DataxJsonHelper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * com.wugui.datax json构建实现类 + * + * @author jingwk + * @ClassName DataxJsonServiceImpl + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Service +public class DataxJsonServiceImpl implements DataxJsonService { + + @Autowired + private JobDatasourceService jobJdbcDatasourceService; + + @Override + public String buildJobJson(DataXJsonBuildDto dataXJsonBuildDto) { + DataxJsonHelper dataxJsonHelper = new DataxJsonHelper(); + // reader + JobDatasource readerDatasource = jobJdbcDatasourceService.getById(dataXJsonBuildDto.getReaderDatasourceId()); + // reader plugin init + dataxJsonHelper.initReader(dataXJsonBuildDto, readerDatasource); + JobDatasource writerDatasource = jobJdbcDatasourceService.getById(dataXJsonBuildDto.getWriterDatasourceId()); + dataxJsonHelper.initWriter(dataXJsonBuildDto, writerDatasource); + + return JSON.toJSONString(dataxJsonHelper.buildJob()); + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DevEnvSettingServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DevEnvSettingServiceImpl.java new file mode 100644 index 0000000..fba43f6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/DevEnvSettingServiceImpl.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.service.impl; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.czsj.bigdata.mapper.DevEnvSettingMapper; +import com.czsj.bigdata.service.DevEnvSettingService; +import com.czsj.bigdata.entity.DevEnvSetting; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +@Service("devEnvSettingService") +public class DevEnvSettingServiceImpl extends ServiceImpl implements DevEnvSettingService { + + @Autowired + private DevEnvSettingMapper devEnvSettingMapper; + + @Override + public IPage getDevEnvSettingListPaging(Integer pageSize, Integer pageNo, String searchName) { + Page page = new Page(pageNo, pageSize); + return devEnvSettingMapper.getDevEnvSettingListPaging(page, searchName); + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobDatasourceServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobDatasourceServiceImpl.java new file mode 100644 index 0000000..10c01c6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobDatasourceServiceImpl.java @@ -0,0 +1,70 @@ +package com.czsj.bigdata.service.impl; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.czsj.bigdata.mapper.JobDatasourceMapper; +import com.czsj.bigdata.entity.JobDatasource; +import 
com.czsj.bigdata.service.JobDatasourceService; +import com.czsj.bigdata.tool.query.BaseQueryTool; +import com.czsj.bigdata.tool.query.HBaseQueryTool; +import com.czsj.bigdata.tool.query.MongoDBQueryTool; +import com.czsj.bigdata.tool.query.QueryToolFactory; +import com.czsj.bigdata.util.AESUtil; +import com.czsj.bigdata.util.JdbcConstants; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import org.springframework.beans.factory.annotation.Autowired; +import java.io.IOException; +import java.util.List; + +/** + * Created by jingwk on 2022/01/30 + */ +@Service +@Transactional(readOnly = true) +public class JobDatasourceServiceImpl extends ServiceImpl implements JobDatasourceService { + + @Autowired + private JobDatasourceMapper datasourceMapper; + + @Override + public Boolean dataSourceTest(JobDatasource jobDatasource) throws IOException { + if (JdbcConstants.HBASE.equals(jobDatasource.getDatasource())) { + return new HBaseQueryTool(jobDatasource).dataSourceTest(); + } + String userName = AESUtil.decrypt(jobDatasource.getJdbcUsername()); + // 判断账密是否为密文 + if (userName == null) { + jobDatasource.setJdbcUsername(AESUtil.encrypt(jobDatasource.getJdbcUsername())); + } + String pwd = AESUtil.decrypt(jobDatasource.getJdbcPassword()); + if (pwd == null) { + jobDatasource.setJdbcPassword(AESUtil.encrypt(jobDatasource.getJdbcPassword())); + } + if (JdbcConstants.MONGODB.equals(jobDatasource.getDatasource())) { + return new MongoDBQueryTool(jobDatasource).dataSourceTest(jobDatasource.getDatabaseName()); + } + BaseQueryTool queryTool = QueryToolFactory.getByDbType(jobDatasource); + return queryTool.dataSourceTest(); + } + + @Override + public int update(JobDatasource datasource) { + return datasourceMapper.update(datasource); + } + + @Override + public List selectAllDatasource() { + return datasourceMapper.selectList(null); + } + + @Override + public List findDataSourceName() { + return datasourceMapper.findDataSourceName(); + } + + @Override + public JobDatasource getDataSourceById(Long datasourceId) { + return datasourceMapper.getDataSourceById(datasourceId); + } +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobProjectServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobProjectServiceImpl.java new file mode 100644 index 0000000..aa79ec7 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobProjectServiceImpl.java @@ -0,0 +1,29 @@ +package com.czsj.bigdata.service.impl; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.czsj.bigdata.entity.JobProject; +import com.czsj.bigdata.mapper.JobProjectMapper; +import com.czsj.bigdata.service.JobProjectService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * JobProjectServiceImpl + * @author jingwk + * @since 2019-05-30 + * @version v2.1.2 + */ +@Service("jobProjectService") +public class JobProjectServiceImpl extends ServiceImpl implements JobProjectService { + + @Autowired + private JobProjectMapper jobProjectMapper; + + @Override + public IPage getProjectListPaging(Integer pageSize, Integer pageNo, String searchName) { + Page page = new Page(pageNo, pageSize); + return jobProjectMapper.getProjectListPaging(page, searchName); + } +} \ No newline at end of file diff 
--git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobRegistryServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobRegistryServiceImpl.java new file mode 100644 index 0000000..3a77a77 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobRegistryServiceImpl.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.service.impl; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import com.czsj.bigdata.entity.JobRegistry; +import com.czsj.bigdata.mapper.JobRegistryMapper; +import com.czsj.bigdata.service.JobRegistryService; +import org.springframework.stereotype.Service; + +/** + * JobRegistryServiceImpl + * @author jingwk + * @since 2019-03-15 + * @version v2.1.1 + */ +@Service("jobRegistryService") +public class JobRegistryServiceImpl extends ServiceImpl implements JobRegistryService { + +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobServiceImpl.java new file mode 100644 index 0000000..733487e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobServiceImpl.java @@ -0,0 +1,463 @@ +package com.czsj.bigdata.service.impl; + +import com.czsj.bigdata.core.cron.CronExpression; +import com.czsj.bigdata.core.route.ExecutorRouteStrategyEnum; +import com.czsj.bigdata.core.thread.JobScheduleHelper; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.bigdata.dto.DataXBatchJsonBuildDto; +import com.czsj.bigdata.dto.DataXJsonBuildDto; +import com.czsj.bigdata.entity.JobGroup; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.bigdata.entity.JobLogReport; +import com.czsj.bigdata.entity.JobTemplate; +import com.czsj.bigdata.mapper.*; +import com.czsj.bigdata.service.DatasourceQueryService; +import com.czsj.bigdata.service.DataxJsonService; +import com.czsj.bigdata.service.JobService; +import com.czsj.bigdata.util.DateFormatUtils; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.enums.ExecutorBlockStrategyEnum; +import com.czsj.core.glue.GlueTypeEnum; +import com.czsj.core.util.DateUtil; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.BeanUtils; +import org.springframework.stereotype.Service; + +import org.springframework.beans.factory.annotation.Autowired; +import java.io.IOException; +import java.text.MessageFormat; +import java.text.ParseException; +import java.util.*; + +/** + * core job action for xxl-job + * + * @author xuxueli 2016-5-28 15:30:33 + */ +@Service +public class JobServiceImpl implements JobService { + private static Logger logger = LoggerFactory.getLogger(JobServiceImpl.class); + + @Autowired + private JobGroupMapper jobGroupMapper; + @Autowired + private JobInfoMapper jobInfoMapper; + @Autowired + private JobLogMapper jobLogMapper; + @Autowired + private JobLogGlueMapper jobLogGlueMapper; + @Autowired + private JobLogReportMapper jobLogReportMapper; + @Autowired + private DatasourceQueryService datasourceQueryService; + @Autowired + private JobTemplateMapper jobTemplateMapper; + @Autowired + private DataxJsonService dataxJsonService; + + @Override + public Map pageList(int start, int length, int jobGroup, int triggerStatus, String jobDesc, String glueType, int userId, Integer[] projectIds) { + + // page list + List list = jobInfoMapper.pageList(start, length, jobGroup, triggerStatus, jobDesc, glueType, userId, projectIds); + int 
list_count = jobInfoMapper.pageListCount(start, length, jobGroup, triggerStatus, jobDesc, glueType, userId, projectIds); + + // package result + Map maps = new HashMap<>(); + maps.put("recordsTotal", list_count); // 总记录数 + maps.put("recordsFiltered", list_count); // 过滤后的总记录数 + maps.put("data", list); // 分页列表 + return maps; + } + + public List list() { + return jobInfoMapper.findAll(); + } + + @Override + public ReturnT add(JobInfo jobInfo) { + // valid + JobGroup group = jobGroupMapper.load(jobInfo.getJobGroup()); + if (group == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_jobgroup"))); + } + if (!CronExpression.isValidExpression(jobInfo.getJobCron())) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid")); + } + if (jobInfo.getGlueType().equals(GlueTypeEnum.BEAN.getDesc()) && jobInfo.getJobJson().trim().length() <= 2) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobjson"))); + } + if (jobInfo.getJobDesc() == null || jobInfo.getJobDesc().trim().length() == 0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobdesc"))); + } + if (jobInfo.getUserId() == 0 ) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_author"))); + } + if (ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy") + I18nUtil.getString("system_invalid"))); + } + if (ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy") + I18nUtil.getString("system_invalid"))); + } + if (GlueTypeEnum.match(jobInfo.getGlueType()) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_gluetype") + I18nUtil.getString("system_invalid"))); + } + if (GlueTypeEnum.BEAN == GlueTypeEnum.match(jobInfo.getGlueType()) && (jobInfo.getExecutorHandler() == null || jobInfo.getExecutorHandler().trim().length() == 0)) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + "JobHandler")); + } + + + if (StringUtils.isBlank(jobInfo.getReplaceParamType()) || !DateFormatUtils.formatList().contains(jobInfo.getReplaceParamType())) { + jobInfo.setReplaceParamType(DateFormatUtils.TIMESTAMP); + } + + // fix "\r" in shell + if (GlueTypeEnum.GLUE_SHELL == GlueTypeEnum.match(jobInfo.getGlueType()) && jobInfo.getGlueSource() != null) { + jobInfo.setGlueSource(jobInfo.getGlueSource().replaceAll("\r", "")); + } + + // ChildJobId valid + if (jobInfo.getChildJobId() != null && jobInfo.getChildJobId().trim().length() > 0) { + String[] childJobIds = jobInfo.getChildJobId().split(","); + for (String childJobIdItem : childJobIds) { + if (StringUtils.isNotBlank(childJobIdItem) && isNumeric(childJobIdItem) && Integer.parseInt(childJobIdItem) > 0) { + JobInfo childJobInfo = jobInfoMapper.loadById(Integer.parseInt(childJobIdItem)); + if (childJobInfo == null) { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_not_found")), childJobIdItem)); + } + } else { + return new ReturnT(ReturnT.FAIL_CODE, + 
MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_invalid")), childJobIdItem)); + } + } + + // join , avoid "xxx,," + String temp = ""; + for (String item : childJobIds) { + temp += item + ","; + } + temp = temp.substring(0, temp.length() - 1); + + jobInfo.setChildJobId(temp); + } + + // add in db + jobInfo.setAddTime(new Date()); + jobInfo.setJobJson(jobInfo.getJobJson()); + jobInfo.setUpdateTime(new Date()); + jobInfo.setGlueUpdatetime(new Date()); + jobInfoMapper.save(jobInfo); + if (jobInfo.getId() < 1) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_add") + I18nUtil.getString("system_fail"))); + } + + return new ReturnT<>(String.valueOf(jobInfo.getId())); + } + + private boolean isNumeric(String str) { + try { + Integer.valueOf(str); + return true; + } catch (NumberFormatException e) { + return false; + } + } + + @Override + public ReturnT update(JobInfo jobInfo) { + + // valid + if (!CronExpression.isValidExpression(jobInfo.getJobCron())) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid")); + } + if (jobInfo.getGlueType().equals(GlueTypeEnum.BEAN.getDesc()) && jobInfo.getJobJson().trim().length() <= 2) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobjson"))); + } + if (jobInfo.getJobDesc() == null || jobInfo.getJobDesc().trim().length() == 0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobdesc"))); + } + + if (jobInfo.getProjectId() == 0) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobproject"))); + } + if (jobInfo.getUserId() == 0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_author"))); + } + if (ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy") + I18nUtil.getString("system_invalid"))); + } + if (ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy") + I18nUtil.getString("system_invalid"))); + } + + // ChildJobId valid + if (jobInfo.getChildJobId() != null && jobInfo.getChildJobId().trim().length() > 0) { + String[] childJobIds = jobInfo.getChildJobId().split(","); + for (String childJobIdItem : childJobIds) { + if (childJobIdItem != null && childJobIdItem.trim().length() > 0 && isNumeric(childJobIdItem)) { + JobInfo childJobInfo = jobInfoMapper.loadById(Integer.parseInt(childJobIdItem)); + if (childJobInfo == null) { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_not_found")), childJobIdItem)); + } + } else { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_invalid")), childJobIdItem)); + } + } + + // join , avoid "xxx,," + String temp = ""; + for (String item : childJobIds) { + temp += item + ","; + } + temp = temp.substring(0, temp.length() - 1); + + jobInfo.setChildJobId(temp); + } + + // group valid + JobGroup jobGroup = jobGroupMapper.load(jobInfo.getJobGroup()); + if (jobGroup 
== null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_jobgroup") + I18nUtil.getString("system_invalid"))); + } + + // stage job info + JobInfo exists_jobInfo = jobInfoMapper.loadById(jobInfo.getId()); + if (exists_jobInfo == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_id") + I18nUtil.getString("system_not_found"))); + } + + // next trigger time (5s后生效,避开预读周期) + long nextTriggerTime = exists_jobInfo.getTriggerNextTime(); + if (exists_jobInfo.getTriggerStatus() == 1 && !jobInfo.getJobCron().equals(exists_jobInfo.getJobCron())) { + try { + Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(new Date(System.currentTimeMillis() + JobScheduleHelper.PRE_READ_MS)); + if (nextValidTime == null) { + return new ReturnT(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_never_fire")); + } + nextTriggerTime = nextValidTime.getTime(); + } catch (ParseException e) { + logger.error(e.getMessage(), e); + return new ReturnT(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid") + " | " + e.getMessage()); + } + } + + BeanUtils.copyProperties(jobInfo, exists_jobInfo); + if (StringUtils.isBlank(jobInfo.getReplaceParamType())) { + jobInfo.setReplaceParamType(DateFormatUtils.TIMESTAMP); + } + exists_jobInfo.setTriggerNextTime(nextTriggerTime); + exists_jobInfo.setUpdateTime(new Date()); + + if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) { + exists_jobInfo.setJobJson(jobInfo.getJobJson()); + exists_jobInfo.setGlueSource(null); + } else { + exists_jobInfo.setGlueSource(jobInfo.getGlueSource()); + exists_jobInfo.setJobJson(null); + } + exists_jobInfo.setGlueUpdatetime(new Date()); + jobInfoMapper.update(exists_jobInfo); + + + return ReturnT.SUCCESS; + } + + @Override + public ReturnT remove(int id) { + JobInfo xxlJobInfo = jobInfoMapper.loadById(id); + if (xxlJobInfo == null) { + return ReturnT.SUCCESS; + } + + jobInfoMapper.delete(id); + jobLogMapper.delete(id); + jobLogGlueMapper.deleteByJobId(id); + return ReturnT.SUCCESS; + } + + @Override + public ReturnT start(int id) { + JobInfo xxlJobInfo = jobInfoMapper.loadById(id); + + // next trigger time (5s后生效,避开预读周期) + long nextTriggerTime = 0; + try { + Date nextValidTime = new CronExpression(xxlJobInfo.getJobCron()).getNextValidTimeAfter(new Date(System.currentTimeMillis() + JobScheduleHelper.PRE_READ_MS)); + if (nextValidTime == null) { + return new ReturnT(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_never_fire")); + } + nextTriggerTime = nextValidTime.getTime(); + } catch (ParseException e) { + logger.error(e.getMessage(), e); + return new ReturnT(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid") + " | " + e.getMessage()); + } + + xxlJobInfo.setTriggerStatus(1); + xxlJobInfo.setTriggerLastTime(0); + xxlJobInfo.setTriggerNextTime(nextTriggerTime); + + xxlJobInfo.setUpdateTime(new Date()); + jobInfoMapper.update(xxlJobInfo); + return ReturnT.SUCCESS; + } + + @Override + public ReturnT stop(int id) { + JobInfo jobInfo = jobInfoMapper.loadById(id); + + jobInfo.setTriggerStatus(0); + jobInfo.setTriggerLastTime(0); + jobInfo.setTriggerNextTime(0); + + jobInfo.setUpdateTime(new Date()); + jobInfoMapper.update(jobInfo); + return ReturnT.SUCCESS; + } + + @Override + public Map dashboardInfo() { + + int jobInfoCount = jobInfoMapper.findAllCount(); + int jobLogCount = 0; + int jobLogSuccessCount = 0; + JobLogReport jobLogReport = jobLogReportMapper.queryLogReportTotal(); + if (jobLogReport != null) { + 
jobLogCount = jobLogReport.getRunningCount() + jobLogReport.getSucCount() + jobLogReport.getFailCount(); + jobLogSuccessCount = jobLogReport.getSucCount(); + } + + // executor count + Set executorAddressSet = new HashSet<>(); + List groupList = jobGroupMapper.findAll(); + + if (groupList != null && !groupList.isEmpty()) { + for (JobGroup group : groupList) { + if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) { + executorAddressSet.addAll(group.getRegistryList()); + } + } + } + + int executorCount = executorAddressSet.size(); + + Map dashboardMap = new HashMap<>(); + dashboardMap.put("jobInfoCount", jobInfoCount); + dashboardMap.put("jobLogCount", jobLogCount); + dashboardMap.put("jobLogSuccessCount", jobLogSuccessCount); + dashboardMap.put("executorCount", executorCount); + return dashboardMap; + } + + @Override + public ReturnT> chartInfo() { + // process + List triggerDayList = new ArrayList(); + List triggerDayCountRunningList = new ArrayList(); + List triggerDayCountSucList = new ArrayList(); + List triggerDayCountFailList = new ArrayList(); + int triggerCountRunningTotal = 0; + int triggerCountSucTotal = 0; + int triggerCountFailTotal = 0; + + List logReportList = jobLogReportMapper.queryLogReport(DateUtil.addDays(new Date(), -7), new Date()); + + if (logReportList != null && logReportList.size() > 0) { + for (JobLogReport item : logReportList) { + String day = DateUtil.formatDate(item.getTriggerDay()); + int triggerDayCountRunning = item.getRunningCount(); + int triggerDayCountSuc = item.getSucCount(); + int triggerDayCountFail = item.getFailCount(); + + triggerDayList.add(day); + triggerDayCountRunningList.add(triggerDayCountRunning); + triggerDayCountSucList.add(triggerDayCountSuc); + triggerDayCountFailList.add(triggerDayCountFail); + + triggerCountRunningTotal += triggerDayCountRunning; + triggerCountSucTotal += triggerDayCountSuc; + triggerCountFailTotal += triggerDayCountFail; + } + } else { + for (int i = -6; i <= 0; i++) { + triggerDayList.add(DateUtil.formatDate(DateUtil.addDays(new Date(), i))); + triggerDayCountRunningList.add(0); + triggerDayCountSucList.add(0); + triggerDayCountFailList.add(0); + } + } + + Map result = new HashMap<>(); + result.put("triggerDayList", triggerDayList); + result.put("triggerDayCountRunningList", triggerDayCountRunningList); + result.put("triggerDayCountSucList", triggerDayCountSucList); + result.put("triggerDayCountFailList", triggerDayCountFailList); + + result.put("triggerCountRunningTotal", triggerCountRunningTotal); + result.put("triggerCountSucTotal", triggerCountSucTotal); + result.put("triggerCountFailTotal", triggerCountFailTotal); + + return new ReturnT<>(result); + } + + + @Override + public ReturnT batchAdd(DataXBatchJsonBuildDto dto) throws IOException { + + String key = "system_please_choose"; + List rdTables = dto.getReaderTables(); + List wrTables = dto.getWriterTables(); + if (dto.getReaderDatasourceId() == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerDataSource")); + } + if (dto.getWriterDatasourceId() == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerDataSource")); + } + if (rdTables.size() != wrTables.size()) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("json_build_inconsistent_number_r_w_tables")); + } + + DataXJsonBuildDto jsonBuild = new DataXJsonBuildDto(); + + List rColumns; + List wColumns; + for (int i = 0; i < rdTables.size(); i++) { 
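+            // For each reader/writer table pair: fetch both column lists, build a
+            // single-table DataXJsonBuildDto, render the DataX job JSON, then clone
+            // the selected template into a new JobInfo carrying that JSON.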
+ rColumns = datasourceQueryService.getColumns(dto.getReaderDatasourceId(), rdTables.get(i)); + wColumns = datasourceQueryService.getColumns(dto.getWriterDatasourceId(), wrTables.get(i)); + + jsonBuild.setReaderDatasourceId(dto.getReaderDatasourceId()); + jsonBuild.setWriterDatasourceId(dto.getWriterDatasourceId()); + + jsonBuild.setReaderColumns(rColumns); + jsonBuild.setWriterColumns(wColumns); + + jsonBuild.setRdbmsReader(dto.getRdbmsReader()); + jsonBuild.setRdbmsWriter(dto.getRdbmsWriter()); + + List rdTable = new ArrayList<>(); + rdTable.add(rdTables.get(i)); + jsonBuild.setReaderTables(rdTable); + + List wdTable = new ArrayList<>(); + wdTable.add(wrTables.get(i)); + jsonBuild.setWriterTables(wdTable); + + String json = dataxJsonService.buildJobJson(jsonBuild); + + JobTemplate jobTemplate = jobTemplateMapper.loadById(dto.getTemplateId()); + JobInfo jobInfo = new JobInfo(); + BeanUtils.copyProperties(jobTemplate, jobInfo); + jobInfo.setJobJson(json); + jobInfo.setJobDesc(rdTables.get(i)); + jobInfo.setAddTime(new Date()); + jobInfo.setUpdateTime(new Date()); + jobInfo.setGlueUpdatetime(new Date()); + jobInfoMapper.save(jobInfo); + } + return ReturnT.SUCCESS; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobTemplateServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobTemplateServiceImpl.java new file mode 100644 index 0000000..e5279ab --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JobTemplateServiceImpl.java @@ -0,0 +1,239 @@ +package com.czsj.bigdata.service.impl; + + +import com.czsj.bigdata.core.cron.CronExpression; +import com.czsj.bigdata.core.route.ExecutorRouteStrategyEnum; +import com.czsj.bigdata.core.util.I18nUtil; +import com.czsj.bigdata.entity.JobGroup; +import com.czsj.bigdata.entity.JobInfo; +import com.czsj.bigdata.entity.JobTemplate; +import com.czsj.bigdata.mapper.*; +import com.czsj.bigdata.service.JobTemplateService; +import com.czsj.core.biz.model.ReturnT; +import com.czsj.core.enums.ExecutorBlockStrategyEnum; +import com.czsj.core.glue.GlueTypeEnum; +import com.czsj.core.util.Constants; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Service; + +import org.springframework.beans.factory.annotation.Autowired; +import java.text.MessageFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * core job action for xxl-job + * + * @author xuxueli 2016-5-28 15:30:33 + */ +@Service +public class JobTemplateServiceImpl implements JobTemplateService { + @Autowired + private JobGroupMapper jobGroupMapper; + @Autowired + private JobTemplateMapper jobTemplateMapper; + @Autowired + private JobLogMapper jobLogMapper; + @Autowired + private JobLogGlueMapper jobLogGlueMapper; + @Autowired + private JobInfoMapper jobInfoMapper; + + @Override + public Map pageList(int start, int length, int jobGroup, String jobDesc, String executorHandler, int userId, Integer[] projectIds) { + + // page list + List list = jobTemplateMapper.pageList(start, length, jobGroup, jobDesc, executorHandler, userId, projectIds); + int list_count = jobTemplateMapper.pageListCount(start, length, jobGroup, jobDesc, executorHandler, userId, projectIds); + + // package result + Map maps = new HashMap<>(); + maps.put("recordsTotal", list_count); // 总记录数 + maps.put("recordsFiltered", list_count); // 过滤后的总记录数 + maps.put("data", list); // 分页列表 + return maps; + } + + @Override + public ReturnT add(JobTemplate jobTemplate) { + // valid + 
JobGroup group = jobGroupMapper.load(jobTemplate.getJobGroup()); + if (group == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_jobgroup"))); + } + if (!CronExpression.isValidExpression(jobTemplate.getJobCron())) { + return new ReturnT(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid")); + } + if (jobTemplate.getJobDesc() == null || jobTemplate.getJobDesc().trim().length() == 0) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobdesc"))); + } + if (jobTemplate.getUserId() == 0) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_author"))); + } + if (ExecutorRouteStrategyEnum.match(jobTemplate.getExecutorRouteStrategy(), null) == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy") + I18nUtil.getString("system_invalid"))); + } + if (ExecutorBlockStrategyEnum.match(jobTemplate.getExecutorBlockStrategy(), null) == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy") + I18nUtil.getString("system_invalid"))); + } + if (GlueTypeEnum.match(jobTemplate.getGlueType()) == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_gluetype") + I18nUtil.getString("system_invalid"))); + } + if (GlueTypeEnum.BEAN == GlueTypeEnum.match(jobTemplate.getGlueType()) && (jobTemplate.getExecutorHandler() == null || jobTemplate.getExecutorHandler().trim().length() == 0)) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + "JobHandler")); + } + + // fix "\r" in shell + if (GlueTypeEnum.GLUE_SHELL == GlueTypeEnum.match(jobTemplate.getGlueType()) && jobTemplate.getGlueSource() != null) { + jobTemplate.setGlueSource(jobTemplate.getGlueSource().replaceAll("\r", "")); + } + + // ChildJobId valid + if (jobTemplate.getChildJobId() != null && jobTemplate.getChildJobId().trim().length() > 0) { + String[] childJobIds = jobTemplate.getChildJobId().split(","); + for (String childJobIdItem : childJobIds) { + if (StringUtils.isNotBlank(childJobIdItem) && isNumeric(childJobIdItem) && Integer.parseInt(childJobIdItem) > 0) { + JobInfo jobInfo = jobInfoMapper.loadById(Integer.parseInt(childJobIdItem)); + if (jobInfo == null) { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_not_found")), childJobIdItem)); + } + } else { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_invalid")), childJobIdItem)); + } + } + // join , avoid "xxx,," + String temp = Constants.STRING_BLANK; + for (String item : childJobIds) { + temp += item + Constants.SPLIT_COMMA; + } + temp = temp.substring(0, temp.length() - 1); + + jobTemplate.setChildJobId(temp); + } + + if (jobTemplate.getProjectId() == 0) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobproject"))); + } + + // add in db + jobTemplate.setAddTime(new Date()); + jobTemplate.setUpdateTime(new Date()); + jobTemplate.setGlueUpdatetime(new Date()); + jobTemplateMapper.save(jobTemplate); + if (jobTemplate.getId() < 1) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_add") + 
I18nUtil.getString("system_fail"))); + } + + return new ReturnT(String.valueOf(jobTemplate.getId())); + } + + private boolean isNumeric(String str) { + try { + int result = Integer.parseInt(str); + return true; + } catch (NumberFormatException e) { + return false; + } + } + + @Override + public ReturnT update(JobTemplate jobTemplate) { + + // valid + if (!CronExpression.isValidExpression(jobTemplate.getJobCron())) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid")); + } + if (jobTemplate.getJobDesc() == null || jobTemplate.getJobDesc().trim().length() == 0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_jobdesc"))); + } + if (jobTemplate.getUserId() == 0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_field_author"))); + } + if (ExecutorRouteStrategyEnum.match(jobTemplate.getExecutorRouteStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorRouteStrategy") + I18nUtil.getString("system_invalid"))); + } + if (ExecutorBlockStrategyEnum.match(jobTemplate.getExecutorBlockStrategy(), null) == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_executorBlockStrategy") + I18nUtil.getString("system_invalid"))); + } + + // ChildJobId valid + if (jobTemplate.getChildJobId() != null && jobTemplate.getChildJobId().trim().length() > 0) { + String[] childJobIds = jobTemplate.getChildJobId().split(","); + for (String childJobIdItem : childJobIds) { + if (childJobIdItem != null && childJobIdItem.trim().length() > 0 && isNumeric(childJobIdItem)) { + JobTemplate childJobTemplate = jobTemplateMapper.loadById(Integer.parseInt(childJobIdItem)); + if (childJobTemplate == null) { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_not_found")), childJobIdItem)); + } + } else { + return new ReturnT(ReturnT.FAIL_CODE, + MessageFormat.format((I18nUtil.getString("jobinfo_field_childJobId") + "({0})" + I18nUtil.getString("system_invalid")), childJobIdItem)); + } + } + + // join , avoid "xxx,," + String temp = Constants.STRING_BLANK; + for (String item : childJobIds) { + temp += item + Constants.SPLIT_COMMA; + } + temp = temp.substring(0, temp.length() - 1); + + jobTemplate.setChildJobId(temp); + } + + // group valid + JobGroup jobGroup = jobGroupMapper.load(jobTemplate.getJobGroup()); + if (jobGroup == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_jobgroup") + I18nUtil.getString("system_invalid"))); + } + + // stage job info + JobTemplate exists_jobTemplate = jobTemplateMapper.loadById(jobTemplate.getId()); + if (exists_jobTemplate == null) { + return new ReturnT(ReturnT.FAIL_CODE, (I18nUtil.getString("jobinfo_field_id") + I18nUtil.getString("system_not_found"))); + } + + // next trigger time (5s后生效,避开预读周期) + long nextTriggerTime = exists_jobTemplate.getTriggerNextTime(); + + exists_jobTemplate.setJobGroup(jobTemplate.getJobGroup()); + exists_jobTemplate.setJobCron(jobTemplate.getJobCron()); + exists_jobTemplate.setJobDesc(jobTemplate.getJobDesc()); + exists_jobTemplate.setUserId(jobTemplate.getUserId()); + exists_jobTemplate.setAlarmEmail(jobTemplate.getAlarmEmail()); + exists_jobTemplate.setExecutorRouteStrategy(jobTemplate.getExecutorRouteStrategy()); + 
exists_jobTemplate.setExecutorHandler(jobTemplate.getExecutorHandler()); + exists_jobTemplate.setExecutorParam(jobTemplate.getExecutorParam()); + exists_jobTemplate.setExecutorBlockStrategy(jobTemplate.getExecutorBlockStrategy()); + exists_jobTemplate.setExecutorTimeout(jobTemplate.getExecutorTimeout()); + exists_jobTemplate.setExecutorFailRetryCount(jobTemplate.getExecutorFailRetryCount()); + exists_jobTemplate.setChildJobId(jobTemplate.getChildJobId()); + exists_jobTemplate.setTriggerNextTime(nextTriggerTime); + exists_jobTemplate.setJobJson(jobTemplate.getJobJson()); + exists_jobTemplate.setJvmParam(jobTemplate.getJvmParam()); + exists_jobTemplate.setProjectId(jobTemplate.getProjectId()); + exists_jobTemplate.setUpdateTime(new Date()); + jobTemplateMapper.update(exists_jobTemplate); + + return ReturnT.SUCCESS; + } + + @Override + public ReturnT remove(int id) { + JobTemplate xxlJobTemplate = jobTemplateMapper.loadById(id); + if (xxlJobTemplate == null) { + return ReturnT.SUCCESS; + } + + jobTemplateMapper.delete(id); + jobLogMapper.delete(id); + jobLogGlueMapper.deleteByJobId(id); + return ReturnT.SUCCESS; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JsonServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JsonServiceImpl.java new file mode 100644 index 0000000..e6df159 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/JsonServiceImpl.java @@ -0,0 +1,36 @@ +package com.czsj.bigdata.service.impl; + +import com.alibaba.fastjson.JSON; +import com.czsj.bigdata.dto.JsonBuildDto; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.service.JsonService; +import com.czsj.bigdata.service.JobDatasourceService; +import com.czsj.bigdata.tool.flinkx.DataxJsonHelper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * + * @Author: czsj + * @Date: 2022/9/16 11:14 + * @Description: JSON构建实现类 + **/ +@Service +public class JsonServiceImpl implements JsonService { + + @Autowired + private JobDatasourceService jobJdbcDatasourceService; + + @Override + public String buildJobDataxJson(JsonBuildDto dataXJsonBuildDto) { + DataxJsonHelper dataxJsonHelper = new DataxJsonHelper(); + // reader + JobDatasource readerDatasource = jobJdbcDatasourceService.getById(dataXJsonBuildDto.getReaderDatasourceId()); + dataxJsonHelper.initReader(dataXJsonBuildDto, readerDatasource); + // writer + JobDatasource writerDatasource = jobJdbcDatasourceService.getById(dataXJsonBuildDto.getWriterDatasourceId()); + dataxJsonHelper.initWriter(dataXJsonBuildDto, writerDatasource); + + return JSON.toJSONString(dataxJsonHelper.buildJob()); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/RpcServiceImpl.java b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/RpcServiceImpl.java new file mode 100644 index 0000000..baf7333 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/RpcServiceImpl.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.service.impl; + + +import com.czsj.bigdata.service.RpcService; + + + +public class RpcServiceImpl implements RpcService { + @Override + public String getMonitor() { + return "json"; + } + + @Override + public String runjob(String jonifo) { + return ""; + } + + @Override + public String getLog(String executorAddress) { + return ""; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/service/impl/SysServersServiceImpl.java 
b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/SysServersServiceImpl.java new file mode 100644 index 0000000..f84ffd3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/service/impl/SysServersServiceImpl.java @@ -0,0 +1,125 @@ +package com.czsj.bigdata.service.impl; + +import java.net.InetSocketAddress; +import java.util.List; + +import com.alibaba.fastjson.JSONObject; +import com.czsj.bigdata.entity.SysServers; +import com.czsj.bigdata.mapper.SysServersMapper; +import com.czsj.bigdata.service.ISysServersService; +import com.czsj.bigdata.service.RpcService; +import com.czsj.common.annotation.Excel; +import com.czsj.common.config.RPCClient; +import com.czsj.common.utils.DateUtils; +import com.czsj.common.utils.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + + +/** + * 执行器管理Service业务层处理 + * + * @author czsj + * @date 2022-04-28 + */ +@Service +public class SysServersServiceImpl implements ISysServersService +{ + @Autowired + private SysServersMapper sysServersMapper; + + /** + * 查询执行器管理 + * + * @param id 执行器管理主键 + * @return 执行器管理 + */ + @Override + public SysServers selectSysServersById(Long id) + { + return sysServersMapper.selectSysServersById(id); + } + + /** + * 查询执行器管理列表 + * + * @param sysServers 执行器管理 + * @return 执行器管理 + */ + @Override + public List selectSysServersList(SysServers sysServers) + { + return sysServersMapper.selectSysServersList(sysServers); + } + + /** + * 新增执行器管理 + * + * @param sysServers 执行器管理 + * @return 结果 + */ + @Override + public int insertSysServers(SysServers sysServers) + { + SysServers sysServer = new SysServers(); + sysServers.setCreateTime(DateUtils.getNowDate()); + if(!StringUtils.isEmpty(sysServers.getServeraddress())){ + if(!"localhost".equals(sysServers.getServeraddress())) { + RpcService service = RPCClient.getRemoteProxyObj(RpcService.class, new InetSocketAddress(sysServers.getServeraddress(), 8088)); + if (!StringUtils.isEmpty(service.getMonitor())) { + sysServer = JSONObject.parseObject(service.getMonitor(), SysServers.class); + } else { + throw new RuntimeException("获取服务器状态失败,请检查执行器服务或端口号是否开启!"); + } + } + }else{ + throw new RuntimeException("服务IP不能为空!"); + } + sysServer.setGroupcode(sysServers.getGroupname()); + sysServer.setGroupname(sysServers.getGroupname()); + sysServer.setServeraddress(sysServers.getServeraddress()); + sysServer.setCreateTime(sysServers.getCreateTime()); + sysServer.setCreateBy(sysServers.getCreateBy()); + return sysServersMapper.insertSysServers(sysServer); + } + + /** + * 修改执行器管理 + * + * @param sysServers 执行器管理 + * @return 结果 + */ + @Override + public int updateSysServers(SysServers sysServers) + { + if("localhost".equals(sysServers.getServeraddress())){ + throw new RuntimeException("本机地址不能删除"); + } + return sysServersMapper.updateSysServers(sysServers); + } + + /** + * 批量删除执行器管理 + * + * @param ids 需要删除的执行器管理主键 + * @return 结果 + */ + @Override + public int deleteSysServersByIds(Long[] ids) + { + + return sysServersMapper.deleteSysServersByIds(ids); + } + + /** + * 删除执行器管理信息 + * + * @param id 执行器管理主键 + * @return 结果 + */ + @Override + public int deleteSysServersById(Long id) + { + return sysServersMapper.deleteSysServersById(id); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/database/ColumnInfo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/ColumnInfo.java new file mode 100644 index 0000000..40ba3fc --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/ColumnInfo.java 
@@ -0,0 +1,35 @@ +package com.czsj.bigdata.tool.database; + +import lombok.Data; + +/** + * 字段信息 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/7/30 + */ +@Data +public class ColumnInfo { + /** + * 字段名称 + */ + private String name; + /** + * 注释 + */ + private String comment; + /** + * 字段类型 + */ + private String type; + + /** + * 是否是主键列 + */ + private Boolean ifPrimaryKey; + /** + * 是否可为null 0 不可为空 1 可以为null + */ + private int isnull; +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/database/DasColumn.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/DasColumn.java new file mode 100644 index 0000000..86527d0 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/DasColumn.java @@ -0,0 +1,29 @@ +package com.czsj.bigdata.tool.database; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 原始jdbc字段对象 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName DasColumn + * @Version 1.0 + * @since 2019/7/17 16:29 + */ +@Data +@AllArgsConstructor +@NoArgsConstructor +public class DasColumn { + + private String columnName; + + private String columnTypeName; + + private String columnClassName; + + private String columnComment; + private int isNull; + private boolean isprimaryKey; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/database/TableInfo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/TableInfo.java new file mode 100644 index 0000000..5658258 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/database/TableInfo.java @@ -0,0 +1,29 @@ +package com.czsj.bigdata.tool.database; + +import lombok.Data; + +import java.util.List; + +/** + * 表信息 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/7/30 + */ +@Data +public class TableInfo { + /** + * 表名 + */ + private String name; + + /** + * 注释 + */ + private String comment; + /** + * 所有列 + */ + private List columns; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/BaseDataxPlugin.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/BaseDataxPlugin.java new file mode 100644 index 0000000..faf4a80 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/BaseDataxPlugin.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.tool.datax; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 抽象实现类 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseDataxPlugin + * @Version 1.0 + * @since 2019/7/31 9:45 + */ +public abstract class BaseDataxPlugin implements DataxPluginInterface { + + protected Logger logger = LoggerFactory.getLogger(this.getClass()); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonHelper.java new file mode 100644 index 0000000..f2aa13e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonHelper.java @@ -0,0 +1,387 @@ +package com.czsj.bigdata.tool.datax; + + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.czsj.bigdata.dto.*; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.datax.reader.*; +import com.czsj.bigdata.tool.datax.writer.*; +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; +import 
com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.DataxRdbmsPojo; +import com.czsj.bigdata.util.JdbcConstants; +import com.czsj.core.util.Constants; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.util.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.czsj.bigdata.util.JdbcConstants.*; + + +/** + * 构建 com.wugui.datax json的工具类 + * + * @author jingwk + * @ClassName DataxJsonHelper + * @Version 2.1.1 + * @since 2022/03/14 08:24 + */ +@Data +public class DataxJsonHelper implements DataxJsonInterface { + + /** + * 读取的表,根据datax示例,支持多个表(先不考虑,后面再去实现, 这里先用list保存吧) + *
+ * <p>
+ * 目的表的表名称。支持写入一个或者多个表。当配置为多张表时,必须确保所有表结构保持一致 + */ + private List readerTables; + /** + * 读取的字段列表 + */ + private List readerColumns; + /** + * reader jdbc 数据源 + */ + private JobDatasource readerDatasource; + /** + * writer jdbc 数据源 + */ + private JobDatasource writerDatasource; + /** + * 写入的表 + */ + private List writerTables; + /** + * 写入的字段列表 + */ + private List writerColumns; + + private Map buildReader; + + private Map buildWriter; + + private BaseDataxPlugin readerPlugin; + + private BaseDataxPlugin writerPlugin; + + private HiveReaderDto hiveReaderDto; + + private HiveWriterDto hiveWriterDto; + + private HbaseReaderDto hbaseReaderDto; + + private HbaseWriterDto hbaseWriterDto; + + private RdbmsReaderDto rdbmsReaderDto; + + private RdbmsWriterDto rdbmsWriterDto; + + private MongoDBReaderDto mongoDBReaderDto; + + private MongoDBWriterDto mongoDBWriterDto; + + + //用于保存额外参数 + private Map extraParams = Maps.newHashMap(); + + public void initReader(DataXJsonBuildDto dataxJsonDto, JobDatasource readerDatasource) { + + this.readerDatasource = readerDatasource; + this.readerTables = dataxJsonDto.getReaderTables(); + this.readerColumns = dataxJsonDto.getReaderColumns(); + this.hiveReaderDto = dataxJsonDto.getHiveReader(); + this.rdbmsReaderDto = dataxJsonDto.getRdbmsReader(); + this.hbaseReaderDto = dataxJsonDto.getHbaseReader(); + // reader 插件 + String datasource = readerDatasource.getDatasource(); + + this.readerColumns = convertKeywordsColumns(datasource, this.readerColumns); + if (MYSQL.equals(datasource)) { + readerPlugin = new MysqlReader(); + buildReader = buildReader(); + } else if (ORACLE.equals(datasource)) { + readerPlugin = new OracleReader(); + buildReader = buildReader(); + } else if (SQL_SERVER.equals(datasource)) { + readerPlugin = new SqlServerReader(); + buildReader = buildReader(); + } else if (POSTGRESQL.equals(datasource)) { + readerPlugin = new PostgresqlReader(); + buildReader = buildReader(); + } else if (CLICKHOUSE.equals(datasource)) { + readerPlugin = new ClickHouseReader(); + buildReader = buildReader(); + } else if (HIVE.equals(datasource)) { + readerPlugin = new HiveReader(); + buildReader = buildHiveReader(); + } else if (HBASE.equals(datasource)) { + readerPlugin = new HBaseReader(); + buildReader = buildHBaseReader(); + } else if (MONGODB.equals(datasource)) { + readerPlugin = new MongoDBReader(); + buildReader = buildMongoDBReader(); + } + } + + public void initWriter(DataXJsonBuildDto dataxJsonDto, JobDatasource readerDatasource) { + this.writerDatasource = readerDatasource; + this.writerTables = dataxJsonDto.getWriterTables(); + this.writerColumns = dataxJsonDto.getWriterColumns(); + this.hiveWriterDto = dataxJsonDto.getHiveWriter(); + this.rdbmsWriterDto = dataxJsonDto.getRdbmsWriter(); + this.hbaseWriterDto = dataxJsonDto.getHbaseWriter(); + this.mongoDBWriterDto = dataxJsonDto.getMongoDBWriter(); + // writer + String datasource = readerDatasource.getDatasource(); + this.writerColumns = convertKeywordsColumns(datasource, this.writerColumns); + if (MYSQL.equals(datasource)) { + writerPlugin = new MysqlWriter(); + buildWriter = this.buildWriter(); + } else if (ORACLE.equals(datasource)) { + writerPlugin = new OraclelWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.SQL_SERVER.equals(datasource)) { + writerPlugin = new SqlServerlWriter(); + buildWriter = this.buildWriter(); + } else if (POSTGRESQL.equals(datasource)) { + writerPlugin = new PostgresqllWriter(); + buildWriter = this.buildWriter(); + } else if 
(JdbcConstants.CLICKHOUSE.equals(datasource)) { + writerPlugin = new ClickHouseWriter(); + buildWriter = buildWriter(); + } else if (JdbcConstants.HIVE.equals(datasource)) { + writerPlugin = new HiveWriter(); + buildWriter = this.buildHiveWriter(); + } else if (JdbcConstants.HBASE.equals(datasource)) { + writerPlugin = new HBaseWriter(); + buildWriter = this.buildHBaseWriter(); + } else if (JdbcConstants.MONGODB.equals(datasource)) { + writerPlugin = new MongoDBWriter(); + buildWriter = this.buildMongoDBWriter(); + } + } + + private List convertKeywordsColumns(String datasource, List columns) { + if (columns == null) { + return null; + } + + List toColumns = new ArrayList<>(); + columns.forEach(s -> { + toColumns.add(doConvertKeywordsColumn(datasource, s)); + }); + return toColumns; + } + + private String doConvertKeywordsColumn(String dbType, String column) { + if (column == null) { + return null; + } + + column = column.trim(); + column = column.replace("[", ""); + column = column.replace("]", ""); + column = column.replace("`", ""); + column = column.replace("\"", ""); + column = column.replace("'", ""); + + switch (dbType) { + case MYSQL: + return String.format("`%s`", column); + case SQL_SERVER: + return String.format("[%s]", column); + case POSTGRESQL: + case ORACLE: + return String.format("\"%s\"", column); + default: + return column; + } + } + + @Override + public Map buildJob() { + Map res = Maps.newLinkedHashMap(); + Map jobMap = Maps.newLinkedHashMap(); + jobMap.put("setting", buildSetting()); + jobMap.put("content", ImmutableList.of(buildContent())); + res.put("job", jobMap); + return res; + } + + @Override + public Map buildSetting() { + Map res = Maps.newLinkedHashMap(); + Map speedMap = Maps.newLinkedHashMap(); + Map errorLimitMap = Maps.newLinkedHashMap(); + speedMap.putAll(ImmutableMap.of("channel", 3, "byte", 1048576)); + errorLimitMap.putAll(ImmutableMap.of("record", 0, "percentage", 0.02)); + res.put("speed", speedMap); + res.put("errorLimit", errorLimitMap); + return res; + } + + @Override + public Map buildContent() { + Map res = Maps.newLinkedHashMap(); + res.put("reader", this.buildReader); + res.put("writer", this.buildWriter); + return res; + } + + @Override + public Map buildReader() { + DataxRdbmsPojo dataxPluginPojo = new DataxRdbmsPojo(); + dataxPluginPojo.setJobDatasource(readerDatasource); + dataxPluginPojo.setTables(readerTables); + dataxPluginPojo.setRdbmsColumns(readerColumns); + dataxPluginPojo.setSplitPk(rdbmsReaderDto.getReaderSplitPk()); + if (StringUtils.isNotBlank(rdbmsReaderDto.getQuerySql())) { + dataxPluginPojo.setQuerySql(rdbmsReaderDto.getQuerySql()); + } + //where + if (StringUtils.isNotBlank(rdbmsReaderDto.getWhereParams())) { + dataxPluginPojo.setWhereParam(rdbmsReaderDto.getWhereParams()); + } + return readerPlugin.build(dataxPluginPojo); + } + + @Override + public Map buildHiveReader() { + DataxHivePojo dataxHivePojo = new DataxHivePojo(); + dataxHivePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + readerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("index", c.split(Constants.SPLIT_SCOLON)[0]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + dataxHivePojo.setColumns(columns); + dataxHivePojo.setReaderDefaultFS(hiveReaderDto.getReaderDefaultFS()); + dataxHivePojo.setReaderFieldDelimiter(hiveReaderDto.getReaderFieldDelimiter()); + dataxHivePojo.setReaderFileType(hiveReaderDto.getReaderFileType()); + 
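+        // Hive column descriptors appear to be index/name/type tuples joined by
+        // Constants.SPLIT_SCOLON (split above); only the HDFS path and the
+        // header-skip flag remain to be set here.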
dataxHivePojo.setReaderPath(hiveReaderDto.getReaderPath()); + dataxHivePojo.setSkipHeader(hiveReaderDto.getReaderSkipHeader()); + return readerPlugin.buildHive(dataxHivePojo); + } + + @Override + public Map buildHBaseReader() { + DataxHbasePojo dataxHbasePojo = new DataxHbasePojo(); + dataxHbasePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + for (String readerColumn : readerColumns) { + Map column = Maps.newLinkedHashMap(); + column.put("name", readerColumn); + column.put("type", "string"); + columns.add(column); + } + dataxHbasePojo.setColumns(columns); + dataxHbasePojo.setReaderHbaseConfig(readerDatasource.getZkAdress()); + String readerTable=!CollectionUtils.isEmpty(readerTables)?readerTables.get(0):Constants.STRING_BLANK; + dataxHbasePojo.setReaderTable(readerTable); + dataxHbasePojo.setReaderMode(hbaseReaderDto.getReaderMode()); + dataxHbasePojo.setReaderRange(hbaseReaderDto.getReaderRange()); + return readerPlugin.buildHbase(dataxHbasePojo); + } + + @Override + public Map buildMongoDBReader() { + DataxMongoDBPojo dataxMongoDBPojo = new DataxMongoDBPojo(); + dataxMongoDBPojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(readerColumns, columns); + dataxMongoDBPojo.setColumns(columns); + dataxMongoDBPojo.setAddress(readerDatasource.getJdbcUrl()); + dataxMongoDBPojo.setDbName(readerDatasource.getDatabaseName()); + dataxMongoDBPojo.setReaderTable(readerTables.get(0)); + return readerPlugin.buildMongoDB(dataxMongoDBPojo); + } + + + @Override + public Map buildWriter() { + DataxRdbmsPojo dataxPluginPojo = new DataxRdbmsPojo(); + dataxPluginPojo.setJobDatasource(writerDatasource); + dataxPluginPojo.setTables(writerTables); + dataxPluginPojo.setRdbmsColumns(writerColumns); + dataxPluginPojo.setPreSql(rdbmsWriterDto.getPreSql()); + dataxPluginPojo.setPostSql(rdbmsWriterDto.getPostSql()); + return writerPlugin.build(dataxPluginPojo); + } + + @Override + public Map buildHiveWriter() { + DataxHivePojo dataxHivePojo = new DataxHivePojo(); + dataxHivePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + writerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[1]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + dataxHivePojo.setColumns(columns); + dataxHivePojo.setWriterDefaultFS(hiveWriterDto.getWriterDefaultFS()); + dataxHivePojo.setWriteFieldDelimiter(hiveWriterDto.getWriteFieldDelimiter()); + dataxHivePojo.setWriterFileType(hiveWriterDto.getWriterFileType()); + dataxHivePojo.setWriterPath(hiveWriterDto.getWriterPath()); + dataxHivePojo.setWriteMode(hiveWriterDto.getWriteMode()); + dataxHivePojo.setWriterFileName(hiveWriterDto.getWriterFileName()); + return writerPlugin.buildHive(dataxHivePojo); + } + + @Override + public Map buildHBaseWriter() { + DataxHbasePojo dataxHbasePojo = new DataxHbasePojo(); + dataxHbasePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + for (int i = 0; i < writerColumns.size(); i++) { + Map column = Maps.newLinkedHashMap(); + column.put("index", i + 1); + column.put("name", writerColumns.get(i)); + column.put("type", "string"); + columns.add(column); + } + dataxHbasePojo.setColumns(columns); + dataxHbasePojo.setWriterHbaseConfig(writerDatasource.getZkAdress()); + String writerTable=!CollectionUtils.isEmpty(writerTables)?writerTables.get(0):Constants.STRING_BLANK; + dataxHbasePojo.setWriterTable(writerTable); + 
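+        // hbase11xwriter-specific options follow: version column, rowkey assembly
+        // rule, and write mode, mirroring the reader-side mode/range handling above.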
dataxHbasePojo.setWriterVersionColumn(hbaseWriterDto.getWriterVersionColumn()); + dataxHbasePojo.setWriterRowkeyColumn(hbaseWriterDto.getWriterRowkeyColumn()); + dataxHbasePojo.setWriterMode(hbaseWriterDto.getWriterMode()); + return writerPlugin.buildHbase(dataxHbasePojo); + } + + + @Override + public Map buildMongoDBWriter() { + DataxMongoDBPojo dataxMongoDBPojo = new DataxMongoDBPojo(); + dataxMongoDBPojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(writerColumns, columns); + dataxMongoDBPojo.setColumns(columns); + dataxMongoDBPojo.setAddress(writerDatasource.getJdbcUrl()); + dataxMongoDBPojo.setDbName(writerDatasource.getDatabaseName()); + dataxMongoDBPojo.setWriterTable(readerTables.get(0)); + dataxMongoDBPojo.setUpsertInfo(mongoDBWriterDto.getUpsertInfo()); + return writerPlugin.buildMongoDB(dataxMongoDBPojo); + } + + private void buildColumns(List columns, List> returnColumns) { + columns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[0]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[1]); + returnColumns.add(column); + }); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonInterface.java new file mode 100644 index 0000000..d2a02ec --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxJsonInterface.java @@ -0,0 +1,36 @@ +package com.czsj.bigdata.tool.datax; + +import java.util.Map; + +/** + * 构建 com.wugui.datax json的基础接口 + * + * @author jingwk + * @ClassName DataxJsonHelper + * @Version 2.1.1 + * @since 2022/03/14 12:24 + */ +public interface DataxJsonInterface { + + Map buildJob(); + + Map buildSetting(); + + Map buildContent(); + + Map buildReader(); + + Map buildHiveReader(); + + Map buildHiveWriter(); + + Map buildHBaseReader(); + + Map buildHBaseWriter(); + + Map buildMongoDBReader(); + + Map buildMongoDBWriter(); + + Map buildWriter(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxPluginInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxPluginInterface.java new file mode 100644 index 0000000..23d948f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/DataxPluginInterface.java @@ -0,0 +1,61 @@ +package com.czsj.bigdata.tool.datax; + +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; +import com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.DataxRdbmsPojo; + +import java.util.Map; + +/** + * 插件基础接口 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName DataxPluginInterface + * @Version 1.0 + * @since 2019/7/30 22:59 + */ +public interface DataxPluginInterface { + /** + * 获取reader插件名称 + * + * @return + */ + String getName(); + + /** + * 构建 + * + * @return dataxPluginPojo + */ + Map build(DataxRdbmsPojo dataxPluginPojo); + + + /** + * hive json构建 + * @param dataxHivePojo + * @return + */ + Map buildHive(DataxHivePojo dataxHivePojo); + + /** + * hbase json构建 + * @param dataxHbasePojo + * @return + */ + Map buildHbase(DataxHbasePojo dataxHbasePojo); + + /** + * mongodb json构建 + * @param dataxMongoDBPojo + * @return + */ + Map buildMongoDB(DataxMongoDBPojo dataxMongoDBPojo); + + /** + * 获取示例 + * + * @return + */ + Map sample(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/BaseReaderPlugin.java 
b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/BaseReaderPlugin.java new file mode 100644 index 0000000..abd224e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/BaseReaderPlugin.java @@ -0,0 +1,72 @@ +package com.czsj.bigdata.tool.datax.reader; + +import cn.hutool.core.util.StrUtil; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.datax.BaseDataxPlugin; +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; +import com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.DataxRdbmsPojo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +/** + * Reader + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseReaderPlugin + * @Version 1.0 + * @since 2019/8/2 16:27 + */ +public abstract class BaseReaderPlugin extends BaseDataxPlugin { + + + @Override + public Map build(DataxRdbmsPojo plugin) { + //构建 + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map connectionObj = Maps.newLinkedHashMap(); + + JobDatasource jobDatasource = plugin.getJobDatasource(); + parameterObj.put("username", jobDatasource.getJdbcUsername()); + parameterObj.put("password", jobDatasource.getJdbcPassword()); + + //判断是否是 querySql + if (StrUtil.isNotBlank(plugin.getQuerySql())) { + connectionObj.put("querySql", ImmutableList.of(plugin.getQuerySql())); + } else { + parameterObj.put("column", plugin.getRdbmsColumns()); + //判断是否有where + if (StringUtils.isNotBlank(plugin.getWhereParam())) { + parameterObj.put("where", plugin.getWhereParam()); + } + connectionObj.put("table", plugin.getTables()); + } + parameterObj.put("splitPk",plugin.getSplitPk()); + connectionObj.put("jdbcUrl", ImmutableList.of(jobDatasource.getJdbcUrl())); + + parameterObj.put("connection", ImmutableList.of(connectionObj)); + + readerObj.put("parameter", parameterObj); + + return readerObj; + } + + @Override + public Map buildHive(DataxHivePojo dataxHivePojo) { + return null; + } + + @Override + public Map buildHbase(DataxHbasePojo dataxHbasePojo) { return null; } + + @Override + public Map buildMongoDB(DataxMongoDBPojo dataxMongoDBPojo) { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/ClickHouseReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/ClickHouseReader.java new file mode 100644 index 0000000..44471bd --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/ClickHouseReader.java @@ -0,0 +1,16 @@ +package com.czsj.bigdata.tool.datax.reader; + +import java.util.Map; + +public class ClickHouseReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "clickhousereader"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/DataxReaderInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/DataxReaderInterface.java new file mode 100644 index 0000000..42335b5 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/DataxReaderInterface.java @@ -0,0 +1,16 @@ +package com.czsj.bigdata.tool.datax.reader; + + +import com.czsj.bigdata.tool.datax.DataxPluginInterface; + +/** + * 用于构建reader的接口 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + 
* @since 2019/7/30 + */ +public interface DataxReaderInterface extends DataxPluginInterface { + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HBaseReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HBaseReader.java new file mode 100644 index 0000000..ba92ca3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HBaseReader.java @@ -0,0 +1,38 @@ +package com.czsj.bigdata.tool.datax.reader; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class HBaseReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "hbase11xreader"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildHbase(DataxHbasePojo plugin) { + //构建 + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map confige = Maps.newLinkedHashMap(); + confige.put("hbase.zookeeper.quorum",plugin.getReaderHbaseConfig()); + parameterObj.put("hbaseConfig", confige); + parameterObj.put("table", plugin.getReaderTable()); + parameterObj.put("mode", plugin.getReaderMode()); + parameterObj.put("column", plugin.getColumns()); + if(StringUtils.isNotBlank(plugin.getReaderRange().getStartRowkey()) && StringUtils.isNotBlank(plugin.getReaderRange().getEndRowkey())){ + parameterObj.put("range", plugin.getReaderRange()); + } + parameterObj.put("maxVersion", plugin.getReaderMaxVersion()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HiveReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HiveReader.java new file mode 100644 index 0000000..0701806 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/HiveReader.java @@ -0,0 +1,42 @@ +package com.czsj.bigdata.tool.datax.reader; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; + +import java.util.Map; + +/** + * hive reader 构建类 + * + * @author jingwk + * @version 2.0 + * @since 2022/01/05 + */ +public class HiveReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "hdfsreader"; + } + + @Override + public Map sample() { + return null; + } + + + @Override + public Map buildHive(DataxHivePojo plugin) { + //构建 + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("path", plugin.getReaderPath()); + parameterObj.put("defaultFS", plugin.getReaderDefaultFS()); + parameterObj.put("fileType", plugin.getReaderFileType()); + parameterObj.put("fieldDelimiter", plugin.getReaderFieldDelimiter()); + parameterObj.put("skipHeader", plugin.getSkipHeader()); + parameterObj.put("column", plugin.getColumns()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MongoDBReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MongoDBReader.java new file mode 100644 index 0000000..cc5ffa6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MongoDBReader.java @@ -0,0 +1,43 @@ +package com.czsj.bigdata.tool.datax.reader; + +import com.google.common.collect.Maps; +import 
com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.core.util.Constants; + +import java.util.Map; + +public class MongoDBReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "mongodbreader"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildMongoDB(DataxMongoDBPojo plugin) { + //构建 + JobDatasource dataSource = plugin.getJdbcDatasource(); + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + String[] addressList = null; + String str = dataSource.getJdbcUrl().replace(Constants.MONGO_URL_PREFIX, Constants.STRING_BLANK); + if (str.contains(Constants.SPLIT_AT) && str.contains(Constants.SPLIT_DIVIDE)) { + addressList = str.substring(str.indexOf(Constants.SPLIT_AT) + 1, str.indexOf(Constants.SPLIT_DIVIDE)).split(Constants.SPLIT_COMMA); + } else if (str.contains(Constants.SPLIT_DIVIDE)) { + addressList = str.substring(0, str.indexOf(Constants.SPLIT_DIVIDE)).split(Constants.SPLIT_COMMA); + } + parameterObj.put("address", addressList); + parameterObj.put("userName", dataSource.getJdbcUsername() == null ? Constants.STRING_BLANK : dataSource.getJdbcUsername()); + parameterObj.put("userPassword", dataSource.getJdbcPassword() == null ? Constants.STRING_BLANK : dataSource.getJdbcPassword()); + parameterObj.put("dbName", dataSource.getDatabaseName()); + parameterObj.put("collectionName", plugin.getReaderTable()); + parameterObj.put("column", plugin.getColumns()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MysqlReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MysqlReader.java new file mode 100644 index 0000000..4693705 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/MysqlReader.java @@ -0,0 +1,25 @@ +package com.czsj.bigdata.tool.datax.reader; + + +import java.util.Map; + +/** + * mysql reader 构建类 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MysqlReader + * @Version 1.0 + * @since 2019/7/30 23:07 + */ +public class MysqlReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "mysqlreader"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/OracleReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/OracleReader.java new file mode 100644 index 0000000..f745b4d --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/OracleReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.datax.reader; + +import java.util.Map; + +/** + * oracle reader 构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class OracleReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "oraclereader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/PostgresqlReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/PostgresqlReader.java new file mode 100644 index 0000000..e2dbec2 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/PostgresqlReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.datax.reader; + +import 
java.util.Map; + +/** + * postgresql 构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class PostgresqlReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "postgresqlreader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/SqlServerReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/SqlServerReader.java new file mode 100644 index 0000000..778e72a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/reader/SqlServerReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.datax.reader; + +import java.util.Map; + +/** + * sqlserver reader 构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class SqlServerReader extends BaseReaderPlugin implements DataxReaderInterface { + @Override + public String getName() { + return "sqlserverreader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/BaseWriterPlugin.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/BaseWriterPlugin.java new file mode 100644 index 0000000..949d1e3 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/BaseWriterPlugin.java @@ -0,0 +1,77 @@ +package com.czsj.bigdata.tool.datax.writer; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.datax.BaseDataxPlugin; +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; +import com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.DataxRdbmsPojo; +import com.czsj.core.util.Constants; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * datax writer base + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseWriterPlugin + * @Version 1.0 + * @since 2019/8/2 16:28 + */ +public abstract class BaseWriterPlugin extends BaseDataxPlugin { + @Override + public Map build(DataxRdbmsPojo plugin) { + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + + Map parameterObj = Maps.newLinkedHashMap(); +// parameterObj.put("writeMode", "insert"); + JobDatasource jobDatasource = plugin.getJobDatasource(); + parameterObj.put("username", jobDatasource.getJdbcUsername()); + parameterObj.put("password", jobDatasource.getJdbcPassword()); + parameterObj.put("column", plugin.getRdbmsColumns()); + parameterObj.put("preSql", splitSql(plugin.getPreSql())); + parameterObj.put("postSql", splitSql(plugin.getPostSql())); + + Map connectionObj = Maps.newLinkedHashMap(); + connectionObj.put("table", plugin.getTables()); + connectionObj.put("jdbcUrl", jobDatasource.getJdbcUrl()); + + parameterObj.put("connection", ImmutableList.of(connectionObj)); + writerObj.put("parameter", parameterObj); + + return writerObj; + } + + private String[] splitSql(String sql) { + String[] sqlArr = null; + if (StringUtils.isNotBlank(sql)) { + Pattern p = Pattern.compile("\r\n|\r|\n|\n\r"); + Matcher m = p.matcher(sql); + String sqlStr = m.replaceAll(Constants.STRING_BLANK); + sqlArr = sqlStr.split(Constants.SPLIT_COLON); + } + return sqlArr; + } + + @Override + public Map buildHive(DataxHivePojo dataxHivePojo) { + return 
null; + } + + + @Override + public Map buildHbase(DataxHbasePojo dataxHbasePojo) { + return null; + } + + @Override + public Map buildMongoDB(DataxMongoDBPojo plugin) { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/ClickHouseWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/ClickHouseWriter.java new file mode 100644 index 0000000..5d78369 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/ClickHouseWriter.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.tool.datax.writer; + +import java.util.Map; + +public class ClickHouseWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "clickhousewriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/DataxWriterInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/DataxWriterInterface.java new file mode 100644 index 0000000..677f021 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/DataxWriterInterface.java @@ -0,0 +1,16 @@ +package com.czsj.bigdata.tool.datax.writer; + + +import com.czsj.bigdata.tool.datax.DataxPluginInterface; + +/** + * 用于构建writer的接口 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/7/30 + */ +public interface DataxWriterInterface extends DataxPluginInterface { + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HBaseWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HBaseWriter.java new file mode 100644 index 0000000..0d14166 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HBaseWriter.java @@ -0,0 +1,39 @@ +package com.czsj.bigdata.tool.datax.writer; + +import com.alibaba.fastjson.JSON; +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.DataxHbasePojo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class HBaseWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "hbase11xwriter"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildHbase(DataxHbasePojo plugin) { + //构建 + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map confige = Maps.newLinkedHashMap(); + confige.put("hbase.zookeeper.quorum", plugin.getWriterHbaseConfig()); + parameterObj.put("hbaseConfig", confige); + parameterObj.put("table", plugin.getWriterTable()); + parameterObj.put("mode", plugin.getWriterMode()); + parameterObj.put("column", plugin.getColumns()); + parameterObj.put("rowkeyColumn", JSON.parseArray(plugin.getWriterRowkeyColumn())); + if (StringUtils.isNotBlank(plugin.getWriterVersionColumn().getValue())) { + parameterObj.put("versionColumn", plugin.getWriterVersionColumn()); + } + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HiveWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HiveWriter.java new file mode 100644 index 0000000..aa7d47d --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/HiveWriter.java @@ -0,0 +1,43 @@ +package com.czsj.bigdata.tool.datax.writer; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.DataxHivePojo; + +import 
java.util.Map; + +/** + * hive writer构建类 + * + * @author jingwk + * @version 2.0 + * @since 2022/01/05 + */ +public class HiveWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "hdfswriter"; + } + + + @Override + public Map sample() { + return null; + } + + @Override + public Map buildHive(DataxHivePojo plugin) { + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("defaultFS", plugin.getWriterDefaultFS()); + parameterObj.put("fileType", plugin.getWriterFileType()); + parameterObj.put("path", plugin.getWriterPath()); + parameterObj.put("fileName", plugin.getWriterFileName()); + parameterObj.put("writeMode", plugin.getWriteMode()); + parameterObj.put("fieldDelimiter", plugin.getWriteFieldDelimiter()); + parameterObj.put("column", plugin.getColumns()); + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MongoDBWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MongoDBWriter.java new file mode 100644 index 0000000..4ec7108 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MongoDBWriter.java @@ -0,0 +1,50 @@ +package com.czsj.bigdata.tool.datax.writer; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.dto.UpsertInfo; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.pojo.DataxMongoDBPojo; +import com.czsj.core.util.Constants; + +import java.util.Map; + +public class MongoDBWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "mongodbwriter"; + } + + @Override + public Map sample() { + return null; + } + + + @Override + public Map buildMongoDB(DataxMongoDBPojo plugin) { + //构建 + Map writerObj = Maps.newLinkedHashMap(); + JobDatasource dataSource = plugin.getJdbcDatasource(); + writerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + String[] addressList = null; + String str = dataSource.getJdbcUrl().replace(Constants.MONGO_URL_PREFIX, Constants.STRING_BLANK); + if (str.contains(Constants.SPLIT_AT) && str.contains(Constants.SPLIT_DIVIDE)) { + addressList = str.substring(str.indexOf(Constants.SPLIT_AT) + 1, str.indexOf(Constants.SPLIT_DIVIDE)).split(Constants.SPLIT_COMMA); + } else if (str.contains(Constants.SPLIT_DIVIDE)) { + addressList = str.substring(0, str.indexOf(Constants.SPLIT_DIVIDE)).split(Constants.SPLIT_COMMA); + } + parameterObj.put("address", addressList); + parameterObj.put("userName", dataSource.getJdbcUsername() == null ? Constants.STRING_BLANK : dataSource.getJdbcUsername()); + parameterObj.put("userPassword", dataSource.getJdbcPassword() == null ? 
Constants.STRING_BLANK : dataSource.getJdbcPassword()); + parameterObj.put("dbName", dataSource.getDatabaseName()); + parameterObj.put("collectionName", plugin.getWriterTable()); + parameterObj.put("column", plugin.getColumns()); + UpsertInfo upsert = plugin.getUpsertInfo(); + if (upsert != null) { + parameterObj.put("upsertInfo", upsert); + } + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MysqlWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MysqlWriter.java new file mode 100644 index 0000000..68e029a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/MysqlWriter.java @@ -0,0 +1,24 @@ +package com.czsj.bigdata.tool.datax.writer; + +import java.util.Map; + +/** + * mysql writer构建类 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MysqlWriter + * @Version 1.0 + * @since 2019/7/30 23:08 + */ +public class MysqlWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "mysqlwriter"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/OraclelWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/OraclelWriter.java new file mode 100644 index 0000000..102f4c6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/OraclelWriter.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.datax.writer; + +import java.util.Map; + +/** + * oracle writer构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class OraclelWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "oraclewriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/PostgresqllWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/PostgresqllWriter.java new file mode 100644 index 0000000..8fdec61 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/PostgresqllWriter.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.tool.datax.writer; + +import java.util.Map; + +/** + * postgresql writer构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class PostgresqllWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "postgresqlwriter"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/SqlServerlWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/SqlServerlWriter.java new file mode 100644 index 0000000..cc27802 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/datax/writer/SqlServerlWriter.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.tool.datax.writer; + + +import java.util.Map; + +/** + * sql server writer构建类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class SqlServerlWriter extends BaseWriterPlugin implements DataxWriterInterface { + @Override + public String getName() { + return "sqlserverwriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/BaseFlinkxPlugin.java 
b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/BaseFlinkxPlugin.java new file mode 100644 index 0000000..55fd84b --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/BaseFlinkxPlugin.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.tool.flinkx; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Abstract base implementation + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseFlinkxPlugin + * @Version 1.0 + * @since 2019/7/31 9:45 + */ +public abstract class BaseFlinkxPlugin implements FlinkxPluginInterface { + + protected Logger logger = LoggerFactory.getLogger(this.getClass()); + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/DataxJsonHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/DataxJsonHelper.java new file mode 100644 index 0000000..a849533 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/DataxJsonHelper.java @@ -0,0 +1,402 @@ +package com.czsj.bigdata.tool.flinkx; + + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.czsj.bigdata.dto.*; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.flinkx.reader.*; +import com.czsj.bigdata.tool.flinkx.writer.*; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.FlinkxRdbmsPojo; +import com.czsj.bigdata.util.JdbcConstants; +import com.czsj.core.util.Constants; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.util.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * JSON builder class + */ +@Data +public class DataxJsonHelper implements JsonInterface { + + /** + * Tables to read. Per the flinkx examples, multiple tables are supported (not handled yet, to be implemented later; a list is used for now). + *
+ * Name(s) of the target table(s). Writing one or more tables is supported; when multiple tables are configured, all of them must share the same structure. + */ + private List readerTables; + /** + * Columns to read + */ + private List readerColumns; + /** + * reader jdbc datasource + */ + private JobDatasource readerDatasource; + /** + * writer jdbc datasource + */ + private JobDatasource writerDatasource; + /** + * Tables to write + */ + private List writerTables; + /** + * Columns to write + */ + private List writerColumns; + + private Map buildReader; + + private Map buildWriter; + + private BaseFlinkxPlugin readerPlugin; + + private BaseFlinkxPlugin writerPlugin; + + private HiveReaderDto hiveReaderDto; + + private HiveWriterDto hiveWriterDto; + + private HbaseReaderDto hbaseReaderDto; + + private HbaseWriterDto hbaseWriterDto; + + private RdbmsReaderDto rdbmsReaderDto; + + private RdbmsWriterDto rdbmsWriterDto; + + private MongoDBReaderDto mongoDBReaderDto; + + private MongoDBWriterDto mongoDBWriterDto; + + private ClickhouseReaderDto clickhouseReaderDto; + + private ClickhouseWriterDto clickhouseWriterDto; + + + // holds extra parameters + private Map extraParams = Maps.newHashMap(); + + public void initReader(JsonBuildDto dataXJsonBuildDto, JobDatasource readerDatasource) { + + this.readerDatasource = readerDatasource; + this.readerTables = dataXJsonBuildDto.getReaderTables(); + this.readerColumns = dataXJsonBuildDto.getReaderColumns(); + this.hiveReaderDto = dataXJsonBuildDto.getHiveReader(); + this.rdbmsReaderDto = dataXJsonBuildDto.getRdbmsReader(); + this.hbaseReaderDto = dataXJsonBuildDto.getHbaseReader(); + this.clickhouseReaderDto = dataXJsonBuildDto.getClickhouseReader(); + // reader plugin + String datasource = readerDatasource.getDatasource(); + +// this.readerColumns = convertKeywordsColumns(datasource, this.readerColumns); + if (JdbcConstants.MYSQL.equals(datasource)) { + readerPlugin = new MysqlReader(); + buildReader = buildReader(); + } else if (JdbcConstants.ORACLE.equals(datasource)) { + readerPlugin = new OracleReader(); + buildReader = buildReader(); + } else if (JdbcConstants.HANA.equals(datasource)) { + readerPlugin = new HanaReader(); + buildReader = buildReader(); + } else if (JdbcConstants.SQL_SERVER.equals(datasource)) { + readerPlugin = new SqlServerReader(); + buildReader = buildReader(); + } else if (JdbcConstants.POSTGRESQL.equals(datasource)) { + readerPlugin = new PostgresqlReader(); + buildReader = buildReader(); + } else if (JdbcConstants.CLICKHOUSE.equals(datasource)) { + readerPlugin = new ClickHouseReader(); + buildReader = buildReader(); + } else if (JdbcConstants.HIVE.equals(datasource)) { + readerPlugin = new HiveReader(); + buildReader = buildHiveReader(); + } else if (JdbcConstants.HBASE.equals(datasource)) { + readerPlugin = new HBaseReader(); + buildReader = buildHBaseReader(); + } else if (JdbcConstants.MONGODB.equals(datasource)) { + readerPlugin = new MongoDBReader(); + buildReader = buildMongoDBReader(); + } + } + + public void initWriter(JsonBuildDto flinkxJsonDto, JobDatasource readerDatasource) { + this.writerDatasource = readerDatasource; + this.writerTables = flinkxJsonDto.getWriterTables(); + this.writerColumns = flinkxJsonDto.getWriterColumns(); + this.hiveWriterDto = flinkxJsonDto.getHiveWriter(); + this.rdbmsWriterDto = flinkxJsonDto.getRdbmsWriter(); + this.hbaseWriterDto = flinkxJsonDto.getHbaseWriter(); + this.mongoDBWriterDto = flinkxJsonDto.getMongoDBWriter(); + // writer + String datasource = readerDatasource.getDatasource(); +// this.writerColumns = convertKeywordsColumns(datasource, this.writerColumns); + if (JdbcConstants.MYSQL.equals(datasource)) { + writerPlugin = new MysqlWriter();
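The reader chain in initReader above (and the writer chain that continues below) selects a plugin by comparing the datasource key against JdbcConstants one branch at a time. A minimal sketch of the same dispatch as a lookup table; the registry class and its use of Guava's ImmutableMap are illustrative assumptions, not part of this patch:

    // Hypothetical alternative to the if/else dispatch in initReader above.
    import java.util.Map;
    import java.util.function.Supplier;
    import com.google.common.collect.ImmutableMap;
    import com.czsj.bigdata.util.JdbcConstants;
    import com.czsj.bigdata.tool.flinkx.BaseFlinkxPlugin;
    import com.czsj.bigdata.tool.flinkx.reader.*;

    class ReaderPluginRegistry {
        // Same JdbcConstants keys the chain above compares against (subset shown).
        private static final Map<String, Supplier<BaseFlinkxPlugin>> READERS = ImmutableMap.of(
                JdbcConstants.MYSQL, MysqlReader::new,
                JdbcConstants.ORACLE, OracleReader::new,
                JdbcConstants.POSTGRESQL, PostgresqlReader::new);

        static BaseFlinkxPlugin forDatasource(String datasource) {
            Supplier<BaseFlinkxPlugin> factory = READERS.get(datasource);
            if (factory == null) {
                throw new UnsupportedOperationException("no reader plugin for " + datasource);
            }
            return factory.get();
        }
    }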
+ buildWriter = this.buildWriter(); + } else if (JdbcConstants.ORACLE.equals(datasource)) { + writerPlugin = new OraclelWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.HANA.equals(datasource)) { + writerPlugin = new HanaWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.SQL_SERVER.equals(datasource)) { + writerPlugin = new SqlServerlWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.POSTGRESQL.equals(datasource)) { + writerPlugin = new PostgresqllWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.CLICKHOUSE.equals(datasource)) { + writerPlugin = new ClickHouseWriter(); + buildWriter = buildWriter(); + } else if (JdbcConstants.HIVE.equals(datasource)) { + writerPlugin = new HiveWriter(); + buildWriter = this.buildHiveWriter(); + } else if (JdbcConstants.HBASE.equals(datasource)) { + writerPlugin = new HBaseWriter(); + buildWriter = this.buildHBaseWriter(); + } else if (JdbcConstants.MONGODB.equals(datasource)) { + writerPlugin = new MongoDBWriter(); + buildWriter = this.buildMongoDBWriter(); + } + } + + private List convertKeywordsColumns(String datasource, List columns) { + if (columns == null) { + return null; + } + + List toColumns = new ArrayList<>(); + columns.forEach(s -> { + toColumns.add(doConvertKeywordsColumn(datasource, s)); + }); + return toColumns; + } + + private String doConvertKeywordsColumn(String dbType, String column) { + if (column == null) { + return null; + } + + column = column.trim(); + column = column.replace("[", ""); + column = column.replace("]", ""); + column = column.replace("`", ""); + column = column.replace("\"", ""); + column = column.replace("'", ""); + + switch (dbType) { + case JdbcConstants.MYSQL: + return String.format("`%s`", column); + case JdbcConstants.SQL_SERVER: + return String.format("[%s]", column); + case JdbcConstants.POSTGRESQL: + case JdbcConstants.ORACLE: + return String.format("\"%s\"", column); + case JdbcConstants.HANA: + return String.format("\"%s\"", column); + default: + return column; + } + } + + @Override + public Map buildJob() { + Map res = Maps.newLinkedHashMap(); + Map jobMap = Maps.newLinkedHashMap(); + jobMap.put("setting", buildSetting()); + jobMap.put("content", ImmutableList.of(buildContent())); + res.put("job", jobMap); + return res; + } + + @Override + public Map buildSetting() { + Map res = Maps.newLinkedHashMap(); + Map speedMap = Maps.newLinkedHashMap(); + Map errorLimitMap = Maps.newLinkedHashMap(); + + Map restoreMap = Maps.newLinkedHashMap(); + Map logMap = Maps.newLinkedHashMap(); + speedMap.putAll(ImmutableMap.of("channel", 1, "bytes", 0)); + errorLimitMap.putAll(ImmutableMap.of("record", 100)); + restoreMap.putAll(ImmutableMap.of("maxRowNumForCheckpoint", 0,"isRestore",false,"restoreColumnName","","restoreColumnIndex",0)); + logMap.putAll(ImmutableMap.of("isLogger", false,"level","debug","path","","pattern","")); + res.put("speed", speedMap); + res.put("errorLimit", errorLimitMap); + res.put("restore",restoreMap); + res.put("log",logMap); + return res; + } + + @Override + public Map buildContent() { + Map res = Maps.newLinkedHashMap(); + res.put("reader", this.buildReader); + res.put("writer", this.buildWriter); + return res; + } + + @Override + public Map buildReader() { + FlinkxRdbmsPojo flinkxPluginPojo = new FlinkxRdbmsPojo(); + flinkxPluginPojo.setJobDatasource(readerDatasource); + flinkxPluginPojo.setTables(readerTables); + List columns = readerPlugin.getColumn(readerColumns); + 
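buildSetting above emits fixed defaults only, so every generated job carries the same "setting" block; serialized it comes out roughly as follows (shape read off the puts above, formatting illustrative; buildReader continues below):

    // "setting": {
    //   "speed":      { "channel": 1, "bytes": 0 },
    //   "errorLimit": { "record": 100 },
    //   "restore":    { "maxRowNumForCheckpoint": 0, "isRestore": false,
    //                   "restoreColumnName": "", "restoreColumnIndex": 0 },
    //   "log":        { "isLogger": false, "level": "debug", "path": "", "pattern": "" }
    // }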
flinkxPluginPojo.setRdbmsColumns(columns); + flinkxPluginPojo.setSplitPk(rdbmsReaderDto.getReaderSplitPk()); + if (StringUtils.isNotBlank(rdbmsReaderDto.getQuerySql())) { + flinkxPluginPojo.setQuerySql(rdbmsReaderDto.getQuerySql()); + } + //where + if (StringUtils.isNotBlank(rdbmsReaderDto.getWhereParams())) { + flinkxPluginPojo.setWhereParam(rdbmsReaderDto.getWhereParams()); + } + return readerPlugin.build(flinkxPluginPojo); + } + + @Override + public Map buildHiveReader() { + FlinkxHivePojo flinkxHivePojo = new FlinkxHivePojo(); + flinkxHivePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + readerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[1]); + column.put("index", Integer.parseInt(c.split(Constants.SPLIT_SCOLON)[0])); + column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxHivePojo.setColumns(columns); + flinkxHivePojo.setReaderDefaultFS(hiveReaderDto.getReaderDefaultFS()); + flinkxHivePojo.setReaderFieldDelimiter(hiveReaderDto.getReaderFieldDelimiter()); + flinkxHivePojo.setReaderFileType(hiveReaderDto.getReaderFileType()); + flinkxHivePojo.setReaderPath(hiveReaderDto.getReaderPath()); + flinkxHivePojo.setSkipHeader(hiveReaderDto.getReaderSkipHeader()); + return readerPlugin.buildHive(flinkxHivePojo); + } + + @Override + public Map buildHBaseReader() { + FlinkxHbasePojo flinkxHbasePojo = new FlinkxHbasePojo(); + flinkxHbasePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + for (String readerColumn : readerColumns) { + Map column = Maps.newLinkedHashMap(); + column.put("name", readerColumn); + column.put("type", "string"); + columns.add(column); + } + flinkxHbasePojo.setColumns(columns); + flinkxHbasePojo.setReaderHbaseConfig(readerDatasource.getZkAdress()); + String readerTable=!CollectionUtils.isEmpty(readerTables)?readerTables.get(0):Constants.STRING_BLANK; + flinkxHbasePojo.setReaderTable(readerTable); + flinkxHbasePojo.setReaderMode(hbaseReaderDto.getReaderMode()); + flinkxHbasePojo.setReaderRange(hbaseReaderDto.getReaderRange()); + return readerPlugin.buildHbase(flinkxHbasePojo); + } + + @Override + public Map buildMongoDBReader() { + FlinkxMongoDBPojo flinkxMongoDBPojo = new FlinkxMongoDBPojo(); + flinkxMongoDBPojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(readerColumns, columns); + flinkxMongoDBPojo.setColumns(columns); + flinkxMongoDBPojo.setAddress(readerDatasource.getJdbcUrl()); + flinkxMongoDBPojo.setDbName(readerDatasource.getDatabaseName()); + flinkxMongoDBPojo.setReaderTable(readerTables.get(0)); + return readerPlugin.buildMongoDB(flinkxMongoDBPojo); + } + + + @Override + public Map buildWriter() { + FlinkxRdbmsPojo flinkxPluginPojo = new FlinkxRdbmsPojo(); + flinkxPluginPojo.setJobDatasource(writerDatasource); + flinkxPluginPojo.setTables(writerTables); + List list =writerPlugin.getColumn(writerColumns); + flinkxPluginPojo.setRdbmsColumns(list); + flinkxPluginPojo.setPreSql(rdbmsWriterDto.getPreSql()); + flinkxPluginPojo.setPostSql(rdbmsWriterDto.getPostSql()); + return writerPlugin.build(flinkxPluginPojo); + } + + @Override + public Map buildHiveWriter() { + FlinkxHivePojo flinkxHivePojo = new FlinkxHivePojo(); + flinkxHivePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + writerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[1]); + 
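    // Each writer column string is assumed to be encoded as "index:name:type"
    // (e.g. "0:user_name:string") and split on Constants.SPLIT_SCOLON; the
    // constant's actual value is not shown in this patch, ":" is an assumption.
    // Index [1] is the column name (put above); index [2] is the column type
    // (put just below).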
column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxHivePojo.setColumns(columns); + flinkxHivePojo.setWriterDefaultFS(hiveWriterDto.getWriterDefaultFS()); + flinkxHivePojo.setWriteFieldDelimiter(hiveWriterDto.getWriteFieldDelimiter()); + flinkxHivePojo.setWriterFileType(hiveWriterDto.getWriterFileType()); + flinkxHivePojo.setWriterPath(hiveWriterDto.getWriterPath()); + flinkxHivePojo.setWriteMode(hiveWriterDto.getWriteMode()); + flinkxHivePojo.setWriterFileName(hiveWriterDto.getWriterFileName()); + return writerPlugin.buildHive(flinkxHivePojo); + } + + @Override + public Map buildHBaseWriter() { + FlinkxHbasePojo flinkxHbasePojo = new FlinkxHbasePojo(); + flinkxHbasePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + for (int i = 0; i < writerColumns.size(); i++) { + Map column = Maps.newLinkedHashMap(); + column.put("index", i + 1); + column.put("name", writerColumns.get(i)); + column.put("type", "string"); + columns.add(column); + } + flinkxHbasePojo.setColumns(columns); + flinkxHbasePojo.setWriterHbaseConfig(writerDatasource.getZkAdress()); + String writerTable=!CollectionUtils.isEmpty(writerTables)?writerTables.get(0):Constants.STRING_BLANK; + flinkxHbasePojo.setWriterTable(writerTable); + flinkxHbasePojo.setWriterVersionColumn(hbaseWriterDto.getWriterVersionColumn()); + flinkxHbasePojo.setWriterRowkeyColumn(hbaseWriterDto.getWriterRowkeyColumn()); + flinkxHbasePojo.setWriterMode(hbaseWriterDto.getWriterMode()); + return writerPlugin.buildHbase(flinkxHbasePojo); + } + + + @Override + public Map buildMongoDBWriter() { + FlinkxMongoDBPojo flinkxMongoDBPojo = new FlinkxMongoDBPojo(); + flinkxMongoDBPojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(writerColumns, columns); + flinkxMongoDBPojo.setColumns(columns); + flinkxMongoDBPojo.setAddress(writerDatasource.getJdbcUrl()); + flinkxMongoDBPojo.setDbName(writerDatasource.getDatabaseName()); + flinkxMongoDBPojo.setWriterTable(writerTables.get(0)); + flinkxMongoDBPojo.setUpsertInfo(mongoDBWriterDto.getUpsertInfo()); + return writerPlugin.buildMongoDB(flinkxMongoDBPojo); + } + + private void buildColumns(List columns, List> returnColumns) { + columns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[0]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[1]); + returnColumns.add(column); + }); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/FlinkxPluginInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/FlinkxPluginInterface.java new file mode 100644 index 0000000..6eb63b7 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/FlinkxPluginInterface.java @@ -0,0 +1,65 @@ +package com.czsj.bigdata.tool.flinkx; + +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.FlinkxRdbmsPojo; + +import java.util.List; +import java.util.Map; + +/** + * 插件基础接口 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName FlinkxPluginInterface + * @Version 1.0 + * @since 2019/7/30 22:59 + */ +public interface FlinkxPluginInterface { + /** + * 获取reader插件名称 + * + * @return + */ + String getName(); + + /** + * 构建 + * + * @return flinkxPluginPojo + */ + Map build(FlinkxRdbmsPojo flinkxPluginPojo); + + + /** + * hive json构建 + * @param flinkxHivePojo + * @return + */ + Map 
buildHive(FlinkxHivePojo flinkxHivePojo); + + /** + * hbase json构建 + * @param flinkxHbasePojo + * @return + */ + Map buildHbase(FlinkxHbasePojo flinkxHbasePojo); + + /** + * mongodb json构建 + * @param flinkxMongoDBPojo + * @return + */ + Map buildMongoDB(FlinkxMongoDBPojo flinkxMongoDBPojo); + + /** + * 获取示例 + * + * @return + */ + Map sample(); + + + List getColumn(List columns); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/JsonInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/JsonInterface.java new file mode 100644 index 0000000..cb66910 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/JsonInterface.java @@ -0,0 +1,36 @@ +package com.czsj.bigdata.tool.flinkx; + +import java.util.Map; + +/** + * 构建 com.czsj json的基础接口 + * + * @author jingwk + * @ClassName FlinkxJsonHelper + * @Version 2.1.1 + * @since 2022/03/14 12:24 + */ +public interface JsonInterface { + + Map buildJob(); + + Map buildSetting(); + + Map buildContent(); + + Map buildReader(); + + Map buildHiveReader(); + + Map buildHiveWriter(); + + Map buildHBaseReader(); + + Map buildHBaseWriter(); + + Map buildMongoDBReader(); + + Map buildMongoDBWriter(); + + Map buildWriter(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/SeatunnelJsonHelper.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/SeatunnelJsonHelper.java new file mode 100644 index 0000000..28c2d4f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/SeatunnelJsonHelper.java @@ -0,0 +1,415 @@ +package com.czsj.bigdata.tool.flinkx; + + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.czsj.bigdata.dto.*; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.flinkx.reader.*; +import com.czsj.bigdata.tool.flinkx.writer.*; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.FlinkxRdbmsPojo; +import com.czsj.bigdata.util.JdbcConstants; +import com.czsj.core.util.Constants; +import lombok.Data; +import org.apache.commons.lang3.StringUtils; +import org.springframework.util.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * JSON的构建类 + */ +@Data +public class SeatunnelJsonHelper implements JsonInterface { + + /** + * 读取的表,根据flinkx示例,支持多个表(先不考虑,后面再去实现, 这里先用list保存吧) + *

+ * Name(s) of the target table(s). Writing one or more tables is supported; when multiple tables are configured, all of them must share the same structure. + */ + private List readerTables; + /** + * Columns to read + */ + private List readerColumns; + /** + * reader jdbc datasource + */ + private JobDatasource readerDatasource; + /** + * writer jdbc datasource + */ + private JobDatasource writerDatasource; + /** + * Tables to write + */ + private List writerTables; + /** + * Columns to write + */ + private List writerColumns; + + private Map buildReader; + + private Map buildWriter; + + private BaseFlinkxPlugin readerPlugin; + + private BaseFlinkxPlugin writerPlugin; + + private HiveReaderDto hiveReaderDto; + + private HiveWriterDto hiveWriterDto; + + private HbaseReaderDto hbaseReaderDto; + + private HbaseWriterDto hbaseWriterDto; + + private RdbmsReaderDto rdbmsReaderDto; + + private RdbmsWriterDto rdbmsWriterDto; + + private MongoDBReaderDto mongoDBReaderDto; + + private MongoDBWriterDto mongoDBWriterDto; + + private ClickhouseReaderDto clickhouseReaderDto; + + private ClickhouseWriterDto clickhouseWriterDto; + + + // holds extra parameters + private Map extraParams = Maps.newHashMap(); + + public void initReader(JsonBuildDto flinkxJsonDto, JobDatasource readerDatasource) { + + this.readerDatasource = readerDatasource; + this.readerTables = flinkxJsonDto.getReaderTables(); + this.readerColumns = flinkxJsonDto.getReaderColumns(); + this.hiveReaderDto = flinkxJsonDto.getHiveReader(); + this.rdbmsReaderDto = flinkxJsonDto.getRdbmsReader(); + this.hbaseReaderDto = flinkxJsonDto.getHbaseReader(); + this.clickhouseReaderDto = flinkxJsonDto.getClickhouseReader(); + // reader plugin + String datasource = readerDatasource.getDatasource(); + +// this.readerColumns = convertKeywordsColumns(datasource, this.readerColumns); + if (JdbcConstants.MYSQL.equals(datasource)) { + readerPlugin = new MysqlReader(); + buildReader = buildReader(); + } else if (JdbcConstants.ORACLE.equals(datasource)) { + readerPlugin = new OracleReader(); + buildReader = buildReader(); + } else if (JdbcConstants.HANA.equals(datasource)) { + readerPlugin = new HanaReader(); + buildReader = buildReader(); + } else if (JdbcConstants.SQL_SERVER.equals(datasource)) { + readerPlugin = new SqlServerReader(); + buildReader = buildReader(); + } else if (JdbcConstants.POSTGRESQL.equals(datasource)) { + readerPlugin = new PostgresqlReader(); + buildReader = buildReader(); + } else if (JdbcConstants.CLICKHOUSE.equals(datasource)) { + readerPlugin = new ClickHouseReader(); + buildReader = buildReader(); + } else if (JdbcConstants.HIVE.equals(datasource)) { + readerPlugin = new HiveReader(); + buildReader = buildHiveReader(); + } else if (JdbcConstants.HBASE.equals(datasource)) { + readerPlugin = new HBaseReader(); + buildReader = buildHBaseReader(); + } else if (JdbcConstants.MONGODB.equals(datasource)) { + readerPlugin = new MongoDBReader(); + buildReader = buildMongoDBReader(); + } + } + + public void initWriter(JsonBuildDto flinkxJsonDto, JobDatasource readerDatasource) { + this.writerDatasource = readerDatasource; + this.writerTables = flinkxJsonDto.getWriterTables(); + this.writerColumns = flinkxJsonDto.getWriterColumns(); + this.hiveWriterDto = flinkxJsonDto.getHiveWriter(); + this.rdbmsWriterDto = flinkxJsonDto.getRdbmsWriter(); + this.hbaseWriterDto = flinkxJsonDto.getHbaseWriter(); + this.mongoDBWriterDto = flinkxJsonDto.getMongoDBWriter(); + // writer + String datasource = readerDatasource.getDatasource(); +// this.writerColumns = convertKeywordsColumns(datasource, this.writerColumns); + if (JdbcConstants.MYSQL.equals(datasource)) { + writerPlugin = new MysqlWriter(); + buildWriter =
this.buildWriter(); + } else if (JdbcConstants.ORACLE.equals(datasource)) { + writerPlugin = new OraclelWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.HANA.equals(datasource)) { + writerPlugin = new HanaWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.SQL_SERVER.equals(datasource)) { + writerPlugin = new SqlServerlWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.POSTGRESQL.equals(datasource)) { + writerPlugin = new PostgresqllWriter(); + buildWriter = this.buildWriter(); + } else if (JdbcConstants.CLICKHOUSE.equals(datasource)) { + writerPlugin = new ClickHouseWriter(); + buildWriter = buildWriter(); + } else if (JdbcConstants.HIVE.equals(datasource)) { + writerPlugin = new HiveWriter(); + buildWriter = this.buildHiveWriter(); + } else if (JdbcConstants.HBASE.equals(datasource)) { + writerPlugin = new HBaseWriter(); + buildWriter = this.buildHBaseWriter(); + } else if (JdbcConstants.MONGODB.equals(datasource)) { + writerPlugin = new MongoDBWriter(); + buildWriter = this.buildMongoDBWriter(); + } + } + + private List convertKeywordsColumns(String datasource, List columns) { + if (columns == null) { + return null; + } + + List toColumns = new ArrayList<>(); + columns.forEach(s -> { + toColumns.add(doConvertKeywordsColumn(datasource, s)); + }); + return toColumns; + } + + private String doConvertKeywordsColumn(String dbType, String column) { + if (column == null) { + return null; + } + + column = column.trim(); + column = column.replace("[", ""); + column = column.replace("]", ""); + column = column.replace("`", ""); + column = column.replace("\"", ""); + column = column.replace("'", ""); + + switch (dbType) { + case JdbcConstants.MYSQL: + return String.format("`%s`", column); + case JdbcConstants.SQL_SERVER: + return String.format("[%s]", column); + case JdbcConstants.POSTGRESQL: + case JdbcConstants.ORACLE: + return String.format("\"%s\"", column); + case JdbcConstants.HANA: + return String.format("\"%s\"", column); + default: + return column; + } + } + + @Override + public Map buildJob() { + Map res = Maps.newLinkedHashMap(); + Map jobMap = Maps.newLinkedHashMap(); + jobMap.put("setting", buildSetting()); + jobMap.put("content", ImmutableList.of(buildContent())); + res.put("job", jobMap); + return res; + } + + @Override + public Map buildSetting() { + Map res = Maps.newLinkedHashMap(); + Map speedMap = Maps.newLinkedHashMap(); + Map errorLimitMap = Maps.newLinkedHashMap(); + + Map restoreMap = Maps.newLinkedHashMap(); + Map logMap = Maps.newLinkedHashMap(); + speedMap.putAll(ImmutableMap.of("channel", 1, "bytes", 0)); + errorLimitMap.putAll(ImmutableMap.of("record", 100)); + restoreMap.putAll(ImmutableMap.of("maxRowNumForCheckpoint", 0,"isRestore",false,"restoreColumnName","","restoreColumnIndex",0)); + logMap.putAll(ImmutableMap.of("isLogger", false,"level","debug","path","","pattern","")); + res.put("speed", speedMap); + res.put("errorLimit", errorLimitMap); + res.put("restore",restoreMap); + res.put("log",logMap); + return res; + } + + @Override + public Map buildContent() { + Map res = Maps.newLinkedHashMap(); + res.put("reader", this.buildReader); + res.put("writer", this.buildWriter); + return res; + } + + @Override + public Map buildReader() { + FlinkxRdbmsPojo flinkxPluginPojo = new FlinkxRdbmsPojo(); + flinkxPluginPojo.setJobDatasource(readerDatasource); + flinkxPluginPojo.setTables(readerTables); + List> columns = Lists.newArrayList(); + readerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + 
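    // Aside on doConvertKeywordsColumn above: it strips any existing quoting
    // characters and re-quotes for the target dialect. Pairs implied directly
    // by its switch (illustrative):
    //   MYSQL:       id     -> `id`
    //   SQL_SERVER:  `id`   -> [id]
    //   ORACLE/HANA: [name] -> "name"   (double-quoted)
    //   other types: returned trimmed and unquoted
    // Note that both convertKeywordsColumns call sites are currently commented
    // out, so this normalization is not applied. The buildReader lambda resumes below.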
column.put("name", c.split(Constants.SPLIT_SCOLON)[0]); +// column.put("index", Integer.parseInt(c.split(Constants.SPLIT_SCOLON)[0])); +// column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxPluginPojo.setRdbmsColumns(columns); + flinkxPluginPojo.setSplitPk(rdbmsReaderDto.getReaderSplitPk()); + if (StringUtils.isNotBlank(rdbmsReaderDto.getQuerySql())) { + flinkxPluginPojo.setQuerySql(rdbmsReaderDto.getQuerySql()); + } + //where + if (StringUtils.isNotBlank(rdbmsReaderDto.getWhereParams())) { + flinkxPluginPojo.setWhereParam(rdbmsReaderDto.getWhereParams()); + } + return readerPlugin.build(flinkxPluginPojo); + } + + @Override + public Map buildHiveReader() { + FlinkxHivePojo flinkxHivePojo = new FlinkxHivePojo(); + flinkxHivePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + readerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[1]); + column.put("index", Integer.parseInt(c.split(Constants.SPLIT_SCOLON)[0])); + column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxHivePojo.setColumns(columns); + flinkxHivePojo.setReaderDefaultFS(hiveReaderDto.getReaderDefaultFS()); + flinkxHivePojo.setReaderFieldDelimiter(hiveReaderDto.getReaderFieldDelimiter()); + flinkxHivePojo.setReaderFileType(hiveReaderDto.getReaderFileType()); + flinkxHivePojo.setReaderPath(hiveReaderDto.getReaderPath()); + flinkxHivePojo.setSkipHeader(hiveReaderDto.getReaderSkipHeader()); + return readerPlugin.buildHive(flinkxHivePojo); + } + + @Override + public Map buildHBaseReader() { + FlinkxHbasePojo flinkxHbasePojo = new FlinkxHbasePojo(); + flinkxHbasePojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + for (String readerColumn : readerColumns) { + Map column = Maps.newLinkedHashMap(); + column.put("name", readerColumn); + column.put("type", "string"); + columns.add(column); + } + flinkxHbasePojo.setColumns(columns); + flinkxHbasePojo.setReaderHbaseConfig(readerDatasource.getZkAdress()); + String readerTable=!CollectionUtils.isEmpty(readerTables)?readerTables.get(0):Constants.STRING_BLANK; + flinkxHbasePojo.setReaderTable(readerTable); + flinkxHbasePojo.setReaderMode(hbaseReaderDto.getReaderMode()); + flinkxHbasePojo.setReaderRange(hbaseReaderDto.getReaderRange()); + return readerPlugin.buildHbase(flinkxHbasePojo); + } + + @Override + public Map buildMongoDBReader() { + FlinkxMongoDBPojo flinkxMongoDBPojo = new FlinkxMongoDBPojo(); + flinkxMongoDBPojo.setJdbcDatasource(readerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(readerColumns, columns); + flinkxMongoDBPojo.setColumns(columns); + flinkxMongoDBPojo.setAddress(readerDatasource.getJdbcUrl()); + flinkxMongoDBPojo.setDbName(readerDatasource.getDatabaseName()); + flinkxMongoDBPojo.setReaderTable(readerTables.get(0)); + return readerPlugin.buildMongoDB(flinkxMongoDBPojo); + } + + + @Override + public Map buildWriter() { + FlinkxRdbmsPojo flinkxPluginPojo = new FlinkxRdbmsPojo(); + flinkxPluginPojo.setJobDatasource(writerDatasource); + flinkxPluginPojo.setTables(writerTables); + List> columns = Lists.newArrayList(); + writerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[0]); +// column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxPluginPojo.setRdbmsColumns(columns); + flinkxPluginPojo.setPreSql(rdbmsWriterDto.getPreSql()); + 
flinkxPluginPojo.setPostSql(rdbmsWriterDto.getPostSql()); + return writerPlugin.build(flinkxPluginPojo); + } + + @Override + public Map buildHiveWriter() { + FlinkxHivePojo flinkxHivePojo = new FlinkxHivePojo(); + flinkxHivePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + writerColumns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[1]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[2]); + columns.add(column); + }); + flinkxHivePojo.setColumns(columns); + flinkxHivePojo.setWriterDefaultFS(hiveWriterDto.getWriterDefaultFS()); + flinkxHivePojo.setWriteFieldDelimiter(hiveWriterDto.getWriteFieldDelimiter()); + flinkxHivePojo.setWriterFileType(hiveWriterDto.getWriterFileType()); + flinkxHivePojo.setWriterPath(hiveWriterDto.getWriterPath()); + flinkxHivePojo.setWriteMode(hiveWriterDto.getWriteMode()); + flinkxHivePojo.setWriterFileName(hiveWriterDto.getWriterFileName()); + return writerPlugin.buildHive(flinkxHivePojo); + } + + @Override + public Map buildHBaseWriter() { + FlinkxHbasePojo flinkxHbasePojo = new FlinkxHbasePojo(); + flinkxHbasePojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + for (int i = 0; i < writerColumns.size(); i++) { + Map column = Maps.newLinkedHashMap(); + column.put("index", i + 1); + column.put("name", writerColumns.get(i)); + column.put("type", "string"); + columns.add(column); + } + flinkxHbasePojo.setColumns(columns); + flinkxHbasePojo.setWriterHbaseConfig(writerDatasource.getZkAdress()); + String writerTable=!CollectionUtils.isEmpty(writerTables)?writerTables.get(0):Constants.STRING_BLANK; + flinkxHbasePojo.setWriterTable(writerTable); + flinkxHbasePojo.setWriterVersionColumn(hbaseWriterDto.getWriterVersionColumn()); + flinkxHbasePojo.setWriterRowkeyColumn(hbaseWriterDto.getWriterRowkeyColumn()); + flinkxHbasePojo.setWriterMode(hbaseWriterDto.getWriterMode()); + return writerPlugin.buildHbase(flinkxHbasePojo); + } + + + @Override + public Map buildMongoDBWriter() { + FlinkxMongoDBPojo flinkxMongoDBPojo = new FlinkxMongoDBPojo(); + flinkxMongoDBPojo.setJdbcDatasource(writerDatasource); + List> columns = Lists.newArrayList(); + buildColumns(writerColumns, columns); + flinkxMongoDBPojo.setColumns(columns); + flinkxMongoDBPojo.setAddress(writerDatasource.getJdbcUrl()); + flinkxMongoDBPojo.setDbName(writerDatasource.getDatabaseName()); + flinkxMongoDBPojo.setWriterTable(writerTables.get(0)); + flinkxMongoDBPojo.setUpsertInfo(mongoDBWriterDto.getUpsertInfo()); + return writerPlugin.buildMongoDB(flinkxMongoDBPojo); + } + + private void buildColumns(List columns, List> returnColumns) { + columns.forEach(c -> { + Map column = Maps.newLinkedHashMap(); + column.put("name", c.split(Constants.SPLIT_SCOLON)[0]); + column.put("type", c.split(Constants.SPLIT_SCOLON)[1]); + returnColumns.add(column); + }); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/BaseReaderPlugin.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/BaseReaderPlugin.java new file mode 100644 index 0000000..a73146b --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/BaseReaderPlugin.java @@ -0,0 +1,91 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import cn.hutool.core.util.StrUtil; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gson.JsonObject; +import com.czsj.bigdata.entity.JobDatasource; 
+import com.czsj.bigdata.tool.flinkx.BaseFlinkxPlugin; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.FlinkxRdbmsPojo; +import com.czsj.bigdata.util.AESUtil; +import com.czsj.core.util.Constants; +import org.apache.commons.lang3.StringUtils; + +import java.util.List; +import java.util.Map; + +/** + * Reader + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseReaderPlugin + * @Version 1.0 + * @since 2019/8/2 16:27 + */ +public abstract class BaseReaderPlugin extends BaseFlinkxPlugin { + + /** + * The default column form is ["column1","column2"]; override when it differs + * @param columns + * @return + */ + @Override + public List getColumn(List columns) { + List data = Lists.newArrayList(); + columns.forEach(c -> { + data.add(c.split(Constants.SPLIT_SCOLON)[0]); + }); + return data; + } + + @Override + public Map build(FlinkxRdbmsPojo plugin) { + // build + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map connectionObj = Maps.newLinkedHashMap(); + + JobDatasource jobDatasource = plugin.getJobDatasource(); + // decrypt the username and password + parameterObj.put("username", AESUtil.decrypt(jobDatasource.getJdbcUsername())); + parameterObj.put("password", AESUtil.decrypt(jobDatasource.getJdbcPassword())); + + // check whether a querySql is configured + if (StrUtil.isNotBlank(plugin.getQuerySql())) { + connectionObj.put("querySql", ImmutableList.of(plugin.getQuerySql())); + } else { + parameterObj.put("column", plugin.getRdbmsColumns()); + // check for a where clause + if (StringUtils.isNotBlank(plugin.getWhereParam())) { + parameterObj.put("where", plugin.getWhereParam()); + } + connectionObj.put("table", plugin.getTables()); + } + parameterObj.put("splitPk",plugin.getSplitPk()); + connectionObj.put("jdbcUrl", ImmutableList.of(jobDatasource.getJdbcUrl())); + + parameterObj.put("connection", ImmutableList.of(connectionObj)); + + readerObj.put("parameter", parameterObj); + + return readerObj; + } + + @Override + public Map buildHive(FlinkxHivePojo flinkxHivePojo) { + return null; + } + + @Override + public Map buildHbase(FlinkxHbasePojo flinkxHbasePojo) { return null; } + + @Override + public Map buildMongoDB(FlinkxMongoDBPojo flinkxMongoDBPojo) { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/ClickHouseReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/ClickHouseReader.java new file mode 100644 index 0000000..2a97c22 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/ClickHouseReader.java @@ -0,0 +1,16 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import java.util.Map; + +public class ClickHouseReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "clickhousereader"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/FlinkxReaderInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/FlinkxReaderInterface.java new file mode 100644 index 0000000..c8863c6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/FlinkxReaderInterface.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import com.czsj.bigdata.tool.flinkx.FlinkxPluginInterface; + +/** + * Interface for building readers + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/7/30
+ */ +public interface FlinkxReaderInterface extends FlinkxPluginInterface { + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HBaseReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HBaseReader.java new file mode 100644 index 0000000..2ba4217 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HBaseReader.java @@ -0,0 +1,42 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class HBaseReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "hbasereader"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildHbase(FlinkxHbasePojo plugin) { + // build + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map confige = Maps.newLinkedHashMap(); + confige.put("hbase.zookeeper.property.clientPort", plugin.getReaderHbaseConfig().split(":")[1]); + // confige.put("hbase.rootdir", plugin.getWriterHbaseConfig()); + confige.put("hbase.cluster.distributed", "true"); + confige.put("hbase.zookeeper.quorum", plugin.getReaderHbaseConfig().split(":")[0]); + confige.put("zookeeper.znode.parent", "/hbase"); + parameterObj.put("hbaseConfig", confige); + parameterObj.put("table", plugin.getReaderTable()); + parameterObj.put("mode", plugin.getReaderMode()); + parameterObj.put("column", plugin.getColumns()); + if(StringUtils.isNotBlank(plugin.getReaderRange().getStartRowkey()) && StringUtils.isNotBlank(plugin.getReaderRange().getEndRowkey())){ + parameterObj.put("range", plugin.getReaderRange()); + } + parameterObj.put("maxVersion", plugin.getReaderMaxVersion()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HanaReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HanaReader.java new file mode 100644 index 0000000..a0dd767 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HanaReader.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.tool.flinkx.reader; + + +import java.util.Map; + +/** + * Hana reader builder class + * + * @author zxl + * @version 1.0 + * @since 2022/10/15 + */ +public class HanaReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "saphanareader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HiveReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HiveReader.java new file mode 100644 index 0000000..2fddc5f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/HiveReader.java @@ -0,0 +1,42 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; + +import java.util.Map; + +/** + * hive reader builder class + * + * @author jingwk + * @version 2.0 + * @since 2022/01/05 + */ +public class HiveReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "hdfsreader"; + } + + @Override + public Map sample() { + return null; + } + + + @Override + public Map buildHive(FlinkxHivePojo plugin) { + // build + Map readerObj =
Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("path", plugin.getReaderPath()); + parameterObj.put("defaultFS", plugin.getReaderDefaultFS()); + parameterObj.put("fileType", plugin.getReaderFileType()); + parameterObj.put("fieldDelimiter", plugin.getReaderFieldDelimiter()); + parameterObj.put("skipHeader", plugin.getSkipHeader()); + parameterObj.put("column", plugin.getColumns()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MongoDBReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MongoDBReader.java new file mode 100644 index 0000000..811e237 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MongoDBReader.java @@ -0,0 +1,33 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; + +import java.util.Map; + +public class MongoDBReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "mongodbreader"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildMongoDB(FlinkxMongoDBPojo plugin) { + // build + JobDatasource dataSource = plugin.getJdbcDatasource(); + Map readerObj = Maps.newLinkedHashMap(); + readerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("url", dataSource.getJdbcUrl() + dataSource.getDatabaseName()); + parameterObj.put("database",dataSource.getDatabaseName()); + parameterObj.put("collectionName", plugin.getReaderTable()); + parameterObj.put("column", plugin.getColumns()); + readerObj.put("parameter", parameterObj); + return readerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MysqlReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MysqlReader.java new file mode 100644 index 0000000..6fde521 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/MysqlReader.java @@ -0,0 +1,25 @@ +package com.czsj.bigdata.tool.flinkx.reader; + + +import java.util.Map; + +/** + * mysql reader builder class + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MysqlReader + * @Version 1.0 + * @since 2019/7/30 23:07 + */ +public class MysqlReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "mysqlreader"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/OracleReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/OracleReader.java new file mode 100644 index 0000000..ddab9d4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/OracleReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import java.util.Map; + +/** + * oracle reader builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class OracleReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "oraclereader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/PostgresqlReader.java
b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/PostgresqlReader.java new file mode 100644 index 0000000..148f60e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/PostgresqlReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import java.util.Map; + +/** + * postgresql reader builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class PostgresqlReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "postgresqlreader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/SqlServerReader.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/SqlServerReader.java new file mode 100644 index 0000000..bb3fdb6 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/reader/SqlServerReader.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.flinkx.reader; + +import java.util.Map; + +/** + * sqlserver reader builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class SqlServerReader extends BaseReaderPlugin implements FlinkxReaderInterface { + @Override + public String getName() { + return "sqlserverreader"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/BaseWriterPlugin.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/BaseWriterPlugin.java new file mode 100644 index 0000000..a68fe28 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/BaseWriterPlugin.java @@ -0,0 +1,96 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.czsj.bigdata.util.AESUtil; +import com.czsj.bigdata.tool.flinkx.BaseFlinkxPlugin; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; +import com.czsj.bigdata.tool.pojo.FlinkxRdbmsPojo; +import com.czsj.core.util.Constants; +import com.czsj.bigdata.entity.JobDatasource; +import org.apache.commons.lang3.StringUtils; + +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * flinkx writer base + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseWriterPlugin + * @Version 1.0 + * @since 2019/8/2 16:28 + */ +public abstract class BaseWriterPlugin extends BaseFlinkxPlugin { + @Override + public Map build(FlinkxRdbmsPojo plugin) { + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + + Map parameterObj = Maps.newLinkedHashMap(); +// parameterObj.put("writeMode", "insert"); + JobDatasource jobDatasource = plugin.getJobDatasource(); + parameterObj.put("username", AESUtil.decrypt(jobDatasource.getJdbcUsername())); + parameterObj.put("password", AESUtil.decrypt(jobDatasource.getJdbcPassword())); + // write mode + parameterObj.put("writeMode", "insert"); + parameterObj.put("column", plugin.getRdbmsColumns()); + parameterObj.put("preSql", splitSql(plugin.getPreSql())); + parameterObj.put("postSql", splitSql(plugin.getPostSql())); + + Map connectionObj = Maps.newLinkedHashMap(); + connectionObj.put("table", plugin.getTables()); + connectionObj.put("jdbcUrl", jobDatasource.getJdbcUrl()); + +
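    // At this point build() has assembled everything except the connection list
    // added just below. Serialized, the writer block comes out roughly as
    // (shape read off the puts above; values illustrative):
    //   { "name": "<plugin name>",
    //     "parameter": { "username": "...", "password": "...",   // AES-decrypted
    //                    "writeMode": "insert", "column": [...],
    //                    "preSql": [...], "postSql": [...],
    //                    "connection": [ { "table": [...], "jdbcUrl": "..." } ] } }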
parameterObj.put("connection", ImmutableList.of(connectionObj)); + writerObj.put("parameter", parameterObj); + + return writerObj; + } + + private String[] splitSql(String sql) { + String[] sqlArr = null; + if (StringUtils.isNotBlank(sql)) { + Pattern p = Pattern.compile("\r\n|\r|\n|\n\r"); + Matcher m = p.matcher(sql); + String sqlStr = m.replaceAll(Constants.STRING_BLANK); + sqlArr = sqlStr.split(Constants.SPLIT_COLON); + } + return sqlArr; + } + + @Override + public Map buildHive(FlinkxHivePojo flinkxHivePojo) { + return null; + } + + + @Override + public Map buildHbase(FlinkxHbasePojo flinkxHbasePojo) { + return null; + } + + @Override + public Map buildMongoDB(FlinkxMongoDBPojo plugin) { + return null; + } + + /** + * 默认的字段是 ["column1","column2"],如果不同 则需要覆盖掉 + * @param columns + * @return + */ + @Override + public List getColumn(List columns) { + List data = Lists.newArrayList(); + columns.forEach(c -> { + data.add(c.split(Constants.SPLIT_SCOLON)[0]); + }); + return data; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/ClickHouseWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/ClickHouseWriter.java new file mode 100644 index 0000000..50bebd2 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/ClickHouseWriter.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import java.util.Map; + +public class ClickHouseWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "clickhousewriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/FlinkxWriterInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/FlinkxWriterInterface.java new file mode 100644 index 0000000..15f5374 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/FlinkxWriterInterface.java @@ -0,0 +1,15 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import com.czsj.bigdata.tool.flinkx.FlinkxPluginInterface; + +/** + * 用于构建writer的接口 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/7/30 + */ +public interface FlinkxWriterInterface extends FlinkxPluginInterface { + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HBaseWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HBaseWriter.java new file mode 100644 index 0000000..312b443 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HBaseWriter.java @@ -0,0 +1,43 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.FlinkxHbasePojo; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; + +public class HBaseWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "hbasewriter"; + } + + @Override + public Map sample() { + return null; + } + + public Map buildHbase(FlinkxHbasePojo plugin) { + //构建 + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + Map confige = Maps.newLinkedHashMap(); + + confige.put("hbase.zookeeper.property.clientPort", plugin.getWriterHbaseConfig().split(":")[1]); +// confige.put("hbase.rootdir", plugin.getWriterHbaseConfig()); + confige.put("hbase.cluster.distributed", "true"); + confige.put("hbase.zookeeper.quorum", 
plugin.getWriterHbaseConfig().split(":")[0]); + confige.put("zookeeper.znode.parent", "/hbase"); + parameterObj.put("hbaseConfig", confige); + parameterObj.put("table", plugin.getWriterTable()); + parameterObj.put("mode", plugin.getWriterMode()); + parameterObj.put("column", plugin.getColumns()); + parameterObj.put("rowkeyColumn", plugin.getWriterRowkeyColumn()); + if (StringUtils.isNotBlank(plugin.getWriterVersionColumn().getValue())) { + parameterObj.put("versionColumn", plugin.getWriterVersionColumn()); + } + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HanaWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HanaWriter.java new file mode 100644 index 0000000..0ba8fbd --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HanaWriter.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import java.util.Map; + +/** + * SAP HANA writer builder class + * + * @author zxl + * @version 1.0 + * @since 2022/10/15 + */ +public class HanaWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "saphanawriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HiveWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HiveWriter.java new file mode 100644 index 0000000..868b155 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/HiveWriter.java @@ -0,0 +1,43 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.tool.pojo.FlinkxHivePojo; + +import java.util.Map; + +/** + * hive writer builder class + * + * @author jingwk + * @version 2.0 + * @since 2022/01/05 + */ +public class HiveWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "hdfswriter"; + } + + + @Override + public Map sample() { + return null; + } + + @Override + public Map buildHive(FlinkxHivePojo plugin) { + Map writerObj = Maps.newLinkedHashMap(); + writerObj.put("name", getName()); + + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("defaultFS", plugin.getWriterDefaultFS()); + parameterObj.put("fileType", plugin.getWriterFileType()); + parameterObj.put("path", plugin.getWriterPath()); + parameterObj.put("fileName", plugin.getWriterFileName()); + parameterObj.put("writeMode", plugin.getWriteMode()); + parameterObj.put("fieldDelimiter", plugin.getWriteFieldDelimiter()); + parameterObj.put("column", plugin.getColumns()); + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MongoDBWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MongoDBWriter.java new file mode 100644 index 0000000..36f29c2 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MongoDBWriter.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import com.google.common.collect.Maps; +import com.czsj.bigdata.dto.UpsertInfo; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.pojo.FlinkxMongoDBPojo; + +import java.util.Map; + +public class MongoDBWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "mongodbwriter"; + } + + @Override + public Map sample()
{ + return null; + } + + + @Override + public Map buildMongoDB(FlinkxMongoDBPojo plugin) { + // build + Map writerObj = Maps.newLinkedHashMap(); + JobDatasource dataSource = plugin.getJdbcDatasource(); + writerObj.put("name", getName()); + Map parameterObj = Maps.newLinkedHashMap(); + parameterObj.put("url", dataSource.getJdbcUrl() + dataSource.getDatabaseName()); + parameterObj.put("database",dataSource.getDatabaseName()); + parameterObj.put("collectionName", plugin.getWriterTable()); + parameterObj.put("column", plugin.getColumns()); + parameterObj.put("writeMode", "insert"); + UpsertInfo upsert = plugin.getUpsertInfo(); + if (upsert != null) { + parameterObj.put("upsertInfo", upsert); + } + writerObj.put("parameter", parameterObj); + return writerObj; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MysqlWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MysqlWriter.java new file mode 100644 index 0000000..bc1a9f1 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/MysqlWriter.java @@ -0,0 +1,24 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import java.util.Map; + +/** + * mysql writer builder class + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MysqlWriter + * @Version 1.0 + * @since 2019/7/30 23:08 + */ +public class MysqlWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "mysqlwriter"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/OraclelWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/OraclelWriter.java new file mode 100644 index 0000000..946c353 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/OraclelWriter.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import java.util.Map; + +/** + * oracle writer builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class OraclelWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "oraclewriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/PostgresqllWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/PostgresqllWriter.java new file mode 100644 index 0000000..94a0678 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/PostgresqllWriter.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.tool.flinkx.writer; + +import java.util.Map; + +/** + * postgresql writer builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class PostgresqllWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "postgresqlwriter"; + } + + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/SqlServerlWriter.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/SqlServerlWriter.java new file mode 100644 index 0000000..d2c9144 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/flinkx/writer/SqlServerlWriter.java @@ -0,0 +1,23 @@ +package com.czsj.bigdata.tool.flinkx.writer; + + +import java.util.Map; + +/** + * sql server writer builder class + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since
2019/8/2 + */ +public class SqlServerlWriter extends BaseWriterPlugin implements FlinkxWriterInterface { + @Override + public String getName() { + return "sqlserverwriter"; + } + + @Override + public Map sample() { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/BaseDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/BaseDatabaseMeta.java new file mode 100644 index 0000000..496e851 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/BaseDatabaseMeta.java @@ -0,0 +1,62 @@ +package com.czsj.bigdata.tool.meta; + +/** + * meta information interface + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseDatabaseMeta + * @Version 1.0 + * @since 2019/7/17 15:45 + */ +public abstract class BaseDatabaseMeta implements DatabaseInterface { + + @Override + public String getSQLQueryFields(String tableName) { + return "SELECT * FROM " + tableName + " where 1=0"; + } + + @Override + public String getSQLQueryTablesNameComments() { + return "select table_name,table_comment from information_schema.tables where table_schema=?"; + } + + @Override + public String getSQLQueryTableNameComment() { + return "select table_name,table_comment from information_schema.tables where table_schema=? and table_name = ?"; + } + + @Override + public String getSQLQueryPrimaryKey() { + return null; + } + + @Override + public String getSQLQueryComment(String schemaName, String tableName, String columnName) { + return null; + } + + @Override + public String getSQLQueryColumns(String... args) { + return null; + } + + @Override + public String getMaxId(String tableName, String primaryKey) { + return String.format("select max(%s) from %s",primaryKey,tableName); + } + + @Override + public String getSQLQueryTableSchema(String... args) { + return null; + } + + @Override + public String getSQLQueryTables() { + return null; + } + + @Override + public String getSQLQueryTables(String... tableSchema) { + return null; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/ClickHouseDataBaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/ClickHouseDataBaseMeta.java new file mode 100644 index 0000000..7137d27 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/ClickHouseDataBaseMeta.java @@ -0,0 +1,19 @@ +package com.czsj.bigdata.tool.meta; + +public class ClickHouseDataBaseMeta extends BaseDatabaseMeta implements DatabaseInterface { + private volatile static ClickHouseDataBaseMeta single; + public static ClickHouseDataBaseMeta getInstance() { + if (single == null) { + synchronized (ClickHouseDataBaseMeta.class) { + if (single == null) { + single = new ClickHouseDataBaseMeta(); + } + } + } + return single; + } + @Override + public String getSQLQueryTables() { + return "show tables"; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java new file mode 100644 index 0000000..a50c920 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java @@ -0,0 +1,66 @@ +package com.czsj.bigdata.tool.meta; + +public interface DatabaseInterface { + + /** + * Returns the minimal SQL to launch in order to determine the layout of the resultset for a given database table + * + * @param tableName The name of the table to determine the layout for + * @return The SQL to launch.
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java
new file mode 100644
index 0000000..a50c920
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseInterface.java
@@ -0,0 +1,66 @@
+package com.czsj.bigdata.tool.meta;
+
+public interface DatabaseInterface {
+
+    /**
+     * Returns the minimal SQL to launch in order to determine the layout of the resultset for a given database table
+     *
+     * @param tableName The name of the table to determine the layout for
+     * @return The SQL to launch.
+     */
+    String getSQLQueryFields(String tableName);
+
+    /**
+     * SQL that returns the primary key column(s)
+     *
+     * @return
+     */
+    String getSQLQueryPrimaryKey();
+
+    String getSQLQueryTableNameComment();
+
+    String getSQLQueryTablesNameComments();
+
+    /**
+     * SQL that lists all table names of a schema
+     *
+     * @return
+     */
+    String getSQLQueryTables(String... tableSchema);
+
+    /**
+     * SQL that lists all table names
+     *
+     * @return
+     */
+    String getSQLQueryTables();
+
+    /**
+     * SQL that lists the table schemas
+     *
+     * @return
+     */
+    String getSQLQueryTableSchema(String... args);
+    /**
+     * SQL that lists all columns
+     *
+     * @return
+     */
+    String getSQLQueryColumns(String... args);
+
+    /**
+     * SQL that returns the table and column comments
+     *
+     * @return The SQL to launch.
+     */
+    String getSQLQueryComment(String schemaName, String tableName, String columnName);
+
+
+    /**
+     * SQL that returns the current max value of the table's primary key
+     * @param tableName
+     * @param primaryKey
+     * @return
+     */
+    String getMaxId(String tableName, String primaryKey);
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseMetaFactory.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseMetaFactory.java
new file mode 100644
index 0000000..6c1ef10
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/DatabaseMetaFactory.java
@@ -0,0 +1,36 @@
+package com.czsj.bigdata.tool.meta;
+
+
+import com.czsj.bigdata.util.JdbcConstants;
+
+/**
+ * Factory for database meta information
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName DatabaseMetaFactory
+ * @Version 1.0
+ * @since 2019/7/17 15:55
+ */
+public class DatabaseMetaFactory {
+
+    // return the meta implementation matching the database type
+    public static DatabaseInterface getByDbType(String dbType) {
+        if (JdbcConstants.MYSQL.equals(dbType)) {
+            return MySQLDatabaseMeta.getInstance();
+        } else if (JdbcConstants.ORACLE.equals(dbType)) {
+            return OracleDatabaseMeta.getInstance();
+        } else if (JdbcConstants.POSTGRESQL.equals(dbType)) {
+            return PostgresqlDatabaseMeta.getInstance();
+        } else if (JdbcConstants.SQL_SERVER.equals(dbType)) {
+            return SqlServerDatabaseMeta.getInstance();
+        } else if (JdbcConstants.HIVE.equals(dbType)) {
+            return HiveDatabaseMeta.getInstance();
+        } else if (JdbcConstants.CLICKHOUSE.equals(dbType)) {
+            return ClickHouseDataBaseMeta.getInstance();
+        } else if (JdbcConstants.HBASE20XSQL.equals(dbType)) {
+            return Hbase20xsqlMeta.getInstance();
+        } else {
+            throw new UnsupportedOperationException("Unsupported database type: ".concat(dbType));
+        }
+    }
+}
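Taken together, the factory and the meta implementations resolve a database type to plain SQL strings. A minimal usage sketch (t_user is a made-up table name; MetaFactoryDemo is hypothetical):

import com.czsj.bigdata.tool.meta.DatabaseInterface;
import com.czsj.bigdata.tool.meta.DatabaseMetaFactory;
import com.czsj.bigdata.util.JdbcConstants;

public class MetaFactoryDemo {
    public static void main(String[] args) {
        DatabaseInterface meta = DatabaseMetaFactory.getByDbType(JdbcConstants.MYSQL);
        // "SELECT * FROM t_user where 1=0": an empty result set whose
        // metadata still describes every column of the table
        System.out.println(meta.getSQLQueryFields("t_user"));
        System.out.println(meta.getMaxId("t_user", "id")); // select max(id) from t_user
    }
}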
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HanaDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HanaDatabaseMeta.java
new file mode 100644
index 0000000..407d2a8
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HanaDatabaseMeta.java
@@ -0,0 +1,60 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Meta information queries for SAP HANA
+ *
+ * @author zxl
+ * @ClassName HanaDatabaseMeta
+ * @Version 1.0
+ * @since 2019/7/17 15:48
+ */
+public class HanaDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
+
+    private volatile static HanaDatabaseMeta single;
+
+    public static HanaDatabaseMeta getInstance() {
+        if (single == null) {
+            synchronized (HanaDatabaseMeta.class) {
+                if (single == null) {
+                    single = new HanaDatabaseMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+    @Override
+    public String getSQLQueryComment(String schemaName, String tableName, String columnName) {
+        return String.format("SELECT COMMENTS FROM public.table_columns where SCHEMA_NAME = '%s' and TABLE_NAME = '%s' and COLUMN_NAME = '%s'", schemaName, tableName, columnName);
+    }
+
+    @Override
+    public String getSQLQueryPrimaryKey() {
+        return "select column_name from public.table_columns where SCHEMA_NAME=? and TABLE_NAME=? and INDEX_TYPE = 'FULL'";
+    }
+
+    @Override
+    public String getSQLQueryTablesNameComments() {
+        return "select schema_name||'.'||table_name,comments from public.tables";
+    }
+
+    @Override
+    public String getSQLQueryTableNameComment() {
+        return "select schema_name||'.'||table_name,comments from public.tables where schema_name=? and table_name = ?";
+    }
+
+    @Override
+    public String getSQLQueryTables(String... tableSchema) {
+        return "select distinct schema_name||'.'||table_name from public.tables where schema_name='" + tableSchema[0] + "'";
+    }
+
+    @Override
+    public String getSQLQueryTableSchema(String... args) {
+        return "select distinct schema_name from public.tables";
+    }
+
+    @Override
+    public String getSQLQueryColumns(String... args) {
+        return "select column_name from public.table_columns where schema_name=? and table_name = ?";
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/Hbase20xsqlMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/Hbase20xsqlMeta.java
new file mode 100644
index 0000000..846eaa6
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/Hbase20xsqlMeta.java
@@ -0,0 +1,31 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Meta information queries for HBase 2.x / Phoenix
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName Hbase20xsqlMeta
+ * @Version 1.0
+ * @since 2019/7/17 15:48
+ */
+public class Hbase20xsqlMeta extends BaseDatabaseMeta implements DatabaseInterface {
+
+    private volatile static Hbase20xsqlMeta single;
+
+    public static Hbase20xsqlMeta getInstance() {
+        if (single == null) {
+            synchronized (Hbase20xsqlMeta.class) {
+                if (single == null) {
+                    single = new Hbase20xsqlMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+
+    @Override
+    public String getSQLQueryTables(String... tableSchema) {
+        return null;
+    }
+}
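Hbase20xsqlMeta deliberately returns null here: as Hbase20XsqlQueryTool later in this diff shows, table and column names for Phoenix are read through the standard JDBC DatabaseMetaData API instead. A sketch of that API (the Phoenix JDBC URL is a placeholder):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class PhoenixMetaSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:zk-host:2181")) {
            DatabaseMetaData md = conn.getMetaData();
            // same call Hbase20XsqlQueryTool.getTableNames makes
            try (ResultSet rs = md.getTables(conn.getCatalog(), null, "%", new String[]{"TABLE"})) {
                while (rs.next()) {
                    System.out.println(rs.getString("TABLE_NAME"));
                }
            }
        }
    }
}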
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HiveDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HiveDatabaseMeta.java
new file mode 100644
index 0000000..3e8fb5e
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/HiveDatabaseMeta.java
@@ -0,0 +1,31 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Hive meta information
+ *
+ * @author jingwk
+ * @ClassName HiveDatabaseMeta
+ * @Version 2.0
+ * @since 2022/01/05 15:45
+ */
+public class HiveDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
+    private volatile static HiveDatabaseMeta single;
+
+    public static HiveDatabaseMeta getInstance() {
+        if (single == null) {
+            synchronized (HiveDatabaseMeta.class) {
+                if (single == null) {
+                    single = new HiveDatabaseMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+    @Override
+    public String getSQLQueryTables() {
+        return "show tables";
+    }
+
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/MySQLDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/MySQLDatabaseMeta.java
new file mode 100644
index 0000000..f83e4c8
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/MySQLDatabaseMeta.java
@@ -0,0 +1,45 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Meta information queries for MySQL
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName MySQLDatabaseMeta
+ * @Version 1.0
+ * @since 2019/7/17 15:48
+ */
+public class MySQLDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
+
+    private volatile static MySQLDatabaseMeta single;
+
+    public static MySQLDatabaseMeta getInstance() {
+        if (single == null) {
+            synchronized (MySQLDatabaseMeta.class) {
+                if (single == null) {
+                    single = new MySQLDatabaseMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+    @Override
+    public String getSQLQueryComment(String schemaName, String tableName, String columnName) {
+        return String.format("SELECT COLUMN_COMMENT FROM information_schema.COLUMNS where TABLE_SCHEMA = '%s' and TABLE_NAME = '%s' and COLUMN_NAME = '%s'", schemaName, tableName, columnName);
+    }
+
+    @Override
+    public String getSQLQueryPrimaryKey() {
+        return "select column_name from information_schema.columns where table_schema=? and table_name=? and column_key = 'PRI'";
+    }
+
+    @Override
+    public String getSQLQueryTables() {
+        return "show tables";
+    }
+
+    @Override
+    public String getSQLQueryColumns(String... args) {
+        return "select column_name from information_schema.columns where table_schema=? and table_name=?";
+    }
+}
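The two ? placeholders in the MySQL primary-key query are bound with the schema and table name by BaseQueryTool further down in this diff. Spelled out with plain JDBC (connection settings are made up):

import com.czsj.bigdata.tool.meta.MySQLDatabaseMeta;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class MySqlPrimaryKeySketch {
    public static void main(String[] args) throws Exception {
        String sql = MySQLDatabaseMeta.getInstance().getSQLQueryPrimaryKey();
        try (Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/demo", "root", "secret");
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, "demo");   // table_schema
            ps.setString(2, "t_user"); // table_name
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getString(1)); // key column name(s)
                }
            }
        }
    }
}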
and table_name=?"; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/OracleDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/OracleDatabaseMeta.java new file mode 100644 index 0000000..e6256f4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/OracleDatabaseMeta.java @@ -0,0 +1,71 @@ +package com.czsj.bigdata.tool.meta; +/** + * Oracle数据库 meta信息查询 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MySQLDatabaseMeta + * @Version 1.0 + * @since 2019/7/17 15:48 + */ +public class OracleDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface { + + private volatile static OracleDatabaseMeta single; + + public static OracleDatabaseMeta getInstance() { + if (single == null) { + synchronized (OracleDatabaseMeta.class) { + if (single == null) { + single = new OracleDatabaseMeta(); + } + } + } + return single; + } + + + @Override + public String getSQLQueryComment(String schemaName, String tableName, String columnName) { + return String.format("select B.comments \n" + + " from user_tab_columns A, user_col_comments B\n" + + " where a.COLUMN_NAME = b.column_name\n" + + " and A.Table_Name = B.Table_Name\n" + + " and A.Table_Name = upper('%s')\n" + + " AND A.column_name = '%s'", tableName, columnName); + } + + @Override + public String getSQLQueryPrimaryKey() { + return "select cu.column_name from user_cons_columns cu, user_constraints au where cu.constraint_name = au.constraint_name and au.owner = ? and au.constraint_type = 'P' and au.table_name = ?"; + } + + @Override + public String getSQLQueryTablesNameComments() { + return "select table_name,comments from user_tab_comments"; + } + + @Override + public String getSQLQueryTableNameComment() { + return "select table_name,comments from user_tab_comments where table_name = ?"; + } + + @Override + public String getSQLQueryTables(String... tableSchema) { + return "select table_name from dba_tables where owner='" + tableSchema[0] + "'"; + } + + @Override + public String getSQLQueryTableSchema(String... args) { + return "select username from sys.dba_users"; + } + + + @Override + public String getSQLQueryTables() { + return "select table_name from user_tab_comments"; + } + + @Override + public String getSQLQueryColumns(String... 
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/PostgresqlDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/PostgresqlDatabaseMeta.java
new file mode 100644
index 0000000..fb2646c
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/PostgresqlDatabaseMeta.java
@@ -0,0 +1,60 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Meta information queries for PostgreSQL
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName PostgresqlDatabaseMeta
+ * @Version 1.0
+ * @since 2019/8/2 11:02
+ */
+public class PostgresqlDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
+
+    private volatile static PostgresqlDatabaseMeta single;
+
+    public static PostgresqlDatabaseMeta getInstance() {
+        if (single == null) {
+            synchronized (PostgresqlDatabaseMeta.class) {
+                if (single == null) {
+                    single = new PostgresqlDatabaseMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+    @Override
+    public String getSQLQueryPrimaryKey() {
+        // bind schema and table instead of the hard-coded test table that was here
+        return "select column_name from information_schema.columns where table_schema=? and table_name=? and is_identity = 'YES'";
+    }
+
+    @Override
+    public String getSQLQueryTables() {
+        return "select relname as tabname from pg_class c \n"
+                + "where relkind = 'r' and relname not like 'pg_%' and relname not like 'sql_%' group by relname order by relname limit 500";
+    }
+
+
+    @Override
+    public String getSQLQueryTables(String... tableSchema) {
+        return "SELECT concat_ws('.',\"table_schema\",\"table_name\") FROM information_schema.tables \n"
+                + "where (\"table_name\" not like 'pg_%' AND \"table_name\" not like 'sql_%') \n"
+                + "and table_type='BASE TABLE' and table_schema='" + tableSchema[0] + "'";
+    }
+
+    @Override
+    public String getSQLQueryTableSchema(String... args) {
+        return "select table_schema FROM information_schema.tables where \"table_name\" not like 'pg_%' or \"table_name\" not like 'sql_%' group by table_schema;";
+    }
+
+    @Override
+    public String getSQLQueryColumns(String... args) {
+        return "SELECT a.attname as name \n"
+                + "FROM pg_class as c,pg_attribute as a where c.relname = ? and a.attrelid = c.oid and a.attnum>0";
+    }
+
+    @Override
+    public String getSQLQueryComment(String schemaName, String tableName, String columnName) {
+        return null;
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/SqlServerDatabaseMeta.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/SqlServerDatabaseMeta.java
new file mode 100644
index 0000000..26ab60f
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/meta/SqlServerDatabaseMeta.java
@@ -0,0 +1,43 @@
+package com.czsj.bigdata.tool.meta;
+
+/**
+ * Meta information queries for SQL Server
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName SqlServerDatabaseMeta
+ * @Version 1.0
+ * @since 2019/8/2 15:45
+ */
+public class SqlServerDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
+    private volatile static SqlServerDatabaseMeta single;
+
+    public static SqlServerDatabaseMeta getInstance() {
+        if (single == null) {
+            synchronized (SqlServerDatabaseMeta.class) {
+                if (single == null) {
+                    single = new SqlServerDatabaseMeta();
+                }
+            }
+        }
+        return single;
+    }
+
+    @Override
+    public String getSQLQueryTables() {
+        return "SELECT Name FROM SysObjects Where XType='U' ORDER BY Name";
+    }
+
+    @Override
+    public String getSQLQueryTables(String...
tableSchema) { + return "select schema_name(schema_id)+'.'+object_name(object_id) from sys.objects \n" + + "where type ='U' \n" + + "and schema_name(schema_id) ='" + tableSchema[0] + "'"; + + } + + @Override + public String getSQLQueryTableSchema(String... args) { + return "select distinct schema_name(schema_id) from sys.objects where type ='U';"; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHbasePojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHbasePojo.java new file mode 100644 index 0000000..ae9f916 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHbasePojo.java @@ -0,0 +1,42 @@ +package com.czsj.bigdata.tool.pojo; + + +import com.czsj.bigdata.dto.Range; +import com.czsj.bigdata.dto.VersionColumn; +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +@Data +public class DataxHbasePojo { + + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + + private String readerHbaseConfig; + + private String readerTable; + + private String readerMode; + + private String readerMaxVersion; + + private Range readerRange; + + private String writerHbaseConfig; + + private String writerTable; + + private String writerMode; + + private VersionColumn writerVersionColumn; + + private String writerRowkeyColumn; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHivePojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHivePojo.java new file mode 100644 index 0000000..9b61d7f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxHivePojo.java @@ -0,0 +1,51 @@ +package com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName DataxHivePojo + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class DataxHivePojo { + + /** + * hive列名 + */ + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + private String readerPath; + + private String readerDefaultFS; + + private String readerFileType; + + private String readerFieldDelimiter; + + private String writerDefaultFS; + + private String writerFileType; + + private String writerPath; + + private String writerFileName; + + private String writeMode; + + private String writeFieldDelimiter; + + private Boolean skipHeader; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxMongoDBPojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxMongoDBPojo.java new file mode 100644 index 0000000..9ceba93 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxMongoDBPojo.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.dto.UpsertInfo; +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName DataxMongoDBPojo + * @Version 2.0 + * @since 2022/03/14 11:15 + */ +@Data +public class DataxMongoDBPojo { + + /** + * hive列名 + */ + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + private String address; + + private String dbName; + + private String readerTable; + + private String writerTable; + + private UpsertInfo upsertInfo; + +} \ No newline at end of file diff --git 
a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxRdbmsPojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxRdbmsPojo.java new file mode 100644 index 0000000..86e4a3f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/DataxRdbmsPojo.java @@ -0,0 +1,58 @@ +package com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName DataxRdbmsPojo + * @Version 2.0 + * @since 2022/01/11 15:19 + */ +@Data +public class DataxRdbmsPojo { + + /** + * 表名 + */ + private List tables; + + /** + * 列名 + */ + private List rdbmsColumns; + + /** + * 数据源信息 + */ + private JobDatasource jobDatasource; + + /** + * querySql 属性,如果指定了,则优先于columns参数 + */ + private String querySql; + + /** + * preSql 属性 + */ + private String preSql; + + /** + * postSql 属性 + */ + private String postSql; + + /** + * 切分主键 + */ + private String splitPk; + + /** + * where + */ + private String whereParam; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHbasePojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHbasePojo.java new file mode 100644 index 0000000..fe29642 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHbasePojo.java @@ -0,0 +1,41 @@ +package com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.dto.Range; +import com.czsj.bigdata.dto.VersionColumn; +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +@Data +public class FlinkxHbasePojo { + + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + + private String readerHbaseConfig; + + private String readerTable; + + private String readerMode; + + private String readerMaxVersion; + + private Range readerRange; + + private String writerHbaseConfig; + + private String writerTable; + + private String writerMode; + + private VersionColumn writerVersionColumn; + + private String writerRowkeyColumn; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHivePojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHivePojo.java new file mode 100644 index 0000000..b2ff3ad --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxHivePojo.java @@ -0,0 +1,51 @@ +package com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName FlinkxHivePojo + * @Version 2.0 + * @since 2022/01/11 17:15 + */ +@Data +public class FlinkxHivePojo { + + /** + * hive列名 + */ + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + private String readerPath; + + private String readerDefaultFS; + + private String readerFileType; + + private String readerFieldDelimiter; + + private String writerDefaultFS; + + private String writerFileType; + + private String writerPath; + + private String writerFileName; + + private String writeMode; + + private String writeFieldDelimiter; + + private Boolean skipHeader; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxMongoDBPojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxMongoDBPojo.java new file mode 100644 index 0000000..b694a6c --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxMongoDBPojo.java @@ -0,0 +1,41 @@ +package 
com.czsj.bigdata.tool.pojo; + +import com.czsj.bigdata.dto.UpsertInfo; +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; +import java.util.Map; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName FlinkxMongoDBPojo + * @Version 2.0 + * @since 2022/03/14 11:15 + */ +@Data +public class FlinkxMongoDBPojo { + + /** + * hive列名 + */ + private List> columns; + + /** + * 数据源信息 + */ + private JobDatasource jdbcDatasource; + + private String address; + + private String dbName; + + private String readerTable; + + private String writerTable; + + private UpsertInfo upsertInfo; + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxRdbmsPojo.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxRdbmsPojo.java new file mode 100644 index 0000000..92a9c53 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/pojo/FlinkxRdbmsPojo.java @@ -0,0 +1,59 @@ +package com.czsj.bigdata.tool.pojo; + + +import com.czsj.bigdata.entity.JobDatasource; +import lombok.Data; + +import java.util.List; + +/** + * 用于传参,构建json + * + * @author jingwk + * @ClassName FlinkxRdbmsPojo + * @Version 2.0 + * @since 2022/01/11 15:19 + */ +@Data +public class FlinkxRdbmsPojo { + + /** + * 表名 + */ + private List tables; + + /** + * 列名 + */ + private Object rdbmsColumns; + + /** + * 数据源信息 + */ + private JobDatasource jobDatasource; + + /** + * querySql 属性,如果指定了,则优先于columns参数 + */ + private String querySql; + + /** + * preSql 属性 + */ + private String preSql; + + /** + * postSql 属性 + */ + private String postSql; + + /** + * 切分主键 + */ + private String splitPk; + + /** + * where + */ + private String whereParam; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/BaseQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/BaseQueryTool.java new file mode 100644 index 0000000..01cb945 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/BaseQueryTool.java @@ -0,0 +1,515 @@ +package com.czsj.bigdata.tool.query; + +import cn.hutool.core.util.StrUtil; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.czsj.bigdata.core.util.LocalCacheUtil; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.bigdata.tool.database.ColumnInfo; +import com.czsj.bigdata.tool.database.DasColumn; +import com.czsj.bigdata.tool.database.TableInfo; +import com.czsj.bigdata.tool.meta.DatabaseInterface; +import com.czsj.bigdata.tool.meta.DatabaseMetaFactory; +import com.czsj.bigdata.util.AESUtil; +import com.czsj.bigdata.util.JdbcConstants; +import com.czsj.bigdata.util.JdbcUtils; +import com.czsj.core.util.Constants; +import com.zaxxer.hikari.HikariDataSource; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.sql.DataSource; +import java.sql.*; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +/** + * 抽象查询工具 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName BaseQueryTool + * @Version 1.0 + * @since 2019/7/18 9:22 + */ +public abstract class BaseQueryTool implements QueryToolInterface { + + protected static final Logger logger = LoggerFactory.getLogger(BaseQueryTool.class); + /** + * 用于获取查询语句 + */ + private DatabaseInterface sqlBuilder; + + private DataSource datasource; + + private Connection connection; + /** + * 当前数据库名 + */ + private String currentSchema; + private String currentDatabase; + + /** + * 构造方法 + * + * @param 
jobDatasource + */ + BaseQueryTool(JobDatasource jobDatasource) throws SQLException { + if (LocalCacheUtil.get(jobDatasource.getDatasourceName()) == null) { + getDataSource(jobDatasource); + } else { + this.connection = (Connection) LocalCacheUtil.get(jobDatasource.getDatasourceName()); + if (!this.connection.isValid(500)) { + LocalCacheUtil.remove(jobDatasource.getDatasourceName()); + getDataSource(jobDatasource); + } + } + sqlBuilder = DatabaseMetaFactory.getByDbType(jobDatasource.getDatasource()); + currentSchema = getSchema(jobDatasource.getJdbcUsername()); + currentDatabase = jobDatasource.getDatasource(); + LocalCacheUtil.set(jobDatasource.getDatasourceName(), this.connection, 4 * 60 * 60 * 1000); + } + + private void getDataSource(JobDatasource jobDatasource) throws SQLException { + String userName = AESUtil.decrypt(jobDatasource.getJdbcUsername()); + + //这里默认使用 hikari 数据源 + HikariDataSource dataSource = new HikariDataSource(); + dataSource.setUsername(userName); + dataSource.setPassword(AESUtil.decrypt(jobDatasource.getJdbcPassword())); + dataSource.setJdbcUrl(jobDatasource.getJdbcUrl()); + dataSource.setDriverClassName(jobDatasource.getJdbcDriverClass()); + dataSource.setMaximumPoolSize(1); + dataSource.setMinimumIdle(0); + dataSource.setConnectionTimeout(30000); + this.datasource = dataSource; + this.connection = this.datasource.getConnection(); + } + + //根据connection获取schema + private String getSchema(String jdbcUsername) { + String res = null; + try { + res = connection.getCatalog(); + } catch (SQLException e) { + try { + res = connection.getSchema(); + } catch (SQLException e1) { + logger.error("[SQLException getSchema Exception] --> " + + "the exception message is:" + e1.getMessage()); + } + logger.error("[getSchema Exception] --> " + + "the exception message is:" + e.getMessage()); + } + // 如果res是null,则将用户名当作 schema + if (StrUtil.isBlank(res) && StringUtils.isNotBlank(jdbcUsername)) { + res = jdbcUsername.toUpperCase(); + } + return res; + } + + @Override + public TableInfo buildTableInfo(String tableName) { + //获取表信息 + List> tableInfos = this.getTableInfo(tableName); + if (tableInfos.isEmpty()) { + throw new NullPointerException("查询出错! 
"); + } + + TableInfo tableInfo = new TableInfo(); + //表名,注释 + List tValues = new ArrayList(tableInfos.get(0).values()); + + tableInfo.setName(StrUtil.toString(tValues.get(0))); + tableInfo.setComment(StrUtil.toString(tValues.get(1))); + + + //获取所有字段 + List fullColumn = getColumns(tableName); + tableInfo.setColumns(fullColumn); + + //获取主键列 + List primaryKeys = getPrimaryKeys(tableName); + logger.info("主键列为:{}", primaryKeys); + + //设置ifPrimaryKey标志 + fullColumn.forEach(e -> { + if (primaryKeys.contains(e.getName())) { + e.setIfPrimaryKey(true); + } else { + e.setIfPrimaryKey(false); + } + }); + return tableInfo; + } + + //无论怎么查,返回结果都应该只有表名和表注释,遍历map拿value值即可 + @Override + public List> getTableInfo(String tableName) { + String sqlQueryTableNameComment = sqlBuilder.getSQLQueryTableNameComment(); + logger.info(sqlQueryTableNameComment); + List> res = null; + try { + res = JdbcUtils.executeQuery(connection, sqlQueryTableNameComment, ImmutableList.of(currentSchema, tableName)); + } catch (SQLException e) { + logger.error("[getTableInfo Exception] --> " + + "the exception message is:" + e.getMessage()); + } + return res; + } + + @Override + public List> getTables() { + String sqlQueryTables = sqlBuilder.getSQLQueryTables(); + logger.info(sqlQueryTables); + List> res = null; + try { + res = JdbcUtils.executeQuery(connection, sqlQueryTables, ImmutableList.of(currentSchema)); + } catch (SQLException e) { + logger.error("[getTables Exception] --> " + + "the exception message is:" + e.getMessage()); + } + return res; + } + + @Override + public List getColumns(String tableName) { + + List fullColumn = Lists.newArrayList(); + //获取指定表的所有字段 + try { + //获取查询指定表所有字段的sql语句 + String querySql = sqlBuilder.getSQLQueryFields(tableName); + logger.info("querySql: {}", querySql); + + //获取所有字段 + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(querySql); + ResultSetMetaData metaData = resultSet.getMetaData(); + + List dasColumns = buildDasColumn(tableName, metaData); + statement.close(); + + //构建 fullColumn + fullColumn = buildFullColumn(dasColumns); + + } catch (SQLException e) { + logger.error("[getColumns Exception] --> " + + "the exception message is:" + e.getMessage()); + } + return fullColumn; + } + + private List buildFullColumn(List dasColumns) { + List res = Lists.newArrayList(); + dasColumns.forEach(e -> { + ColumnInfo columnInfo = new ColumnInfo(); + columnInfo.setName(e.getColumnName()); + columnInfo.setComment(e.getColumnComment()); + columnInfo.setType(e.getColumnTypeName()); + columnInfo.setIfPrimaryKey(e.isIsprimaryKey()); + columnInfo.setIsnull(e.getIsNull()); + res.add(columnInfo); + }); + return res; + } + + //构建DasColumn对象 + private List buildDasColumn(String tableName, ResultSetMetaData metaData) { + List res = Lists.newArrayList(); + try { + int columnCount = metaData.getColumnCount(); + for (int i = 1; i <= columnCount; i++) { + DasColumn dasColumn = new DasColumn(); + dasColumn.setColumnClassName(metaData.getColumnClassName(i)); + dasColumn.setColumnTypeName(metaData.getColumnTypeName(i)); + dasColumn.setColumnName(metaData.getColumnName(i)); + dasColumn.setIsNull(metaData.isNullable(i)); + + res.add(dasColumn); + } + + Statement statement = connection.createStatement(); + + if (currentDatabase.equals(JdbcConstants.MYSQL) || currentDatabase.equals(JdbcConstants.ORACLE)) { + DatabaseMetaData databaseMetaData = connection.getMetaData(); + + ResultSet resultSet = databaseMetaData.getPrimaryKeys(null, null, tableName); + + while (resultSet.next()) 
{
+                    String name = resultSet.getString("COLUMN_NAME");
+                    res.forEach(e -> {
+                        if (e.getColumnName().equals(name)) {
+                            e.setIsprimaryKey(true);
+                        }
+                    });
+                }
+
+                res.forEach(e -> {
+                    String sqlQueryComment = sqlBuilder.getSQLQueryComment(currentSchema, tableName, e.getColumnName());
+                    // fetch the column comment
+                    try {
+                        ResultSet resultSetComment = statement.executeQuery(sqlQueryComment);
+                        while (resultSetComment.next()) {
+                            e.setColumnComment(resultSetComment.getString(1));
+                        }
+                        JdbcUtils.close(resultSetComment);
+                    } catch (SQLException e1) {
+                        logger.error("[buildDasColumn executeQuery Exception] --> "
+                                + "the exception message is:" + e1.getMessage());
+                    }
+                });
+            }
+
+            JdbcUtils.close(statement);
+        } catch (SQLException e) {
+            logger.error("[buildDasColumn Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        }
+        return res;
+    }
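+
+    /*
+     * Note: DatabaseMetaData.getPrimaryKeys returns one row per column of the
+     * key, so a composite key drives several passes over the loop above. The
+     * flag is therefore only ever set to true on a match; resetting the
+     * non-matching columns to false on every row, as the code previously did,
+     * would erase matches made by earlier rows.
+     */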
+
+    // fetch the primary key(s) of the given table; there may be several, hence a list
+    private List<String> getPrimaryKeys(String tableName) {
+        List<String> res = Lists.newArrayList();
+        String sqlQueryPrimaryKey = sqlBuilder.getSQLQueryPrimaryKey();
+        try {
+            List<Map<String, Object>> pkColumns = JdbcUtils.executeQuery(connection, sqlQueryPrimaryKey, ImmutableList.of(currentSchema, tableName));
+            // only the key names are needed
+            pkColumns.forEach(e -> res.add((String) new ArrayList<>(e.values()).get(0)));
+        } catch (SQLException e) {
+            logger.error("[getPrimaryKeys Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        }
+        return res;
+    }
+
+    @Override
+    public List<String> getColumnNames(String tableName, String datasource) {
+
+        List<String> res = Lists.newArrayList();
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            // SQL that selects every column of the table
+            String querySql = sqlBuilder.getSQLQueryFields(tableName);
+            logger.info("querySql: {}", querySql);
+
+            // fetch all columns
+            stmt = connection.createStatement();
+            rs = stmt.executeQuery(querySql);
+            ResultSetMetaData metaData = rs.getMetaData();
+
+            int columnCount = metaData.getColumnCount();
+            for (int i = 1; i <= columnCount; i++) {
+                String columnName = metaData.getColumnName(i);
+                if (JdbcConstants.HIVE.equals(datasource)) {
+                    if (columnName.contains(Constants.SPLIT_POINT)) {
+                        res.add(i - 1 + Constants.SPLIT_SCOLON + columnName.substring(columnName.indexOf(Constants.SPLIT_POINT) + 1) + Constants.SPLIT_SCOLON + metaData.getColumnTypeName(i));
+                    } else {
+                        res.add(i - 1 + Constants.SPLIT_SCOLON + columnName + Constants.SPLIT_SCOLON + metaData.getColumnTypeName(i));
+                    }
+                } else {
+                    res.add(columnName);
+                }
+
+            }
+        } catch (SQLException e) {
+            logger.error("[getColumnNames Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+        return res;
+    }
+
+    @Override
+    public List<String> getTableNames(String tableSchema) {
+        List<String> tables = new ArrayList<>();
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            stmt = connection.createStatement();
+            // build the SQL
+            String sql = getSQLQueryTables(tableSchema);
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                String tableName = rs.getString(1);
+                tables.add(tableName);
+            }
+            tables.sort(Comparator.naturalOrder());
+        } catch (SQLException e) {
+            logger.error("[getTableNames Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+        return tables;
+    }
+
+    @Override
+    public List<String> getTableNames() {
+        List<String> tables = new ArrayList<>();
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            stmt = connection.createStatement();
+            // build the SQL
+            String sql = getSQLQueryTables();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                String tableName = rs.getString(1);
+                tables.add(tableName);
+            }
+        } catch (SQLException e) {
+            logger.error("[getTableNames Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+        return tables;
+    }
+
+    public Boolean dataSourceTest() {
+        try {
+            DatabaseMetaData metaData = connection.getMetaData();
+            if (metaData.getDatabaseProductName().length() > 0) {
+                return true;
+            }
+        } catch (SQLException e) {
+            logger.error("[dataSourceTest Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        }
+        return false;
+    }
+
+
+    protected String getSQLQueryTables(String tableSchema) {
+        return sqlBuilder.getSQLQueryTables(tableSchema);
+    }
+
+    /**
+     * Subclasses that need no extra parameters can skip overriding this.
+     *
+     * @return
+     */
+    protected String getSQLQueryTables() {
+        return sqlBuilder.getSQLQueryTables();
+    }
+
+    @Override
+    public List<String> getColumnsByQuerySql(String querySql) throws SQLException {
+
+        List<String> res = Lists.newArrayList();
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            querySql = querySql.replace(";", "");
+            // probe the statement by appending "where 1=0"
+            String sql = querySql.concat(" where 1=0");
+            // if the query already has a where clause, append "and 1=0" instead;
+            // look for "where" after the first ')', otherwise in the whole statement
+            if (querySql.contains(")")) {
+                if (querySql.substring(querySql.indexOf(")")).contains("where")) {
+                    sql = querySql.concat(" and 1=0");
+                }
+            } else {
+                if (querySql.contains("where")) {
+                    sql = querySql.concat(" and 1=0");
+                }
+            }
+            // fetch all columns
+            stmt = connection.createStatement();
+            rs = stmt.executeQuery(sql);
+            ResultSetMetaData metaData = rs.getMetaData();
+
+            int columnCount = metaData.getColumnCount();
+            for (int i = 1; i <= columnCount; i++) {
+                res.add(metaData.getColumnName(i));
+            }
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+        return res;
+    }
+
+    @Override
+    public long getMaxIdVal(String tableName, String primaryKey) {
+        Statement stmt = null;
+        ResultSet rs = null;
+        long maxVal = 0;
+        try {
+            stmt = connection.createStatement();
+            // build the SQL
+            String sql = getSQLMaxID(tableName, primaryKey);
+            rs = stmt.executeQuery(sql);
+            rs.next();
+            maxVal = rs.getLong(1);
+        } catch (SQLException e) {
+            logger.error("[getMaxIdVal Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+
+
+        return maxVal;
+    }
+
+    private String getSQLMaxID(String tableName, String primaryKey) {
+        return sqlBuilder.getMaxId(tableName, primaryKey);
+    }
+
+    public void executeCreateTableSql(String querySql) {
+        if (StringUtils.isBlank(querySql)) {
+            return;
+        }
+        Statement stmt = null;
+        try {
+            stmt = connection.createStatement();
+            stmt.executeUpdate(querySql);
+        } catch (SQLException e) {
+            logger.error("[executeCreateTableSql Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(stmt);
+        }
+    }
+
+    public List<String> getTableSchema() {
+        List<String> schemas = new ArrayList<>();
+        Statement stmt = null;
+        ResultSet rs = null;
+        try {
+            stmt = connection.createStatement();
+            // build the SQL
+            String sql = getSQLQueryTableSchema();
+            rs = stmt.executeQuery(sql);
+            while (rs.next()) {
+                String tableName = rs.getString(1);
+                schemas.add(tableName);
+            }
+        } catch (SQLException e) {
+            logger.error("[getTableSchema Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+            JdbcUtils.close(stmt);
+        }
+        return schemas;
+    }
+
+    protected String getSQLQueryTableSchema() {
+        return sqlBuilder.getSQLQueryTableSchema();
+    }
+}
diff --git
a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/ClickHouseQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/ClickHouseQueryTool.java new file mode 100644 index 0000000..36bb159 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/ClickHouseQueryTool.java @@ -0,0 +1,20 @@ +package com.czsj.bigdata.tool.query; + +import com.czsj.bigdata.entity.JobDatasource; + +import java.sql.SQLException; + +/** + * ClickHouse + */ + +public class ClickHouseQueryTool extends BaseQueryTool implements QueryToolInterface { + /** + * 构造方法 + * + * @param jobJdbcDatasource + */ + public ClickHouseQueryTool(JobDatasource jobJdbcDatasource) throws SQLException { + super(jobJdbcDatasource); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HBaseQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HBaseQueryTool.java new file mode 100644 index 0000000..feed772 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HBaseQueryTool.java @@ -0,0 +1,119 @@ +package com.czsj.bigdata.tool.query; + + + +import com.czsj.bigdata.core.util.LocalCacheUtil; +import com.czsj.bigdata.entity.JobDatasource; +import com.czsj.core.util.Constants; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.client.*; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + + +public class HBaseQueryTool { + + private Configuration conf = HBaseConfiguration.create(); + private ExecutorService pool = Executors.newScheduledThreadPool(2); + private Connection connection = null; + private Admin admin; + private Table table; + + public HBaseQueryTool(JobDatasource jobDatasource) throws IOException { + if (LocalCacheUtil.get(jobDatasource.getDatasourceName()) == null) { + getDataSource(jobDatasource); + } else { + connection = (Connection) LocalCacheUtil.get(jobDatasource.getDatasourceName()); + if (connection == null || connection.isClosed()) { + LocalCacheUtil.remove(jobDatasource.getDatasourceName()); + getDataSource(jobDatasource); + } + } + LocalCacheUtil.set(jobDatasource.getDatasourceName(), connection, 4 * 60 * 60 * 1000); + } + + private void getDataSource(JobDatasource jobDatasource) throws IOException { + String[] zkAdress = jobDatasource.getZkAdress().split(Constants.SPLIT_SCOLON); + conf.set("hbase.zookeeper.quorum", zkAdress[0]); + conf.set("hbase.zookeeper.property.clientPort", zkAdress[1]); + connection = ConnectionFactory.createConnection(conf, pool); + admin = connection.getAdmin(); + } + + // 关闭连接 + public void sourceClose() { + try { + if (admin != null) { + admin.close(); + } + if (null != connection) { + connection.close(); + } + if (table != null) { + table.close(); + } + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * 测试是否连接成功 + * + * @return + * @throws IOException + */ + public boolean dataSourceTest() throws IOException { + Admin admin = connection.getAdmin(); + HTableDescriptor[] tableDescriptor = admin.listTables(); + return tableDescriptor.length > 0; + } + + /** + * 获取HBase表名称 + * + * @return + * @throws IOException + */ + public List getTableNames() throws IOException { + List list = new ArrayList<>(); + Admin admin = connection.getAdmin(); + TableName[] names = admin.listTableNames(); + for (TableName name : names) { + list.add(name.getNameAsString()); + } + return list; + } + + /** + * 
Query all column families and columns of a table by its name
+     *
+     * @param tableName
+     * @return
+     * @throws IOException
+     */
+    public List<String> getColumns(String tableName) throws IOException {
+        List<String> list = new ArrayList<>();
+        table = connection.getTable(TableName.valueOf(tableName));
+        Scan scan = new Scan();
+        //Filter filter = new PageFilter(1);
+        //scan.setFilter(filter);
+        scan.getStartRow();
+        ResultScanner scanner = table.getScanner(scan);
+        Iterator<Result> it = scanner.iterator();
+        if (it.hasNext()) {
+            // HBase has no fixed schema; infer family:qualifier pairs from the first row
+            Result re = it.next();
+            List<Cell> listCells = re.listCells();
+            for (Cell cell : listCells) {
+                list.add(new String(CellUtil.cloneFamily(cell)) + ":" + new String(CellUtil.cloneQualifier(cell)));
+            }
+        }
+        return list;
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HanaQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HanaQueryTool.java
new file mode 100644
index 0000000..c832d18
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HanaQueryTool.java
@@ -0,0 +1,20 @@
+package com.czsj.bigdata.tool.query;
+
+import com.czsj.bigdata.entity.JobDatasource;
+
+import java.sql.SQLException;
+
+/**
+ * Query tool for SAP HANA databases
+ *
+ * @author zxl
+ * @ClassName HanaQueryTool
+ * @Version 1.0
+ * @since 2022/10/14 14:36
+ */
+public class HanaQueryTool extends BaseQueryTool implements QueryToolInterface {
+
+    public HanaQueryTool(JobDatasource jobDatasource) throws SQLException {
+        super(jobDatasource);
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/Hbase20XsqlQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/Hbase20XsqlQueryTool.java
new file mode 100644
index 0000000..15e2705
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/Hbase20XsqlQueryTool.java
@@ -0,0 +1,114 @@
+package com.czsj.bigdata.tool.query;
+
+import com.google.common.collect.Lists;
+import com.czsj.bigdata.core.util.LocalCacheUtil;
+import com.czsj.bigdata.entity.JobDatasource;
+import com.czsj.bigdata.util.JdbcUtils;
+
+
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * for HBase 2.X and Phoenix 5.X
+ */
+
+public class Hbase20XsqlQueryTool extends BaseQueryTool implements QueryToolInterface {
+    Connection conn = null;
+
+
+    /**
+     * Constructor
+     *
+     * @param jobJdbcDatasource
+     */
+    public Hbase20XsqlQueryTool(JobDatasource jobJdbcDatasource) throws SQLException {
+        super(jobJdbcDatasource);
+
+        if (LocalCacheUtil.get(jobJdbcDatasource.getDatasourceName()) == null) {
+            getDataSource(jobJdbcDatasource);
+        } else {
+            conn = (Connection) LocalCacheUtil.get(jobJdbcDatasource.getDatasourceName());
+            if (conn == null) {
+                LocalCacheUtil.remove(jobJdbcDatasource.getDatasourceName());
+            }
+        }
+        LocalCacheUtil.set(jobJdbcDatasource.getDatasourceName(), conn, 4 * 60 * 60 * 1000);
+
+    }
+
+    @Override
+    public List<String> getTableNames(String tableSchema) {
+        DatabaseMetaData metaData = null;
+        List<String> tables = new ArrayList<>();
+        ResultSet rs = null;
+        try {
+            metaData = conn.getMetaData();
+            rs = metaData.getTables(conn.getCatalog(), null, "%", new String[]{"TABLE"});
+            while (rs.next()) {
+                tables.add(rs.getString("TABLE_NAME"));
+            }
+
+        } catch (SQLException e) {
+            logger.error("[getTableNames Exception] --> "
+                    + "the exception message is:" + e.getMessage());
+        } finally {
+            JdbcUtils.close(rs);
+        }
+
+
+        return tables;
+    }
+
+    @Override
+    public List<String> getColumnNames(String tableName, String datasource) {
+        DatabaseMetaData metaData = null;
+        List<String> columnNames = Lists.newArrayList();
+        ResultSet rs = null;
+        try {
+            metaData =
conn.getMetaData(); + rs = metaData.getColumns(conn.getCatalog(), null, tableName, "%"); + while (rs.next()) { + columnNames.add(rs.getString("COLUMN_NAME")); + // 获取字段的数据类型 rs.getString("TYPE_NAME") + } + + } catch (SQLException e) { + logger.error("[getColumnNames Exception] --> " + + "the exception message is:" + e.getMessage()); + } finally { + JdbcUtils.close(rs); + } + + + return columnNames; + } + + + private static int getSize(ResultSet rs) { + try { + if (rs.getType() == ResultSet.TYPE_FORWARD_ONLY) { + return -1; + } + + rs.last(); + int total = rs.getRow(); + rs.beforeFirst(); + return total; + } catch (SQLException sqle) { + return -1; + } catch (AbstractMethodError ame) { + return -1; + } + } + + + private void getDataSource(JobDatasource jobDatasource) throws SQLException { + conn = DriverManager.getConnection(jobDatasource.getJdbcUrl()); + + + } + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HiveQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HiveQueryTool.java new file mode 100644 index 0000000..c2c6e49 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/HiveQueryTool.java @@ -0,0 +1,17 @@ +package com.czsj.bigdata.tool.query; + +import com.czsj.bigdata.entity.JobDatasource; +import java.sql.SQLException; + +/** + * hive + * + * @author wenkaijing + * @version 2.0 + * @since 2022/01/05 + */ +public class HiveQueryTool extends BaseQueryTool implements QueryToolInterface { + public HiveQueryTool(JobDatasource jobDatasource) throws SQLException { + super(jobDatasource); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MongoDBQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MongoDBQueryTool.java new file mode 100644 index 0000000..667ca53 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MongoDBQueryTool.java @@ -0,0 +1,155 @@ +package com.czsj.bigdata.tool.query; + + +import com.mongodb.*; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; +import com.czsj.bigdata.core.util.LocalCacheUtil; +import com.czsj.bigdata.entity.JobDatasource; +import org.apache.commons.lang3.StringUtils; +import org.bson.Document; + +import java.io.IOException; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + + +public class MongoDBQueryTool { + + + private static MongoClient connection = null; + private static MongoDatabase collections; + + + public MongoDBQueryTool(JobDatasource jobDatasource) throws IOException { + if (LocalCacheUtil.get(jobDatasource.getDatasourceName()) == null) { + getDataSource(jobDatasource); + } else { + connection = (MongoClient) LocalCacheUtil.get(jobDatasource.getDatasourceName()); + if (connection == null) { + LocalCacheUtil.remove(jobDatasource.getDatasourceName()); + getDataSource(jobDatasource); + } + } + LocalCacheUtil.set(jobDatasource.getDatasourceName(), connection, 4 * 60 * 60 * 1000); + } + + private void getDataSource(JobDatasource jobDatasource) throws IOException { + if (StringUtils.isBlank(jobDatasource.getJdbcUsername()) && StringUtils.isBlank(jobDatasource.getJdbcPassword())) { + connection = new MongoClient(new MongoClientURI(jobDatasource.getJdbcUrl())); + } else { + MongoCredential credential = MongoCredential.createCredential(jobDatasource.getJdbcUsername(), jobDatasource.getDatabaseName(), jobDatasource.getJdbcPassword().toCharArray()); + connection = new 
MongoClient(parseServerAddress(jobDatasource.getJdbcUrl()), Arrays.asList(credential)); + } + collections = connection.getDatabase(jobDatasource.getDatabaseName()); + } + + + // 关闭连接 + public static void sourceClose() { + if (connection != null) { + connection.close(); + } + } + + /** + * 获取DB名称列表 + * + * @return + */ + public List getDBNames() { + MongoIterable dbs = connection.listDatabaseNames(); + List dbNames = new ArrayList<>(); + dbs.forEach((Block) dbNames::add); + return dbNames; + } + + /** + * 测试是否连接成功 + * + * @return + */ + public boolean dataSourceTest(String dbName) { + collections = connection.getDatabase(dbName); + return collections.listCollectionNames().iterator().hasNext(); + } + + /** + * 获取Collection名称列表 + * + * @return + */ + public List getCollectionNames(String dbName) { + collections = connection.getDatabase(dbName); + List collectionNames = new ArrayList<>(); + collections.listCollectionNames().forEach((Block) collectionNames::add); + return collectionNames; + } + + /** + * 通过CollectionName查询列 + * + * @param collectionName + * @return + */ + public List getColumns(String collectionName) { + MongoCollection collection = collections.getCollection(collectionName); + Document document = collection.find(new BasicDBObject()).first(); + List list = new ArrayList<>(); + if (null == document || document.size() <= 0) { + return list; + } + document.forEach((k, v) -> { + if (null != v) { + String type = v.getClass().getSimpleName(); + list.add(k + ":" + type); + } + /*if ("Document".equals(type)) { + ((Document) v).forEach((k1, v1) -> { + String simpleName = v1.getClass().getSimpleName(); + }); + } */ + + }); + return list; + } + + /** + * 判断地址类型是否符合要求 + * + * @param addressList + * @return + */ + private static boolean isHostPortPattern(List addressList) { + for (Object address : addressList) { + String regex = "(\\S+):([0-9]+)"; + if (!((String) address).matches(regex)) { + return false; + } + } + return true; + } + + /** + * 转换为mongo地址协议 + * + * @param rawAddress + * @return + */ + private static List parseServerAddress(String rawAddress) throws UnknownHostException { + List addressList = new ArrayList<>(); + for (String address : Arrays.asList(rawAddress.split(","))) { + String[] tempAddress = address.split(":"); + try { + ServerAddress sa = new ServerAddress(tempAddress[0], Integer.parseInt(tempAddress[1])); + addressList.add(sa); + } catch (Exception e) { + throw new UnknownHostException(); + } + } + return addressList; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MySQLQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MySQLQueryTool.java new file mode 100644 index 0000000..64d429f --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/MySQLQueryTool.java @@ -0,0 +1,21 @@ +package com.czsj.bigdata.tool.query; + +import com.czsj.bigdata.entity.JobDatasource; + +import java.sql.SQLException; + +/** + * mysql数据库使用的查询工具 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName MySQLQueryTool + * @Version 1.0 + * @since 2019/7/18 9:31 + */ +public class MySQLQueryTool extends BaseQueryTool implements QueryToolInterface { + + public MySQLQueryTool(JobDatasource jobDatasource) throws SQLException { + super(jobDatasource); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/OracleQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/OracleQueryTool.java new file mode 100644 index 0000000..868b815 --- /dev/null +++ 
b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/OracleQueryTool.java
@@ -0,0 +1,20 @@
+package com.czsj.bigdata.tool.query;
+
+import com.czsj.bigdata.entity.JobDatasource;
+
+import java.sql.SQLException;
+
+/**
+ * Query tool for Oracle databases
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName OracleQueryTool
+ * @Version 1.0
+ * @since 2019/7/18 9:31
+ */
+public class OracleQueryTool extends BaseQueryTool implements QueryToolInterface {
+
+    public OracleQueryTool(JobDatasource jobDatasource) throws SQLException {
+        super(jobDatasource);
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/PostgresqlQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/PostgresqlQueryTool.java
new file mode 100644
index 0000000..3f91583
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/PostgresqlQueryTool.java
@@ -0,0 +1,20 @@
+package com.czsj.bigdata.tool.query;
+
+import com.czsj.bigdata.entity.JobDatasource;
+
+import java.sql.SQLException;
+
+/**
+ * Query tool for PostgreSQL databases
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName PostgresqlQueryTool
+ * @Version 1.0
+ * @since 2019/8/2 11:28
+ */
+public class PostgresqlQueryTool extends BaseQueryTool implements QueryToolInterface {
+    public PostgresqlQueryTool(JobDatasource jobDatasource) throws SQLException {
+        super(jobDatasource);
+    }
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolFactory.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolFactory.java
new file mode 100644
index 0000000..390a97f
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolFactory.java
@@ -0,0 +1,100 @@
+package com.czsj.bigdata.tool.query;
+
+import com.czsj.bigdata.entity.JobDatasource;
+import com.czsj.bigdata.util.JdbcConstants;
+import com.czsj.bigdata.util.RdbmsException;
+
+import java.sql.SQLException;
+
+/**
+ * Factory utility that returns query tool instances
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @ClassName QueryToolFactory
+ * @Version 1.0
+ * @since 2019/7/18 9:36
+ */
+public class QueryToolFactory {
+
+    public static BaseQueryTool getByDbType(JobDatasource jobDatasource) {
+        // resolve the db type
+        String datasource = jobDatasource.getDatasource();
+        if (JdbcConstants.MYSQL.equals(datasource)) {
+            return getMySQLQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.ORACLE.equals(datasource)) {
+            return getOracleQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.POSTGRESQL.equals(datasource)) {
+            return getPostgresqlQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.SQL_SERVER.equals(datasource)) {
+            return getSqlserverQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.HIVE.equals(datasource)) {
+            return getHiveQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.CLICKHOUSE.equals(datasource)) {
+            return getClickHouseQueryToolInstance(jobDatasource);
+        } else if (JdbcConstants.HBASE20XSQL.equals(datasource)) {
+            return getHbase20XsqlQueryToolQueryToolInstance(jobDatasource);
+        }
+        throw new UnsupportedOperationException("Unsupported datasource type: ".concat(datasource));
+    }
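A hedged usage sketch of the factory; the setter names assume JobDatasource is a Lombok-style entity with setters matching the getters used above, and the connection values are made up:

import com.czsj.bigdata.entity.JobDatasource;
import com.czsj.bigdata.tool.query.BaseQueryTool;
import com.czsj.bigdata.tool.query.QueryToolFactory;
import com.czsj.bigdata.util.JdbcConstants;

public class QueryToolDemo {
    public static void main(String[] args) {
        JobDatasource ds = new JobDatasource();
        ds.setDatasourceName("demo-mysql");
        ds.setDatasource(JdbcConstants.MYSQL);
        ds.setJdbcUrl("jdbc:mysql://localhost:3306/demo");
        // Credentials are stored AES-encrypted; BaseQueryTool decrypts them internally.
        BaseQueryTool tool = QueryToolFactory.getByDbType(ds);
        tool.getTableNames().forEach(System.out::println);
    }
}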
+
+    private static BaseQueryTool getMySQLQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new MySQLQueryTool(jdbcDatasource);
+        } catch (Exception e) {
+            throw RdbmsException.asConnException(JdbcConstants.MYSQL,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static BaseQueryTool getOracleQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new OracleQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.ORACLE,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static BaseQueryTool getPostgresqlQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new PostgresqlQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.POSTGRESQL,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static BaseQueryTool getSqlserverQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new SqlServerQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.SQL_SERVER,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static BaseQueryTool getHiveQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new HiveQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.HIVE,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static BaseQueryTool getClickHouseQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new ClickHouseQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.CLICKHOUSE, e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+
+    private static Hbase20XsqlQueryTool getHbase20XsqlQueryToolQueryToolInstance(JobDatasource jdbcDatasource) {
+        try {
+            return new Hbase20XsqlQueryTool(jdbcDatasource);
+        } catch (SQLException e) {
+            throw RdbmsException.asConnException(JdbcConstants.HBASE20XSQL,
+                    e, jdbcDatasource.getJdbcUsername(), jdbcDatasource.getDatasourceName());
+        }
+    }
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolInterface.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolInterface.java
new file mode 100644
index 0000000..4edd256
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/QueryToolInterface.java
@@ -0,0 +1,87 @@
+package com.czsj.bigdata.tool.query;
+
+import com.czsj.bigdata.tool.database.ColumnInfo;
+import com.czsj.bigdata.tool.database.TableInfo;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base query interface
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version 1.0
+ * @since 2019/7/18
+ */
+public interface QueryToolInterface {
+    /**
+     * Build a TableInfo object
+     *
+     * @param tableName the table name
+     * @return
+     */
+    TableInfo buildTableInfo(String tableName);
+
+    /**
+     * Fetch the info of the given table
+     *
+     * @return
+     */
+    List<Map<String, Object>> getTableInfo(String tableName);
+
+    /**
+     * List all tables of the current schema
+     *
+     * @return
+     */
+    List<Map<String, Object>> getTables();
+
+    /**
+     * List all columns of a table
+     *
+     * @param tableName
+     * @return
+     */
+    List<ColumnInfo> getColumns(String tableName);
+
+
+    /**
+     * List all column names of a table (names only)
+     *
+     * @param tableName
+     * @return
+     */
+    List<String> getColumnNames(String tableName, String datasource);
+
+
+    /**
+     * List all available table names of a schema
+     *
+     * @return
+     */
+    List<String> getTableNames(String schema);
+
+    /**
+     * List all available table names
+     *
+     * @return
+     */
+    List<String> getTableNames();
+
+    /**
+     * Derive the column names from a query sql
+     * @param querySql
+     * @return
+     */
+    List<String> getColumnsByQuerySql(String querySql) throws SQLException;
+
+    /**
+     * Get the current max id of a table
+     * @param tableName
+     * @param primaryKey
+     * @return
+     */
+    long getMaxIdVal(String tableName, String primaryKey);
+
+}
diff --git
a/czsj-system/src/main/java/com/czsj/bigdata/tool/query/SqlServerQueryTool.java b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/SqlServerQueryTool.java new file mode 100644 index 0000000..ad69255 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/tool/query/SqlServerQueryTool.java @@ -0,0 +1,18 @@ +package com.czsj.bigdata.tool.query; + +import com.czsj.bigdata.entity.JobDatasource; + +import java.sql.SQLException; + +/** + * sql server + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/8/2 + */ +public class SqlServerQueryTool extends BaseQueryTool implements QueryToolInterface { + public SqlServerQueryTool(JobDatasource jobDatasource) throws SQLException { + super(jobDatasource); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/AESUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/util/AESUtil.java new file mode 100644 index 0000000..476cc27 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/AESUtil.java @@ -0,0 +1,110 @@ +package com.czsj.bigdata.util; + +import com.czsj.bigdata.core.conf.JobAdminConfig; +import lombok.extern.slf4j.Slf4j; + +import javax.crypto.Cipher; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; +import java.security.Key; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.util.Base64; + +@Slf4j +public class AESUtil { + + private static String DEFAULT_CIPHER_ALGORITHM = "SHA1PRNG"; + private static String KEY_ALGORITHM = "AES"; + + /** + * 加密 + * + * @param key + * @param messBytes + * @return + */ + private static byte[] encrypt(Key key, byte[] messBytes) throws Exception { + if (key != null) { + + Cipher cipher = Cipher.getInstance(KEY_ALGORITHM); + cipher.init(Cipher.ENCRYPT_MODE, key); + return cipher.doFinal(messBytes); + } + return null; + } + + /** + * AES(256)解密 + * + * @param key + * @param cipherBytes + * @return + */ + private static byte[] decrypt(Key key, byte[] cipherBytes) throws Exception { + if (key != null) { + + Cipher cipher = Cipher.getInstance(KEY_ALGORITHM); + cipher.init(Cipher.DECRYPT_MODE, key); + return cipher.doFinal(cipherBytes); + } + return null; + } + + + /** + * 生成加密秘钥 + * + * @return + * @throws NoSuchAlgorithmException + */ + private static KeyGenerator getKeyGenerator() { + + String key = JobAdminConfig.getAdminConfig().getDataSourceAESKey(); + + KeyGenerator keygen = null; + try { + keygen = KeyGenerator.getInstance(KEY_ALGORITHM); + SecureRandom secureRandom = SecureRandom.getInstance(DEFAULT_CIPHER_ALGORITHM); + secureRandom.setSeed(key.getBytes()); + keygen.init(128, secureRandom); + } catch (NoSuchAlgorithmException e) { + log.warn("Get key generator error {}", e.getMessage()); + } + + return keygen; + } + + public static String encrypt(String message) { + try { + KeyGenerator keygen = getKeyGenerator(); + SecretKey secretKey = new SecretKeySpec(keygen.generateKey().getEncoded(), KEY_ALGORITHM); + return Base64.getEncoder().encodeToString(encrypt(secretKey, message.getBytes(StandardCharsets.UTF_8))); + } catch (Exception e) { + log.warn("content encrypt error {}", e.getMessage()); + } + return null; + } + + public static String decrypt(String ciphertext) { + try { + KeyGenerator keygen = getKeyGenerator(); + SecretKey secretKey = new SecretKeySpec(keygen.generateKey().getEncoded(), KEY_ALGORITHM); + return new String(decrypt(secretKey, Base64.getDecoder().decode(ciphertext)), StandardCharsets.UTF_8); 
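+        // note: decrypt rebuilds the same AES key as encrypt by seeding SHA1PRNG with the configured
+        // key string, so round-tripping only works where SHA1PRNG is deterministic for a given seed
+        // (JDK-dependent behaviour; this is an observation about the scheme, not part of the original code)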
+ } catch (Exception e) { + log.warn("content decrypt error {}", e.getMessage()); + } + return null; + } + + public static void main(String[] args) { + String message = "root"; + String ciphertext = encrypt(message); + + System.out.println("加密后密文为: " + ciphertext); + System.out.println("解密后明文为:" + decrypt(ciphertext)); + } + +} \ No newline at end of file diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/CronUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/CronUtils.java new file mode 100644 index 0000000..78a2dbc --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/CronUtils.java @@ -0,0 +1,65 @@ +package com.czsj.bigdata.util; + + +import com.czsj.bigdata.core.cron.CronExpression; + +import java.text.ParseException; +import java.util.Date; + +/** + * cron表达式工具类 + * + * @author czsj + * + */ +public class CronUtils +{ + /** + * 返回一个布尔值代表一个给定的Cron表达式的有效性 + * + * @param cronExpression Cron表达式 + * @return boolean 表达式是否有效 + */ + public static boolean isValid(String cronExpression) + { + return CronExpression.isValidExpression(cronExpression); + } + + /** + * 返回一个字符串值,表示该消息无效Cron表达式给出有效性 + * + * @param cronExpression Cron表达式 + * @return String 无效时返回表达式错误描述,如果有效返回null + */ + public static String getInvalidMessage(String cronExpression) + { + try + { + new CronExpression(cronExpression); + return null; + } + catch (ParseException pe) + { + return pe.getMessage(); + } + } + + /** + * 返回下一个执行时间根据给定的Cron表达式 + * + * @param cronExpression Cron表达式 + * @return Date 下次Cron表达式执行时间 + */ + public static Date getNextExecution(String cronExpression) + { + try + { + CronExpression cron = new CronExpression(cronExpression); + return cron.getNextValidTimeAfter(new Date(System.currentTimeMillis())); + } + catch (ParseException e) + { + throw new IllegalArgumentException(e.getMessage()); + } + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/DBUtilErrorCode.java b/czsj-system/src/main/java/com/czsj/bigdata/util/DBUtilErrorCode.java new file mode 100644 index 0000000..7f1a29c --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/DBUtilErrorCode.java @@ -0,0 +1,96 @@ +package com.czsj.bigdata.util; + +/** + * DBUtilErrorCode + */ +public enum DBUtilErrorCode implements ErrorCode { + //连接错误 + MYSQL_CONN_USERPWD_ERROR("MYSQLErrCode-01","数据库用户名或者密码错误,请检查填写的账号密码或者联系DBA确认账号和密码是否正确"), + MYSQL_CONN_IPPORT_ERROR("MYSQLErrCode-02","数据库服务的IP地址或者Port错误,请检查填写的IP地址和Port或者联系DBA确认IP地址和Port是否正确。如果是同步中心用户请联系DBA确认idb上录入的IP和PORT信息和数据库的当前实际信息是一致的"), + MYSQL_CONN_DB_ERROR("MYSQLErrCode-03","数据库名称错误,请检查数据库实例名称或者联系DBA确认该实例是否存在并且在正常服务"), + + ORACLE_CONN_USERPWD_ERROR("ORACLEErrCode-01","数据库用户名或者密码错误,请检查填写的账号密码或者联系DBA确认账号和密码是否正确"), + ORACLE_CONN_IPPORT_ERROR("ORACLEErrCode-02","数据库服务的IP地址或者Port错误,请检查填写的IP地址和Port或者联系DBA确认IP地址和Port是否正确。如果是同步中心用户请联系DBA确认idb上录入的IP和PORT信息和数据库的当前实际信息是一致的"), + ORACLE_CONN_DB_ERROR("ORACLEErrCode-03","数据库名称错误,请检查数据库实例名称或者联系DBA确认该实例是否存在并且在正常服务"), + + //execute query错误 + MYSQL_QUERY_TABLE_NAME_ERROR("MYSQLErrCode-04","表不存在,请检查表名或者联系DBA确认该表是否存在"), + MYSQL_QUERY_SQL_ERROR("MYSQLErrCode-05","SQL语句执行出错,请检查Where条件是否存在拼写或语法错误"), + MYSQL_QUERY_COLUMN_ERROR("MYSQLErrCode-06","Column信息错误,请检查该列是否存在,如果是常量或者变量,请使用英文单引号’包起来"), + MYSQL_QUERY_SELECT_PRI_ERROR("MYSQLErrCode-07","读表数据出错,因为账号没有读表的权限,请联系DBA确认该账号的权限并授权"), + + ORACLE_QUERY_TABLE_NAME_ERROR("ORACLEErrCode-04","表不存在,请检查表名或者联系DBA确认该表是否存在"), + ORACLE_QUERY_SQL_ERROR("ORACLEErrCode-05","SQL语句执行出错,原因可能是你填写的列不存在或者where条件不符合要求,1,请检查该列是否存在,如果是常量或者变量,请使用英文单引号’包起来; 2,请检查Where条件是否存在拼写或语法错误"), + 
ORACLE_QUERY_SELECT_PRI_ERROR("ORACLEErrCode-06","读表数据出错,因为账号没有读表的权限,请联系DBA确认该账号的权限并授权"), + ORACLE_QUERY_SQL_PARSER_ERROR("ORACLEErrCode-07","SQL语法出错,请检查Where条件是否存在拼写或语法错误"), + + //PreSql,PostSql错误 + MYSQL_PRE_SQL_ERROR("MYSQLErrCode-08","PreSQL语法错误,请检查"), + MYSQL_POST_SQL_ERROR("MYSQLErrCode-09","PostSql语法错误,请检查"), + MYSQL_QUERY_SQL_PARSER_ERROR("MYSQLErrCode-10","SQL语法出错,请检查Where条件是否存在拼写或语法错误"), + + ORACLE_PRE_SQL_ERROR("ORACLEErrCode-08", "PreSQL语法错误,请检查"), + ORACLE_POST_SQL_ERROR("ORACLEErrCode-09", "PostSql语法错误,请检查"), + + //SplitPK 错误 + MYSQL_SPLIT_PK_ERROR("MYSQLErrCode-11","SplitPK错误,请检查"), + ORACLE_SPLIT_PK_ERROR("ORACLEErrCode-10","SplitPK错误,请检查"), + + //Insert,Delete 权限错误 + MYSQL_INSERT_ERROR("MYSQLErrCode-12","数据库没有写权限,请联系DBA"), + MYSQL_DELETE_ERROR("MYSQLErrCode-13","数据库没有Delete权限,请联系DBA"), + ORACLE_INSERT_ERROR("ORACLEErrCode-11","数据库没有写权限,请联系DBA"), + ORACLE_DELETE_ERROR("ORACLEErrCode-12","数据库没有Delete权限,请联系DBA"), + + JDBC_NULL("DBUtilErrorCode-20","JDBC URL为空,请检查配置"), + JDBC_OB10_ADDRESS_ERROR("DBUtilErrorCode-OB10-01","JDBC OB10格式错误,请联系askdatax"), + CONF_ERROR("DBUtilErrorCode-00", "您的配置错误."), + CONN_DB_ERROR("DBUtilErrorCode-10", "连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境)."), + GET_COLUMN_INFO_FAILED("DBUtilErrorCode-01", "获取表字段相关信息失败."), + UNSUPPORTED_TYPE("DBUtilErrorCode-12", "不支持的数据库类型. 请注意查看 DataX 已经支持的数据库类型以及数据库版本."), + COLUMN_SPLIT_ERROR("DBUtilErrorCode-13", "根据主键进行切分失败."), + SET_SESSION_ERROR("DBUtilErrorCode-14", "设置 session 失败."), + RS_ASYNC_ERROR("DBUtilErrorCode-15", "异步获取ResultSet next失败."), + + REQUIRED_VALUE("DBUtilErrorCode-03", "您缺失了必须填写的参数值."), + ILLEGAL_VALUE("DBUtilErrorCode-02", "您填写的参数值不合法."), + ILLEGAL_SPLIT_PK("DBUtilErrorCode-04", "您填写的主键列不合法, DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型."), + SPLIT_FAILED_ILLEGAL_SQL("DBUtilErrorCode-15", "DataX尝试切分表时, 执行数据库 Sql 失败. 请检查您的配置 table/splitPk/where 并作出修改."), + SQL_EXECUTE_FAIL("DBUtilErrorCode-06", "执行数据库 Sql 失败, 请检查您的配置的 column/table/where/querySql或者向 DBA 寻求帮助."), + + // only for reader + READ_RECORD_FAIL("DBUtilErrorCode-07", "读取数据库数据失败. 请检查您的配置的 column/table/where/querySql或者向 DBA 寻求帮助."), + TABLE_QUERYSQL_MIXED("DBUtilErrorCode-08", "您配置凌乱了. 不能同时既配置table又配置querySql"), + TABLE_QUERYSQL_MISSING("DBUtilErrorCode-09", "您配置错误. table和querySql 应该并且只能配置一个."), + + // only for writer + WRITE_DATA_ERROR("DBUtilErrorCode-05", "往您配置的写入表中写入数据时失败."), + NO_INSERT_PRIVILEGE("DBUtilErrorCode-11", "数据库没有写权限,请联系DBA"), + NO_DELETE_PRIVILEGE("DBUtilErrorCode-16", "数据库没有DELETE权限,请联系DBA"), + ; + + private final String code; + + private final String description; + + private DBUtilErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s]. 
", this.code, + this.description); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/DataXException.java b/czsj-system/src/main/java/com/czsj/bigdata/util/DataXException.java new file mode 100644 index 0000000..0db384a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/DataXException.java @@ -0,0 +1,61 @@ +package com.czsj.bigdata.util; + + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class DataXException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + private ErrorCode errorCode; + + public DataXException(ErrorCode errorCode, String errorMessage) { + super(errorCode.toString() + " - " + errorMessage); + this.errorCode = errorCode; + } + + private DataXException(ErrorCode errorCode, String errorMessage, Throwable cause) { + super(errorCode.toString() + " - " + getMessage(errorMessage) + " - " + getMessage(cause), cause); + + this.errorCode = errorCode; + } + + public static DataXException asDataXException(ErrorCode errorCode, String message) { + return new DataXException(errorCode, message); + } + + public static DataXException asDataXException(ErrorCode errorCode, String message, Throwable cause) { + if (cause instanceof DataXException) { + return (DataXException) cause; + } + return new DataXException(errorCode, message, cause); + } + + public static DataXException asDataXException(ErrorCode errorCode, Throwable cause) { + if (cause instanceof DataXException) { + return (DataXException) cause; + } + return new DataXException(errorCode, getMessage(cause), cause); + } + + public ErrorCode getErrorCode() { + return this.errorCode; + } + + private static String getMessage(Object obj) { + if (obj == null) { + return ""; + } + + if (obj instanceof Throwable) { + StringWriter str = new StringWriter(); + PrintWriter pw = new PrintWriter(str); + ((Throwable) obj).printStackTrace(pw); + return str.toString(); + // return ((Throwable) obj).getMessage(); + } else { + return obj.toString(); + } + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/DateFormatUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/DateFormatUtils.java new file mode 100644 index 0000000..c33c4d4 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/DateFormatUtils.java @@ -0,0 +1,22 @@ +package com.czsj.bigdata.util; + +import java.util.ArrayList; +import java.util.List; + +public class DateFormatUtils { + + public static final String DATE_FORMAT = "yyyy/MM/dd"; + public static final String DATETIME_FORMAT = "yyyy/MM/dd HH:mm:ss"; + public static final String TIME_FORMAT = "HH:mm:ss"; + public static final String TIMESTAMP = "Timestamp"; + + public static final List formatList() { + List formatList = new ArrayList<>(); + formatList.add(DATE_FORMAT); + formatList.add(TIME_FORMAT); + formatList.add(DATETIME_FORMAT); + formatList.add(TIMESTAMP); + return formatList; + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/DruidDataSource.java b/czsj-system/src/main/java/com/czsj/bigdata/util/DruidDataSource.java new file mode 100644 index 0000000..ff66f07 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/DruidDataSource.java @@ -0,0 +1,89 @@ +package com.czsj.bigdata.util; + +import com.alibaba.druid.pool.DruidPooledConnection; +import com.alibaba.fastjson.JSONObject; +import com.czsj.bigdata.entity.JobDatasource; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.*; + +/** + * + * + * @Date: 2022/2/1 
13:01 + * @Description: + **/ +public class DruidDataSource { + public static Object executeSql(JobDatasource datasource, String sql, Map map) { + DruidPooledConnection connection = null; + try { + connection = DruidDataSource.getPooledConnection(datasource); + PreparedStatement statement = connection.prepareStatement(sql); + //参数注入 + + Iterator iter = map.values().iterator(); + int j = 1; + while (iter.hasNext()) { + statement.setObject(j, iter.next()); + j = j + 1; + } + boolean hasResultSet = statement.execute(); + if (hasResultSet) { + ResultSet rs = statement.getResultSet(); + int columnCount = rs.getMetaData().getColumnCount(); + + List columns = new ArrayList<>(); + for (int i = 1; i <= columnCount; i++) { + String columnName = rs.getMetaData().getColumnLabel(i); + columns.add(columnName); + } + List list = new ArrayList<>(); + while (rs.next()) { + JSONObject jo = new JSONObject(); + columns.stream().forEach(t -> { + try { + Object value = rs.getObject(t); + jo.put(t, value); + } catch (SQLException throwables) { + throwables.printStackTrace(); + } + }); + list.add(jo); + } + return list; + } else { + int updateCount = statement.getUpdateCount(); + return updateCount + " rows affected"; + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + try { + if (connection != null) + connection.close(); + } catch (SQLException throwables) { + throwables.printStackTrace(); + } + } + } + + public static DruidPooledConnection getPooledConnection(JobDatasource jds) throws SQLException { + com.alibaba.druid.pool.DruidDataSource pool = getJdbcConnectionPool(jds); + DruidPooledConnection connection = pool.getConnection(); + return connection; + } + + public static com.alibaba.druid.pool.DruidDataSource getJdbcConnectionPool(JobDatasource ds) { + com.alibaba.druid.pool.DruidDataSource druidDataSource = new com.alibaba.druid.pool.DruidDataSource(); + druidDataSource.setName(ds.getDatasourceName()); + druidDataSource.setUrl(ds.getJdbcUrl()); + druidDataSource.setUsername(AESUtil.decrypt(ds.getJdbcUsername())); + druidDataSource.setPassword(AESUtil.decrypt(ds.getJdbcPassword())); + druidDataSource.setDriverClassName(ds.getJdbcDriverClass()); + druidDataSource.setConnectionErrorRetryAttempts(3); //失败后重连次数 + druidDataSource.setBreakAfterAcquireFailure(true); + return druidDataSource; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/ErrorCode.java b/czsj-system/src/main/java/com/czsj/bigdata/util/ErrorCode.java new file mode 100644 index 0000000..1f55244 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/ErrorCode.java @@ -0,0 +1,33 @@ +package com.czsj.bigdata.util; + +/** + * 尤其注意:最好提供toString()实现。例如: + * + *
+ * 
+ * @Override
+ * public String toString() {
+ * 	return String.format("Code:[%s], Description:[%s]. ", this.code, this.description);
+ * }
+ * 
+ * + */ +public interface ErrorCode { + // 错误码编号 + String getCode(); + + // 错误码描述 + String getDescription(); + + /** 必须提供toString的实现 + * + *
+	 * @Override
+	 * public String toString() {
+	 * 	return String.format("Code:[%s], Description:[%s]. ", this.code, this.description);
+	 * }
+	 * 
+ * + */ + String toString(); +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/FlinkXException.java b/czsj-system/src/main/java/com/czsj/bigdata/util/FlinkXException.java new file mode 100644 index 0000000..45c5c93 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/FlinkXException.java @@ -0,0 +1,61 @@ +package com.czsj.bigdata.util; + + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class FlinkXException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + private ErrorCode errorCode; + + public FlinkXException(ErrorCode errorCode, String errorMessage) { + super(errorCode.toString() + " - " + errorMessage); + this.errorCode = errorCode; + } + + private FlinkXException(ErrorCode errorCode, String errorMessage, Throwable cause) { + super(errorCode.toString() + " - " + getMessage(errorMessage) + " - " + getMessage(cause), cause); + + this.errorCode = errorCode; + } + + public static FlinkXException asFlinkXException(ErrorCode errorCode, String message) { + return new FlinkXException(errorCode, message); + } + + public static FlinkXException asFlinkXException(ErrorCode errorCode, String message, Throwable cause) { + if (cause instanceof FlinkXException) { + return (FlinkXException) cause; + } + return new FlinkXException(errorCode, message, cause); + } + + public static FlinkXException asFlinkXException(ErrorCode errorCode, Throwable cause) { + if (cause instanceof FlinkXException) { + return (FlinkXException) cause; + } + return new FlinkXException(errorCode, getMessage(cause), cause); + } + + public ErrorCode getErrorCode() { + return this.errorCode; + } + + private static String getMessage(Object obj) { + if (obj == null) { + return ""; + } + + if (obj instanceof Throwable) { + StringWriter str = new StringWriter(); + PrintWriter pw = new PrintWriter(str); + ((Throwable) obj).printStackTrace(pw); + return str.toString(); + // return ((Throwable) obj).getMessage(); + } else { + return obj.toString(); + } + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/IPUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/IPUtils.java new file mode 100644 index 0000000..9b80494 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/IPUtils.java @@ -0,0 +1,51 @@ +package com.czsj.bigdata.util; + +import javax.servlet.http.HttpServletRequest; +import java.net.InetAddress; +import java.net.UnknownHostException; + +/** + * + * + * @Date: 2022/4/2 22:08 + * @Description: + **/ +public class IPUtils { + public static String getIpAddr(HttpServletRequest request) { + String ipAddress = null; + try { + ipAddress = request.getHeader("x-forwarded-for"); + if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) { + ipAddress = request.getHeader("Proxy-Client-IP"); + } + if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) { + ipAddress = request.getHeader("WL-Proxy-Client-IP"); + } + if (ipAddress == null || ipAddress.length() == 0 || "unknown".equalsIgnoreCase(ipAddress)) { + ipAddress = request.getRemoteAddr(); + if (ipAddress.equals("127.0.0.1")) { + // 根据网卡取本机配置的IP + InetAddress inet = null; + try { + inet = InetAddress.getLocalHost(); + } catch (UnknownHostException e) { + e.printStackTrace(); + } + ipAddress = inet.getHostAddress(); + } + } + // 对于通过多个代理的情况,第一个IP为客户端真实IP,多个IP按照','分割 + if (ipAddress != null && ipAddress.length() > 15) { // "***.***.***.***".length() + // = 15 + if (ipAddress.indexOf(",") > 0) { + 
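+                    // keep only the first entry of the comma-separated X-Forwarded-For chain (the original client IP)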
ipAddress = ipAddress.substring(0, ipAddress.indexOf(",")); + } + } + } catch (Exception e) { + ipAddress=""; + } + // ipAddress = this.getRequest().getRemoteAddr(); + + return ipAddress; + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/JSONUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/JSONUtils.java new file mode 100644 index 0000000..e188921 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/JSONUtils.java @@ -0,0 +1,76 @@ +package com.czsj.bigdata.util; + +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; + +/** + * DataX JSON 用户名密码解密 + * + * @author zhouhongfa@gz-yibo.com + * @ClassName JSONUtils + * @Version 1.0 + * @since 2019/7/31 14:54 + */ +public class JSONUtils { + + /** + * decrypt 解密 + */ + public static Integer decrypt = 0; + /** + * decrypt 加密 + */ + public static Integer encrypt = 1; + + /** + * @param content + * @param key + * @param changeType 0加密 or 1解密 + * @return + */ + public static JSONObject change(String content, String key, Integer changeType) { + JSONObject keyObj = JSONObject.parseObject(JSONObject.parseObject(content).getString(key)); + JSONObject params = JSONObject.parseObject(keyObj.getString("parameter")); + String dUsername = null, dPassword = null; + if (decrypt.equals(changeType)) { //解密 + dUsername = AESUtil.decrypt(params.getString("username")); + dPassword = AESUtil.decrypt(params.getString("password")); + + } else if (encrypt.equals(changeType)) {//加密 + + dUsername = AESUtil.encrypt(params.getString("username")); + dPassword = AESUtil.encrypt(params.getString("password")); + } + String username = dUsername == null ? params.getString("username") : dUsername; + String password = dPassword == null ? params.getString("password") : dPassword; + params.put("username", username); + params.put("password", password); + keyObj.put("parameter", params); + return keyObj; + } + + /** + * @param jsonStr + * @param changeType 0加密 or 1解密 + * @return jsonStr + */ + public static String changeJson(String jsonStr, Integer changeType) { + JSONObject json = JSONObject.parseObject(jsonStr); + JSONObject job = json.getJSONObject("job"); + JSONArray contents = job.getJSONArray("content"); + for (int i = 0; i < contents.size(); i++) { + String contentStr = contents.getString(i); + Object obj = contents.get(i); + if (decrypt.equals(changeType)) { //解密 + ((JSONObject) obj).put("reader", change(contentStr, "reader", decrypt)); + ((JSONObject) obj).put("writer", change(contentStr, "writer", decrypt)); + } else if (encrypt.equals(changeType)) {//加密 + ((JSONObject) obj).put("reader", change(contentStr, "reader", encrypt)); + ((JSONObject) obj).put("writer", change(contentStr, "writer", encrypt)); + } + } + job.put("content", contents); + json.put("job", job); + return json.toJSONString(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcConstants.java b/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcConstants.java new file mode 100644 index 0000000..a82423a --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcConstants.java @@ -0,0 +1,127 @@ +package com.czsj.bigdata.util; + + +/** + * JdbcConstants + * + * @author jingwk + * @ClassName JdbcConstants + * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +public interface JdbcConstants { + + + String HBASE_ZK_QUORUM = "hbase.zookeeper.quorum"; + + String MONGODB ="mongodb"; + + String JTDS = "jtds"; + + String MOCK = "mock"; + + String HSQL = "hsql"; + + String DB2 = "db2"; + + String DB2_DRIVER = 
"com.ibm.db2.jcc.DB2Driver"; + + String POSTGRESQL = "postgresql"; + String POSTGRESQL_DRIVER = "org.postgresql.Driver"; + + String SYBASE = "sybase"; + + String SQL_SERVER = "sqlserver"; + String SQL_SERVER_DRIVER = "com.microsoft.jdbc.sqlserver.SQLServerDriver"; + String SQL_SERVER_DRIVER_SQLJDBC4 = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + String SQL_SERVER_DRIVER_JTDS = "net.sourceforge.jtds.jdbc.Driver"; + + String ORACLE = "oracle"; + String ORACLE_DRIVER = "oracle.jdbc.OracleDriver"; + String ORACLE_DRIVER2 = "oracle.jdbc.driver.OracleDriver"; + + String ALI_ORACLE = "AliOracle"; + String ALI_ORACLE_DRIVER = "com.alibaba.jdbc.AlibabaDriver"; + + String MYSQL = "mysql"; + String MYSQL_DRIVER = "com.mysql.jdbc.Driver"; + String MYSQL_DRIVER_6 = "com.mysql.cj.jdbc.Driver"; + String MYSQL_DRIVER_REPLICATE = "com.mysql.jdbc."; + + String MARIADB = "mariadb"; + String MARIADB_DRIVER = "org.mariadb.jdbc.Driver"; + + String HANA = "hana"; + String HANA_DRIVER = "com.sap.db.jdbc.Driver"; + + + String DERBY = "derby"; + + String HBASE = "hbase"; + + String HIVE = "hive"; + String HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver"; + + String H2 = "h2"; + String H2_DRIVER = "org.h2.Driver"; + + String DM = "dm"; + String DM_DRIVER = "dm.jdbc.driver.DmDriver"; + + String KINGBASE = "kingbase"; + String KINGBASE_DRIVER = "com.kingbase.Driver"; + + String GBASE = "gbase"; + String GBASE_DRIVER = "com.gbase.jdbc.Driver"; + + String XUGU = "xugu"; + String XUGU_DRIVER = "com.xugu.cloudjdbc.Driver"; + + String OCEANBASE = "oceanbase"; + String OCEANBASE_DRIVER = "com.mysql.jdbc.Driver"; + String INFORMIX = "informix"; + + /** + * 阿里云odps + */ + String ODPS = "odps"; + String ODPS_DRIVER = "com.aliyun.odps.jdbc.OdpsDriver"; + + String TERADATA = "teradata"; + String TERADATA_DRIVER = "com.teradata.jdbc.TeraDriver"; + + /** + * Log4JDBC + */ + String LOG4JDBC = "log4jdbc"; + String LOG4JDBC_DRIVER = "net.sf.log4jdbc.DriverSpy"; + + String PHOENIX = "phoenix"; + String PHOENIX_DRIVER = "org.apache.phoenix.jdbc.PhoenixDriver"; + String ENTERPRISEDB = "edb"; + String ENTERPRISEDB_DRIVER = "com.edb.Driver"; + + String KYLIN = "kylin"; + String KYLIN_DRIVER = "org.apache.kylin.jdbc.Driver"; + + + String SQLITE = "sqlite"; + String SQLITE_DRIVER = "org.sqlite.JDBC"; + + String ALIYUN_ADS = "aliyun_ads"; + String ALIYUN_DRDS = "aliyun_drds"; + + String PRESTO = "presto"; + String PRESTO_DRIVER = "com.facebook.presto.jdbc.PrestoDriver"; + + String ELASTIC_SEARCH = "elastic_search"; + + String ELASTIC_SEARCH_DRIVER = "com.alibaba.xdriver.elastic.jdbc.ElasticDriver"; + + String CLICKHOUSE = "clickhouse"; + String CLICKHOUSE_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver"; + + // for HBase2.X and Phoenix5.X + String HBASE20XSQL = "hbase20xsql"; + String HBASE20XSQL_DRIVER = "org.apache.phoenix.jdbc.PhoenixDriver"; +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcUtils.java new file mode 100644 index 0000000..b860bf0 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/JdbcUtils.java @@ -0,0 +1,666 @@ +package com.czsj.bigdata.util; + + +import org.apache.xerces.impl.dv.util.HexBin; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.sql.DataSource; +import java.io.Closeable; +import java.io.InputStream; +import java.io.PrintStream; +import java.net.URL; +import java.sql.Date; +import java.sql.*; +import java.util.*; + + +/** + * jdbc utils + * + * @author jingwk + * @ClassName JdbcUtils 
+ * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +public final class JdbcUtils implements JdbcConstants { + + private static Logger LOG = LoggerFactory.getLogger(JdbcUtils.class); + + + private static final Properties DRIVER_URL_MAPPING = new Properties(); + + private static Boolean mysql_driver_version_6 = null; + + static { + try { + ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader(); + if (ctxClassLoader != null) { + for (Enumeration e = ctxClassLoader.getResources("META-INF/druid-driver.properties"); e.hasMoreElements();) { + URL url = e.nextElement(); + + Properties property = new Properties(); + + InputStream is = null; + try { + is = url.openStream(); + property.load(is); + } finally { + JdbcUtils.close(is); + } + + DRIVER_URL_MAPPING.putAll(property); + } + } + } catch (Exception e) { + LOG.error("load druid-driver.properties error", e); + } + } + + public static void close(Connection x) { + if (x == null) { + return; + } + try { + x.close(); + } catch (Exception e) { + LOG.debug("close connection error", e); + } + } + + public static void close(Statement x) { + if (x == null) { + return; + } + try { + x.close(); + } catch (Exception e) { + LOG.debug("close statement error", e); + } + } + + public static void close(ResultSet x) { + if (x == null) { + return; + } + try { + x.close(); + } catch (Exception e) { + LOG.debug("close result set error", e); + } + } + + public static void close(Closeable x) { + if (x == null) { + return; + } + + try { + x.close(); + } catch (Exception e) { + LOG.debug("close error", e); + } + } + + public static void close(Blob x) { + if (x == null) { + return; + } + + try { + x.free(); + } catch (Exception e) { + LOG.debug("close error", e); + } + } + + public static void close(Clob x) { + if (x == null) { + return; + } + + try { + x.free(); + } catch (Exception e) { + LOG.debug("close error", e); + } + } + + public static void printResultSet(ResultSet rs) throws SQLException { + printResultSet(rs, System.out); + } + + public static void printResultSet(ResultSet rs, PrintStream out) throws SQLException { + printResultSet(rs, out, true, "\t"); + } + + public static void printResultSet(ResultSet rs, PrintStream out, boolean printHeader, String seperator) throws SQLException { + ResultSetMetaData metadata = rs.getMetaData(); + int columnCount = metadata.getColumnCount(); + if (printHeader) { + for (int columnIndex = 1; columnIndex <= columnCount; ++columnIndex) { + if (columnIndex != 1) { + out.print(seperator); + } + out.print(metadata.getColumnName(columnIndex)); + } + } + + out.println(); + + while (rs.next()) { + + for (int columnIndex = 1; columnIndex <= columnCount; ++columnIndex) { + if (columnIndex != 1) { + out.print(seperator); + } + + int type = metadata.getColumnType(columnIndex); + + if (type == Types.VARCHAR || type == Types.CHAR || type == Types.NVARCHAR || type == Types.NCHAR) { + out.print(rs.getString(columnIndex)); + } else if (type == Types.DATE) { + Date date = rs.getDate(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(date.toString()); + } + } else if (type == Types.BIT) { + boolean value = rs.getBoolean(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(Boolean.toString(value)); + } + } else if (type == Types.BOOLEAN) { + boolean value = rs.getBoolean(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(Boolean.toString(value)); + } + } else if (type == Types.TINYINT) { + byte value = rs.getByte(columnIndex); + if 
(rs.wasNull()) { + out.print("null"); + } else { + out.print(Byte.toString(value)); + } + } else if (type == Types.SMALLINT) { + short value = rs.getShort(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(Short.toString(value)); + } + } else if (type == Types.INTEGER) { + int value = rs.getInt(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(Integer.toString(value)); + } + } else if (type == Types.BIGINT) { + long value = rs.getLong(columnIndex); + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(Long.toString(value)); + } + } else if (type == Types.TIMESTAMP) { + out.print(String.valueOf(rs.getTimestamp(columnIndex))); + } else if (type == Types.DECIMAL) { + out.print(String.valueOf(rs.getBigDecimal(columnIndex))); + } else if (type == Types.CLOB) { + out.print(String.valueOf(rs.getString(columnIndex))); + } else if (type == Types.JAVA_OBJECT) { + Object object = rs.getObject(columnIndex); + + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(String.valueOf(object)); + } + } else if (type == Types.LONGVARCHAR) { + Object object = rs.getString(columnIndex); + + if (rs.wasNull()) { + out.print("null"); + } else { + out.print(String.valueOf(object)); + } + } else if (type == Types.NULL) { + out.print("null"); + } else { + Object object = rs.getObject(columnIndex); + + if (rs.wasNull()) { + out.print("null"); + } else { + if (object instanceof byte[]) { + byte[] bytes = (byte[]) object; + String text = HexBin.encode(bytes); + out.print(text); + } else { + out.print(String.valueOf(object)); + } + } + } + } + out.println(); + } + } + + public static String getTypeName(int sqlType) { + switch (sqlType) { + case Types.ARRAY: + return "ARRAY"; + + case Types.BIGINT: + return "BIGINT"; + + case Types.BINARY: + return "BINARY"; + + case Types.BIT: + return "BIT"; + + case Types.BLOB: + return "BLOB"; + + case Types.BOOLEAN: + return "BOOLEAN"; + + case Types.CHAR: + return "CHAR"; + + case Types.CLOB: + return "CLOB"; + + case Types.DATALINK: + return "DATALINK"; + + case Types.DATE: + return "DATE"; + + case Types.DECIMAL: + return "DECIMAL"; + + case Types.DISTINCT: + return "DISTINCT"; + + case Types.DOUBLE: + return "DOUBLE"; + + case Types.FLOAT: + return "FLOAT"; + + case Types.INTEGER: + return "INTEGER"; + + case Types.JAVA_OBJECT: + return "JAVA_OBJECT"; + + case Types.LONGNVARCHAR: + return "LONGNVARCHAR"; + + case Types.LONGVARBINARY: + return "LONGVARBINARY"; + + case Types.NCHAR: + return "NCHAR"; + + case Types.NCLOB: + return "NCLOB"; + + case Types.NULL: + return "NULL"; + + case Types.NUMERIC: + return "NUMERIC"; + + case Types.NVARCHAR: + return "NVARCHAR"; + + case Types.REAL: + return "REAL"; + + case Types.REF: + return "REF"; + + case Types.ROWID: + return "ROWID"; + + case Types.SMALLINT: + return "SMALLINT"; + + case Types.SQLXML: + return "SQLXML"; + + case Types.STRUCT: + return "STRUCT"; + + case Types.TIME: + return "TIME"; + + case Types.TIMESTAMP: + return "TIMESTAMP"; + + case Types.TINYINT: + return "TINYINT"; + + case Types.VARBINARY: + return "VARBINARY"; + + case Types.VARCHAR: + return "VARCHAR"; + + default: + return "OTHER"; + + } + } + + public static String getDbType(String rawUrl, String driverClassName) { + if (rawUrl == null) { + return null; + } + + if (rawUrl.startsWith("jdbc:derby:") || rawUrl.startsWith("jdbc:log4jdbc:derby:")) { + return DERBY; + } else if (rawUrl.startsWith("jdbc:mysql:") || rawUrl.startsWith("jdbc:cobar:") + || 
rawUrl.startsWith("jdbc:log4jdbc:mysql:")) { + return MYSQL; + } else if (rawUrl.startsWith("jdbc:mariadb:")) { + return MARIADB; + } else if (rawUrl.startsWith("jdbc:oracle:") || rawUrl.startsWith("jdbc:log4jdbc:oracle:")) { + return ORACLE; + } else if (rawUrl.startsWith("jdbc:alibaba:oracle:")) { + return ALI_ORACLE; + } else if (rawUrl.startsWith("jdbc:microsoft:") || rawUrl.startsWith("jdbc:log4jdbc:microsoft:")) { + return SQL_SERVER; + } else if (rawUrl.startsWith("jdbc:sqlserver:") || rawUrl.startsWith("jdbc:log4jdbc:sqlserver:")) { + return SQL_SERVER; + } else if (rawUrl.startsWith("jdbc:sybase:Tds:") || rawUrl.startsWith("jdbc:log4jdbc:sybase:")) { + return SYBASE; + } else if (rawUrl.startsWith("jdbc:jtds:") || rawUrl.startsWith("jdbc:log4jdbc:jtds:")) { + return JTDS; + } else if (rawUrl.startsWith("jdbc:fake:") || rawUrl.startsWith("jdbc:mock:")) { + return MOCK; + } else if (rawUrl.startsWith("jdbc:postgresql:") || rawUrl.startsWith("jdbc:log4jdbc:postgresql:")) { + return POSTGRESQL; + } else if (rawUrl.startsWith("jdbc:edb:")) { + return ENTERPRISEDB; + } else if (rawUrl.startsWith("jdbc:hsqldb:") || rawUrl.startsWith("jdbc:log4jdbc:hsqldb:")) { + return HSQL; + } else if (rawUrl.startsWith("jdbc:odps:")) { + return ODPS; + } else if (rawUrl.startsWith("jdbc:db2:")) { + return DB2; + } else if (rawUrl.startsWith("jdbc:sqlite:")) { + return SQLITE; + } else if (rawUrl.startsWith("jdbc:ingres:")) { + return "ingres"; + } else if (rawUrl.startsWith("jdbc:h2:") || rawUrl.startsWith("jdbc:log4jdbc:h2:")) { + return H2; + } else if (rawUrl.startsWith("jdbc:mckoi:")) { + return "mckoi"; + } else if (rawUrl.startsWith("jdbc:cloudscape:")) { + return "cloudscape"; + } else if (rawUrl.startsWith("jdbc:informix-sqli:") || rawUrl.startsWith("jdbc:log4jdbc:informix-sqli:")) { + return "informix"; + } else if (rawUrl.startsWith("jdbc:timesten:")) { + return "timesten"; + } else if (rawUrl.startsWith("jdbc:as400:")) { + return "as400"; + } else if (rawUrl.startsWith("jdbc:sapdb:")) { + return "sapdb"; + } else if (rawUrl.startsWith("jdbc:JSQLConnect:")) { + return "JSQLConnect"; + } else if (rawUrl.startsWith("jdbc:JTurbo:")) { + return "JTurbo"; + } else if (rawUrl.startsWith("jdbc:firebirdsql:")) { + return "firebirdsql"; + } else if (rawUrl.startsWith("jdbc:interbase:")) { + return "interbase"; + } else if (rawUrl.startsWith("jdbc:pointbase:")) { + return "pointbase"; + } else if (rawUrl.startsWith("jdbc:edbc:")) { + return "edbc"; + } else if (rawUrl.startsWith("jdbc:mimer:multi1:")) { + return "mimer"; + } else if (rawUrl.startsWith("jdbc:dm:")) { + return JdbcConstants.DM; + } else if (rawUrl.startsWith("jdbc:kingbase:")) { + return JdbcConstants.KINGBASE; + } else if (rawUrl.startsWith("jdbc:gbase:")) { + return JdbcConstants.GBASE; + } else if (rawUrl.startsWith("jdbc:xugu:")) { + return JdbcConstants.XUGU; + } else if (rawUrl.startsWith("jdbc:log4jdbc:")) { + return LOG4JDBC; + } else if (rawUrl.startsWith("jdbc:hive:")) { + return HIVE; + } else if (rawUrl.startsWith("jdbc:hive2:")) { + return HIVE; + } else if (rawUrl.startsWith("jdbc:phoenix:")) { + return PHOENIX; + } else if (rawUrl.startsWith("jdbc:elastic:")) { + return ELASTIC_SEARCH; + } else if (rawUrl.startsWith("jdbc:clickhouse:")) { + return CLICKHOUSE; + }else if (rawUrl.startsWith("jdbc:presto:")) { + return PRESTO; + } else { + return null; + } + } + + public static Driver createDriver(String driverClassName) throws SQLException { + return createDriver(null, driverClassName); + } + + public static Driver 
createDriver(ClassLoader classLoader, String driverClassName) throws SQLException { + Class clazz = null; + if (classLoader != null) { + try { + clazz = classLoader.loadClass(driverClassName); + } catch (ClassNotFoundException e) { + // skip + } + } + + if (clazz == null) { + try { + ClassLoader contextLoader = Thread.currentThread().getContextClassLoader(); + if (contextLoader != null) { + clazz = contextLoader.loadClass(driverClassName); + } + } catch (ClassNotFoundException e) { + // skip + } + } + + if (clazz == null) { + try { + clazz = Class.forName(driverClassName); + } catch (ClassNotFoundException e) { + throw new SQLException(e.getMessage(), e); + } + } + + try { + return (Driver) clazz.newInstance(); + } catch (IllegalAccessException e) { + throw new SQLException(e.getMessage(), e); + } catch (InstantiationException e) { + throw new SQLException(e.getMessage(), e); + } + } + + public static int executeUpdate(DataSource dataSource, String sql, Object... parameters) throws SQLException { + return executeUpdate(dataSource, sql, Arrays.asList(parameters)); + } + + public static int executeUpdate(DataSource dataSource, String sql, List parameters) throws SQLException { + Connection conn = null; + try { + conn = dataSource.getConnection(); + return executeUpdate(conn, sql, parameters); + } finally { + close(conn); + } + } + + public static int executeUpdate(Connection conn, String sql, List parameters) throws SQLException { + PreparedStatement stmt = null; + + int updateCount; + try { + stmt = conn.prepareStatement(sql); + + setParameters(stmt, parameters); + + updateCount = stmt.executeUpdate(); + } finally { + JdbcUtils.close(stmt); + } + + return updateCount; + } + + public static void execute(DataSource dataSource, String sql, Object... parameters) throws SQLException { + execute(dataSource, sql, Arrays.asList(parameters)); + } + + public static void execute(DataSource dataSource, String sql, List parameters) throws SQLException { + Connection conn = null; + try { + conn = dataSource.getConnection(); + execute(conn, sql, parameters); + } finally { + close(conn); + } + } + + public static void execute(Connection conn, String sql) throws SQLException { + execute(conn, sql, Collections.emptyList()); + } + + public static void execute(Connection conn, String sql, List parameters) throws SQLException { + PreparedStatement stmt = null; + + try { + stmt = conn.prepareStatement(sql); + + setParameters(stmt, parameters); + + stmt.executeUpdate(); + } finally { + JdbcUtils.close(stmt); + } + } + + public static List> executeQuery(DataSource dataSource, String sql, Object... 
parameters) + throws SQLException { + return executeQuery(dataSource, sql, Arrays.asList(parameters)); + } + + public static List> executeQuery(DataSource dataSource, String sql, List parameters) + throws SQLException { + Connection conn = null; + try { + conn = dataSource.getConnection(); + return executeQuery(conn, sql, parameters); + } finally { + close(conn); + } + } + + public static List> executeQuery(Connection conn, String sql, List parameters) + throws SQLException { + List> rows = new ArrayList>(); + + PreparedStatement stmt = null; + ResultSet rs = null; + try { + stmt = conn.prepareStatement(sql); + + setParameters(stmt, parameters); + + rs = stmt.executeQuery(); + + ResultSetMetaData rsMeta = rs.getMetaData(); + + while (rs.next()) { + Map row = new LinkedHashMap(); + + for (int i = 0, size = rsMeta.getColumnCount(); i < size; ++i) { + String columName = rsMeta.getColumnLabel(i + 1); + Object value = rs.getObject(i + 1); + row.put(columName, value); + } + + rows.add(row); + } + } finally { + JdbcUtils.close(rs); + JdbcUtils.close(stmt); + } + + return rows; + } + + private static void setParameters(PreparedStatement stmt, List parameters) throws SQLException { + for (int i = 0, size = parameters.size(); i < size; ++i) { + Object param = parameters.get(i); + stmt.setObject(i + 1, param); + } + } + + public static void insertToTable(DataSource dataSource, String tableName, Map data) + throws SQLException { + Connection conn = null; + try { + conn = dataSource.getConnection(); + insertToTable(conn, tableName, data); + } finally { + close(conn); + } + } + + public static void insertToTable(Connection conn, String tableName, Map data) throws SQLException { + String sql = makeInsertToTableSql(tableName, data.keySet()); + List parameters = new ArrayList(data.values()); + execute(conn, sql, parameters); + } + + public static String makeInsertToTableSql(String tableName, Collection names) { + StringBuilder sql = new StringBuilder() // + .append("insert into ") // + .append(tableName) // + .append("("); // + + int nameCount = 0; + for (String name : names) { + if (nameCount > 0) { + sql.append(","); + } + sql.append(name); + nameCount++; + } + sql.append(") values ("); + for (int i = 0; i < nameCount; ++i) { + if (i != 0) { + sql.append(","); + } + sql.append("?"); + } + sql.append(")"); + + return sql.toString(); + } + + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/JwtTokenUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/JwtTokenUtils.java new file mode 100644 index 0000000..3525a38 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/JwtTokenUtils.java @@ -0,0 +1,84 @@ +package com.czsj.bigdata.util; + +import com.alibaba.fastjson.JSON; +import io.jsonwebtoken.Claims; +import io.jsonwebtoken.ExpiredJwtException; +import io.jsonwebtoken.Jwts; +import io.jsonwebtoken.SignatureAlgorithm; + +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.List; + +import static com.czsj.core.util.Constants.SPLIT_COMMA; + +/** + * Created by jingwk on 2019/12/01 + */ +public class JwtTokenUtils { + + public static final String TOKEN_HEADER = "Authorization"; + public static final String TOKEN_PREFIX = "Bearer "; + + private static final String SECRET = "datax_admin"; + private static final String ISS = "admin"; + + // 角色的key + private static final String ROLE_CLAIMS = "rol"; + + // 过期时间是3600秒,既是24个小时 + private static final long EXPIRATION = 86400L; + + // 选择了记住我之后的过期时间为7天 + private static final long 
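+    // (note: EXPIRATION above is 86400 seconds, i.e. 24 hours, not the 3600 the comment states;
+    // remember-me below extends it to 7 days)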
EXPIRATION_REMEMBER = 7 * EXPIRATION; + + // 创建token + public static String createToken(Integer id, String username, String role, boolean isRememberMe) { + long expiration = isRememberMe ? EXPIRATION_REMEMBER : EXPIRATION; + HashMap map = new HashMap<>(); + map.put(ROLE_CLAIMS, role); + return Jwts.builder() + .signWith(SignatureAlgorithm.HS512, SECRET) + .setClaims(map) + .setIssuer(ISS) + .setSubject(id + SPLIT_COMMA + username) + .setIssuedAt(new Date()) + .setExpiration(new Date(System.currentTimeMillis() + expiration * 1000)) + .compact(); + } + + // 从token中获取用户名 + public static String getUsername(String token) { + List userInfo = Arrays.asList(getTokenBody(token).getSubject().split(SPLIT_COMMA)); + return userInfo.get(1); + } + + // 从token中获取用户名 + public static Integer getUserId(String token) { + String s= JSON.toJSONString(getTokenBody(token).getSubject()); + List userInfo = Arrays.asList(getTokenBody(token).getSubject().split(SPLIT_COMMA)); + return Integer.parseInt(userInfo.get(0)); + } + + // 获取用户角色 + public static String getUserRole(String token) { + return (String) getTokenBody(token).get(ROLE_CLAIMS); + } + + // 是否已过期 + public static boolean isExpiration(String token) { + try { + return getTokenBody(token).getExpiration().before(new Date()); + } catch (ExpiredJwtException e) { + return true; + } + } + + private static Claims getTokenBody(String token) { + return Jwts.parser() + .setSigningKey(SECRET) + .parseClaimsJws(token) + .getBody(); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/PageUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/PageUtils.java new file mode 100644 index 0000000..12898eb --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/PageUtils.java @@ -0,0 +1,59 @@ +package com.czsj.bigdata.util; + +import cn.hutool.core.lang.Filter; +import cn.hutool.core.map.MapUtil; +import cn.hutool.core.util.StrUtil; +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Map; + +/** + * 分页工具类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/6/15 + */ +public class PageUtils { + /** + * 用于保存分页查询用到的关键字 + */ + public static final List PAGE_QUERY_KEY_LIST = ImmutableList.of("current", "size", "sortBy", "orderby", "order", "sort", "ifCount", "ascs", "descs"); + + + /** + * 过滤pageHelper的参数、空值等 + * 返回查询条件 + * + * @return + */ + public static Map filterColumnQueryParams(Map map) { + return MapUtil.filter(map, (Filter>) e -> { + if (StrUtil.isBlank(StrUtil.toString(e.getValue()))) { + return false; + } + if (PAGE_QUERY_KEY_LIST.contains(e.getKey())) { + return false; + } + return true; + }); + } + + /** + * 返回pageHelper用到的参数 + * + * @return + */ + public static Map filterPageParams(Map map) { + return MapUtil.filter(map, (Filter>) e -> { + if (StrUtil.isBlank(StrUtil.toString(e.getValue()))) { + return false; + } + if (PAGE_QUERY_KEY_LIST.contains(e.getKey())) { + return true; + } + return false; + }); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/RdbmsException.java b/czsj-system/src/main/java/com/czsj/bigdata/util/RdbmsException.java new file mode 100644 index 0000000..8212af1 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/RdbmsException.java @@ -0,0 +1,197 @@ +package com.czsj.bigdata.util; + + +import com.czsj.core.util.Constants; + +/** + * RdbmsException + * + * @author jingwk + * @ClassName RdbmsException + * @Version 2.1.1 + * @since 2022/03/14 07:15 + */ +public class RdbmsException extends DataXException{ + + + public 
RdbmsException(ErrorCode errorCode, String errorMessage) { + super(errorCode, errorMessage); + } + + public static DataXException asConnException(String dataBaseType, Exception e, String userName, String dbName){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + DBUtilErrorCode dbUtilErrorCode = mySqlConnectionErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_CONN_DB_ERROR && dbName !=null ){ + return DataXException.asDataXException(dbUtilErrorCode,"该数据库名称为:"+dbName+" 具体错误信息为:"+e); + } + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_CONN_USERPWD_ERROR ){ + return DataXException.asDataXException(dbUtilErrorCode,"该数据库用户名为:"+userName+" 具体错误信息为:"+e); + } + return DataXException.asDataXException(dbUtilErrorCode," 具体错误信息为:"+e); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + DBUtilErrorCode dbUtilErrorCode = oracleConnectionErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_CONN_DB_ERROR && dbName != null){ + return DataXException.asDataXException(dbUtilErrorCode,"该数据库名称为:"+dbName+" 具体错误信息为:"+e); + } + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_CONN_USERPWD_ERROR ){ + return DataXException.asDataXException(dbUtilErrorCode,"该数据库用户名为:"+userName+" 具体错误信息为:"+e); + } + return DataXException.asDataXException(dbUtilErrorCode," 具体错误信息为:"+e); + } + return DataXException.asDataXException(DBUtilErrorCode.CONN_DB_ERROR," 具体错误信息为:"+e); + } + + public static DBUtilErrorCode mySqlConnectionErrorAna(String e){ + if (e.contains(Constants.MYSQL_DATABASE)){ + return DBUtilErrorCode.MYSQL_CONN_DB_ERROR; + } + + if (e.contains(Constants.MYSQL_CONNEXP)){ + return DBUtilErrorCode.MYSQL_CONN_IPPORT_ERROR; + } + + if (e.contains(Constants.MYSQL_ACCDENIED)){ + return DBUtilErrorCode.MYSQL_CONN_USERPWD_ERROR; + } + + return DBUtilErrorCode.CONN_DB_ERROR; + } + + public static DBUtilErrorCode oracleConnectionErrorAna(String e){ + if (e.contains(Constants.ORACLE_DATABASE)){ + return DBUtilErrorCode.ORACLE_CONN_DB_ERROR; + } + + if (e.contains(Constants.ORACLE_CONNEXP)){ + return DBUtilErrorCode.ORACLE_CONN_IPPORT_ERROR; + } + + if (e.contains(Constants.ORACLE_ACCDENIED)){ + return DBUtilErrorCode.ORACLE_CONN_USERPWD_ERROR; + } + + return DBUtilErrorCode.CONN_DB_ERROR; + } + + public static DataXException asQueryException(String dataBaseType, Exception e, String querySql, String table, String userName){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + DBUtilErrorCode dbUtilErrorCode = mySqlQueryErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_QUERY_TABLE_NAME_ERROR && table != null){ + return DataXException.asDataXException(dbUtilErrorCode,"表名为:"+table+" 执行的SQL为:"+querySql+" 具体错误信息为:"+e); + } + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_QUERY_SELECT_PRI_ERROR && userName != null){ + return DataXException.asDataXException(dbUtilErrorCode,"用户名为:"+userName+" 具体错误信息为:"+e); + } + + return DataXException.asDataXException(dbUtilErrorCode,"执行的SQL为: "+querySql+" 具体错误信息为:"+e); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + DBUtilErrorCode dbUtilErrorCode = oracleQueryErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_QUERY_TABLE_NAME_ERROR && table != null){ + return DataXException.asDataXException(dbUtilErrorCode,"表名为:"+table+" 执行的SQL为:"+querySql+" 具体错误信息为:"+e); + } + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_QUERY_SELECT_PRI_ERROR){ + return DataXException.asDataXException(dbUtilErrorCode,"用户名为:"+userName+" 具体错误信息为:"+e); + } + + return DataXException.asDataXException(dbUtilErrorCode,"执行的SQL为: "+querySql+" 
具体错误信息为:"+e); + + } + + return DataXException.asDataXException(DBUtilErrorCode.SQL_EXECUTE_FAIL, "执行的SQL为: "+querySql+" 具体错误信息为:"+e); + } + + public static DBUtilErrorCode mySqlQueryErrorAna(String e){ + if (e.contains(Constants.MYSQL_TABLE_NAME_ERR1) && e.contains(Constants.MYSQL_TABLE_NAME_ERR2)){ + return DBUtilErrorCode.MYSQL_QUERY_TABLE_NAME_ERROR; + }else if (e.contains(Constants.MYSQL_SELECT_PRI)){ + return DBUtilErrorCode.MYSQL_QUERY_SELECT_PRI_ERROR; + }else if (e.contains(Constants.MYSQL_COLUMN1) && e.contains(Constants.MYSQL_COLUMN2)){ + return DBUtilErrorCode.MYSQL_QUERY_COLUMN_ERROR; + }else if (e.contains(Constants.MYSQL_WHERE)){ + return DBUtilErrorCode.MYSQL_QUERY_SQL_ERROR; + } + return DBUtilErrorCode.READ_RECORD_FAIL; + } + + public static DBUtilErrorCode oracleQueryErrorAna(String e){ + if (e.contains(Constants.ORACLE_TABLE_NAME)){ + return DBUtilErrorCode.ORACLE_QUERY_TABLE_NAME_ERROR; + }else if (e.contains(Constants.ORACLE_SQL)){ + return DBUtilErrorCode.ORACLE_QUERY_SQL_ERROR; + }else if (e.contains(Constants.ORACLE_SELECT_PRI)){ + return DBUtilErrorCode.ORACLE_QUERY_SELECT_PRI_ERROR; + } + return DBUtilErrorCode.READ_RECORD_FAIL; + } + + public static DataXException asSqlParserException(String dataBaseType, Exception e, String querySql){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + throw DataXException.asDataXException(DBUtilErrorCode.MYSQL_QUERY_SQL_PARSER_ERROR, "执行的SQL为:"+querySql+" 具体错误信息为:" + e); + } + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + throw DataXException.asDataXException(DBUtilErrorCode.ORACLE_QUERY_SQL_PARSER_ERROR,"执行的SQL为:"+querySql+" 具体错误信息为:" +e); + } + throw DataXException.asDataXException(DBUtilErrorCode.READ_RECORD_FAIL,"执行的SQL为:"+querySql+" 具体错误信息为:"+e); + } + + public static DataXException asPreSQLParserException(String dataBaseType, Exception e, String querySql){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + throw DataXException.asDataXException(DBUtilErrorCode.MYSQL_PRE_SQL_ERROR, "执行的SQL为:"+querySql+" 具体错误信息为:" + e); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + throw DataXException.asDataXException(DBUtilErrorCode.ORACLE_PRE_SQL_ERROR,"执行的SQL为:"+querySql+" 具体错误信息为:" +e); + } + throw DataXException.asDataXException(DBUtilErrorCode.READ_RECORD_FAIL,"执行的SQL为:"+querySql+" 具体错误信息为:"+e); + } + + public static DataXException asPostSQLParserException(String dataBaseType, Exception e, String querySql){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + throw DataXException.asDataXException(DBUtilErrorCode.MYSQL_POST_SQL_ERROR, "执行的SQL为:"+querySql+" 具体错误信息为:" + e); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + throw DataXException.asDataXException(DBUtilErrorCode.ORACLE_POST_SQL_ERROR,"执行的SQL为:"+querySql+" 具体错误信息为:" +e); + } + throw DataXException.asDataXException(DBUtilErrorCode.READ_RECORD_FAIL,"执行的SQL为:"+querySql+" 具体错误信息为:"+e); + } + + public static DataXException asInsertPriException(String dataBaseType, String userName, String jdbcUrl){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + throw DataXException.asDataXException(DBUtilErrorCode.MYSQL_INSERT_ERROR, "用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + throw DataXException.asDataXException(DBUtilErrorCode.ORACLE_INSERT_ERROR,"用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + throw DataXException.asDataXException(DBUtilErrorCode.NO_INSERT_PRIVILEGE,"用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + + public static DataXException asDeletePriException(String dataBaseType, String userName, String jdbcUrl){ + 
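+        // choose the engine-specific "missing DELETE privilege" code; engines other than
+        // MySQL/Oracle fall back to the generic NO_DELETE_PRIVILEGE below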
if (dataBaseType.equals(JdbcConstants.MYSQL)){ + throw DataXException.asDataXException(DBUtilErrorCode.MYSQL_DELETE_ERROR, "用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + throw DataXException.asDataXException(DBUtilErrorCode.ORACLE_DELETE_ERROR,"用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + throw DataXException.asDataXException(DBUtilErrorCode.NO_DELETE_PRIVILEGE,"用户名为:"+userName+" jdbcURL为:"+jdbcUrl); + } + + public static DataXException asSplitPKException(String dataBaseType, Exception e, String splitSql, String splitPkID){ + if (dataBaseType.equals(JdbcConstants.MYSQL)){ + + return DataXException.asDataXException(DBUtilErrorCode.MYSQL_SPLIT_PK_ERROR,"配置的SplitPK为: "+splitPkID+", 执行的SQL为: "+splitSql+" 具体错误信息为:"+e); + } + + if (dataBaseType.equals(JdbcConstants.ORACLE)){ + return DataXException.asDataXException(DBUtilErrorCode.ORACLE_SPLIT_PK_ERROR,"配置的SplitPK为: "+splitPkID+", 执行的SQL为: "+splitSql+" 具体错误信息为:"+e); + } + + return DataXException.asDataXException(DBUtilErrorCode.READ_RECORD_FAIL,splitSql+e); + } +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/ReflectionUtil.java b/czsj-system/src/main/java/com/czsj/bigdata/util/ReflectionUtil.java new file mode 100644 index 0000000..a9e957e --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/ReflectionUtil.java @@ -0,0 +1,52 @@ +package com.czsj.bigdata.util; + +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +/** + * @explain JAVA反射工具类 + * @author Song + * @date 2019/12/17 + */ +public class ReflectionUtil { + + /** + * 获取私有成员变量的值 + * @param instance 要获取的对象 + * @param filedName 获取的变量名称 + * @return 返回获取变量的信息(需要强转) + */ + public static Object getPrivateField(Object instance, String filedName) throws NoSuchFieldException, IllegalAccessException { + Field field = instance.getClass().getDeclaredField(filedName); + field.setAccessible(true); + return field.get(instance); + } + + /** + * 设置私有成员的值 + * @param instance 要获取的对象 + * @param fieldName 要获取的变量名 + * @param value 设置的值 + */ + public static void setPrivateField(Object instance, String fieldName, Object value) throws NoSuchFieldException, IllegalAccessException { + Field field = instance.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + field.set(instance, value); + } + + /** + * 访问私有方法 + * @param instance 要获取的对象 + * @param methodName 私有方法的名称 + * @param classes CLASS的返回信息 + * @param objects 参数信息 + * @return + */ + public static Object invokePrivateMethod(Object instance, String methodName, Class[] classes, String objects) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { + Method method = instance.getClass().getDeclaredMethod(methodName, classes); + method.setAccessible(true); + return method.invoke(instance, objects); + } + +} diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java new file mode 100644 index 0000000..f84ad43 --- /dev/null +++ b/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java @@ -0,0 +1,375 @@ +package com.czsj.bigdata.util; + +import cn.hutool.core.collection.CollectionUtil; +import cn.hutool.core.util.StrUtil; +import cn.hutool.json.JSONUtil; +import org.springframework.http.HttpHeaders; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import javax.servlet.ServletRequest; 
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java b/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java
new file mode 100644
index 0000000..f84ad43
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/util/ServletUtils.java
@@ -0,0 +1,375 @@
+package com.czsj.bigdata.util;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.json.JSONUtil;
+import org.springframework.http.HttpHeaders;
+import org.springframework.web.context.request.RequestContextHolder;
+import org.springframework.web.context.request.ServletRequestAttributes;
+
+import javax.servlet.ServletRequest;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.*;
+import java.util.Map.Entry;
+
+/**
+ * HTTP and Servlet utilities.
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version 1.0
+ * @since 2019/6/15
+ */
+public class ServletUtils {
+
+    public static final String DEFAULT_PARAMS_PARAM = "params"; // extended login parameters (a JSON string); takes precedence over the prefixed parameters below
+    public static final String DEFAULT_PARAM_PREFIX_PARAM = "param_"; // prefix of extended parameters
+
+    // static file suffixes, and URIs excluded from static-file handling
+    private static String[] staticFiles;
+    private static String[] staticFileExcludeUri;
+
+    /**
+     * Gets the current request object.
+     * Requires a RequestContextListener registered in web.xml:
+     * org.springframework.web.context.request.RequestContextListener
+     */
+    public static HttpServletRequest getRequest() {
+        try {
+            return ((ServletRequestAttributes) RequestContextHolder.currentRequestAttributes()).getRequest();
+        } catch (Exception e) {
+            return null;
+        }
+    }
+
+    /**
+     * Gets the current response object.
+     * Requires a requestContextFilter registered in web.xml:
+     * org.springframework.web.filter.RequestContextFilter, mapped to /*
+     */
+    public static HttpServletResponse getResponse() {
+        try {
+            return ((ServletRequestAttributes) RequestContextHolder.currentRequestAttributes()).getResponse();
+        } catch (Exception e) {
+            return null;
+        }
+    }
+
+    /***
+     * Gets the JSON string carried by the request.
+     * @param request
+     * @return the query string (GET) or the request body (POST)
+     * @throws IOException
+     */
+    public static String getRequestJsonString(HttpServletRequest request) throws IOException {
+        String submitMethod = request.getMethod();
+        // GET: the JSON travels in the query string
+        if (submitMethod.equals("GET")) {
+            if (StrUtil.isNotEmpty(request.getQueryString())) {
+                return new String(request.getQueryString().getBytes("iso-8859-1"), "utf-8").replaceAll("%22", "\"");
+            } else {
+                return "";
+            }
+        // POST: the JSON travels in the body
+        } else {
+            return getRequestPostStr(request);
+        }
+    }
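+
+    // A minimal usage sketch (illustrative only), e.g. inside a controller or
+    // filter that runs with a bound request context:
+    //
+    //   HttpServletRequest req = ServletUtils.getRequest();
+    //   if (req != null) {
+    //       String json = ServletUtils.getRequestJsonString(req);
+    //       // ... parse the JSON payload
+    //   }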
+
+    /**
+     * Reads the raw byte[] body of a POST request.
+     *
+     * @param request
+     * @return the request body bytes, or null if the content length is unknown
+     * @throws IOException
+     */
+    public static byte[] getRequestPostBytes(HttpServletRequest request) throws IOException {
+        int contentLength = request.getContentLength();
+        if (contentLength < 0) {
+            return null;
+        }
+        byte[] buffer = new byte[contentLength];
+        // the stream may deliver fewer bytes per read, so loop until the whole body is consumed
+        for (int i = 0; i < contentLength; ) {
+            int readlen = request.getInputStream().read(buffer, i, contentLength - i);
+            if (readlen == -1) {
+                break;
+            }
+            i += readlen;
+        }
+        return buffer;
+    }
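+
+    // A minimal usage sketch (illustrative only): read the body once and keep
+    // it, since the underlying input stream can only be consumed one time.
+    //
+    //   byte[] body = ServletUtils.getRequestPostBytes(request);
+    //   if (body != null) {
+    //       String payload = new String(body, java.nio.charset.StandardCharsets.UTF_8);
+    //   }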
+
+    /**
+     * Reads the POST request body as a string, decoded with the request's
+     * character encoding (UTF-8 if none is set).
+     *
+     * @param request
+     * @return the request body, or null if no body is available
+     * @throws IOException
+     */
+    public static String getRequestPostStr(HttpServletRequest request) throws IOException {
+        byte[] buffer = getRequestPostBytes(request);
+        if (buffer == null) {
+            // guard against the null returned for an unknown content length
+            return null;
+        }
+        String charEncoding = request.getCharacterEncoding();
+        if (charEncoding == null) {
+            charEncoding = "UTF-8";
+        }
+        return new String(buffer, charEncoding);
+    }
+
+    /**
+     * Checks whether the request is an Ajax (asynchronous) request.
+     *
+     * @param request
+     */
+    public static boolean isAjaxRequest(HttpServletRequest request) {
+
+        String accept = request.getHeader("accept");
+        if (accept != null && accept.contains("application/json")) {
+            return true;
+        }
+
+        String xRequestedWith = request.getHeader("X-Requested-With");
+        if (xRequestedWith != null && xRequestedWith.contains("XMLHttpRequest")) {
+            return true;
+        }
+
+        String uri = request.getRequestURI();
+        if (StrUtil.containsAnyIgnoreCase(uri, ".json", ".xml")) {
+            return true;
+        }
+
+        String ajax = request.getParameter("__ajax");
+        if (StrUtil.containsAnyIgnoreCase(ajax, "json", "xml")) {
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Renders a string to the client.
+     *
+     * @param response the response to render into
+     * @param string the string to render
+     * @return null
+     */
+    public static String renderString(HttpServletResponse response, String string) {
+        return renderString(response, string, null);
+    }
+
+    /**
+     * Renders a string to the client.
+     *
+     * @param response the response to render into
+     * @param string the string to render
+     * @return null
+     */
+    public static String renderString(HttpServletResponse response, String string, String type) {
+        try {
+//            response.reset(); // left commented out: resetting would clear headers set earlier, e.g. the remember-me cookie set on Ajax login
+            response.setContentType(type == null ? "application/json" : type);
+            response.setCharacterEncoding("utf-8");
+            response.getWriter().print(string);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        return null;
+    }
+
+    /**
+     * Gets a request parameter value.
+     */
+    public static String getParameter(String name) {
+        HttpServletRequest request = getRequest();
+        if (request == null) {
+            return null;
+        }
+        return request.getParameter(name);
+    }
+
+    /**
+     * Gets the request parameters as a Map.
+     */
+    public static Map<String, Object> getParameters() {
+        return getParameters(getRequest());
+    }
+
+    /**
+     * Gets the request parameters as a Map.
+     */
+    public static Map<String, Object> getParameters(ServletRequest request) {
+        if (request == null) {
+            return CollectionUtil.newHashMap();
+        }
+        return getParametersStartingWith(request, "");
+    }
+
+    /**
+     * Gets the request parameters that share a common prefix; copied from Spring's WebUtils.
+     * The prefix is stripped from the parameter names in the returned map.
+     * See the usage sketch below.
+     */
+    public static Map<String, Object> getParametersStartingWith(ServletRequest request, String prefix) {
+        Enumeration<String> paramNames = request.getParameterNames();
+        Map<String, Object> params = new TreeMap<>();
+        String pre = prefix;
+        if (pre == null) {
+            pre = "";
+        }
+        while (paramNames != null && paramNames.hasMoreElements()) {
+            String paramName = paramNames.nextElement();
+            if ("".equals(pre) || paramName.startsWith(pre)) {
+                String unprefixed = paramName.substring(pre.length());
+                String[] values = request.getParameterValues(paramName);
+                if (values == null || values.length == 0) {
+                    // no values found at all; nothing to put
+                } else if (values.length > 1) {
+                    params.put(unprefixed, values);
+                } else {
+                    params.put(unprefixed, values[0]);
+                }
+            }
+        }
+        return params;
+    }
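+
+    // A minimal usage sketch (illustrative only): for a query string such as
+    // "param_name=ds1&param_type=mysql" (hypothetical parameter names), the
+    // prefix is stripped from the keys:
+    //
+    //   Map<String, Object> ext = ServletUtils.getParametersStartingWith(request, "param_");
+    //   // ext now contains {name=ds1, type=mysql}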
+
+    /**
+     * Builds the query-string part from the given parameters, prepending the
+     * prefix to each parameter name. (Values are not URL-encoded here.)
+     */
+    public static String encodeParameterStringWithPrefix(Map<String, Object> params, String prefix) {
+        StringBuilder queryStringBuilder = new StringBuilder();
+        String pre = prefix;
+        if (pre == null) {
+            pre = "";
+        }
+        Iterator<Entry<String, Object>> it = params.entrySet().iterator();
+        while (it.hasNext()) {
+            Entry<String, Object> entry = it.next();
+            queryStringBuilder.append(pre).append(entry.getKey()).append("=").append(entry.getValue());
+            if (it.hasNext()) {
+                queryStringBuilder.append("&");
+            }
+        }
+        return queryStringBuilder.toString();
+    }
+
+    /**
+     * Extracts the extended parameters from the request: either a JSON string
+     * in the "params" parameter, or parameters whose names start with "param_".
+     *
+     * @param request the request object
+     * @return a Map of the extended parameters
+     */
+    public static Map<String, Object> getExtParams(ServletRequest request) {
+        Map<String, Object> paramMap;
+        String params = StrUtil.trim(request.getParameter(DEFAULT_PARAMS_PARAM));
+        if (StrUtil.isNotBlank(params) && StrUtil.startWith(params, "{")) {
+            paramMap = JSONUtil.parseObj(params);
+        } else {
+            paramMap = getParametersStartingWith(request, DEFAULT_PARAM_PREFIX_PARAM);
+        }
+        return paramMap;
+    }
+
+    /**
+     * Sets the headers that let clients cache the response for the given number of seconds.
+     */
+    public static void setExpiresHeader(HttpServletResponse response, long expiresSeconds) {
+        // HTTP 1.0 header: a fixed expiry date
+        response.setDateHeader(HttpHeaders.EXPIRES, System.currentTimeMillis() + expiresSeconds * 1000);
+        // HTTP 1.1 header: a duration relative to now
+        response.setHeader(HttpHeaders.CACHE_CONTROL, "private, max-age=" + expiresSeconds);
+    }
+
+    /**
+     * Sets the headers that forbid client-side caching.
+     */
+    public static void setNoCacheHeader(HttpServletResponse response) {
+        // HTTP 1.0 headers
+        response.setDateHeader(HttpHeaders.EXPIRES, 1L);
+        response.addHeader(HttpHeaders.PRAGMA, "no-cache");
+        // HTTP 1.1 header
+        response.setHeader(HttpHeaders.CACHE_CONTROL, "no-cache, no-store, max-age=0");
+    }
+
+    /**
+     * Sets the Last-Modified header.
+     */
+    public static void setLastModifiedHeader(HttpServletResponse response, long lastModifiedDate) {
+        response.setDateHeader(HttpHeaders.LAST_MODIFIED, lastModifiedDate);
+    }
+
+    /**
+     * Sets the ETag header.
+     */
+    public static void setEtag(HttpServletResponse response, String etag) {
+        response.setHeader(HttpHeaders.ETAG, etag);
+    }
+
+    /**
+     * Checks against the browser's If-Modified-Since header whether the content has been modified.
+     * If it has not, returns false and sets a 304 Not Modified status.
+     *
+     * @param lastModified the last-modified time of the content
+     */
+    public static boolean checkIfModifiedSince(HttpServletRequest request, HttpServletResponse response,
+                                               long lastModified) {
+        long ifModifiedSince = request.getDateHeader(HttpHeaders.IF_MODIFIED_SINCE);
+        if ((ifModifiedSince != -1) && (lastModified < ifModifiedSince + 1000)) {
+            response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
+            return false;
+        }
+        return true;
+    }
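+
+    // A minimal usage sketch (illustrative only) of conditional-GET handling in
+    // a servlet; "lastModified" and "etag" stand for values computed by the caller:
+    //
+    //   if (!ServletUtils.checkIfModifiedSince(request, response, lastModified)
+    //           || !ServletUtils.checkIfNoneMatchEtag(request, response, etag)) {
+    //       return; // 304 Not Modified has already been set
+    //   }
+    //   ServletUtils.setLastModifiedHeader(response, lastModified);
+    //   ServletUtils.setEtag(response, etag);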
+
+    /**
+     * Checks against the browser's If-None-Match header whether the ETag is still valid.
+     * If the ETag matches, returns false and sets a 304 Not Modified status.
+     *
+     * @param etag the ETag of the content
+     */
+    public static boolean checkIfNoneMatchEtag(HttpServletRequest request, HttpServletResponse response, String etag) {
+        String headerValue = request.getHeader(HttpHeaders.IF_NONE_MATCH);
+        if (headerValue != null) {
+            boolean conditionSatisfied = false;
+            if (!"*".equals(headerValue)) {
+                StringTokenizer commaTokenizer = new StringTokenizer(headerValue, ",");
+
+                while (!conditionSatisfied && commaTokenizer.hasMoreTokens()) {
+                    String currentToken = commaTokenizer.nextToken();
+                    if (currentToken.trim().equals(etag)) {
+                        conditionSatisfied = true;
+                    }
+                }
+            } else {
+                conditionSatisfied = true;
+            }
+
+            if (conditionSatisfied) {
+                response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
+                response.setHeader(HttpHeaders.ETAG, etag);
+                return false;
+            }
+        }
+        return true;
+    }
+
+}
diff --git a/czsj-system/src/main/java/com/czsj/bigdata/util/TestOutput.java b/czsj-system/src/main/java/com/czsj/bigdata/util/TestOutput.java
new file mode 100644
index 0000000..d6b5844
--- /dev/null
+++ b/czsj-system/src/main/java/com/czsj/bigdata/util/TestOutput.java
@@ -0,0 +1,20 @@
+package com.czsj.bigdata.util;
+
+import com.czsj.bigdata.service.RpcService;
+import com.czsj.common.config.RPCClient;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+/**
+ * Ad-hoc test entry point: obtains a remote proxy of RpcService and polls its
+ * monitor data six times.
+ */
+public class TestOutput {
+
+    public static void main(String[] args) throws IOException {
+        for (int i = 0; i <= 5; i++) {
+            RpcService service = RPCClient.getRemoteProxyObj(RpcService.class, new InetSocketAddress("192.168.172.235", 8088));
+            System.out.println(service.getMonitor());
+        }
+    }
+}
diff --git a/czsj-system/src/main/lib/HiveJDBC41.jar b/czsj-system/src/main/lib/HiveJDBC41.jar
new file mode 100644
index 0000000..098d8be
Binary files /dev/null and b/czsj-system/src/main/lib/HiveJDBC41.jar differ
diff --git a/czsj-system/src/main/lib/ojdbc6-11.2.0.3.jar b/czsj-system/src/main/lib/ojdbc6-11.2.0.3.jar
new file mode 100644
index 0000000..01da074
Binary files /dev/null and b/czsj-system/src/main/lib/ojdbc6-11.2.0.3.jar differ
diff --git a/czsj-system/src/main/lib/sql-1.10.0.jar b/czsj-system/src/main/lib/sql-1.10.0.jar
new file mode 100644
index 0000000..a8e609f
Binary files /dev/null and b/czsj-system/src/main/lib/sql-1.10.0.jar differ
diff --git a/czsj-system/src/main/lib/sqljdbc4-4.0.jar b/czsj-system/src/main/lib/sqljdbc4-4.0.jar
new file mode 100644
index 0000000..d6b7f6d
Binary files /dev/null and b/czsj-system/src/main/lib/sqljdbc4-4.0.jar differ