【feat】基于若水,调整system模块依赖

This commit is contained in:
Kris 2024-12-31 14:17:51 +08:00
parent d5ce99fe14
commit adae3e447d
244 changed files with 18004 additions and 97 deletions

View File

@ -32,101 +32,101 @@ import com.czsj.common.utils.StringUtils;
@Configuration
public class MyBatisConfig
{
    @Autowired
    private Environment env;

    /** Classpath pattern appended to each alias package when scanning for classes. */
    static final String DEFAULT_RESOURCE_PATTERN = "**/*.class";

    /**
     * Resolves a comma-separated list of type-alias packages into the distinct set of
     * packages that actually contain classes on the classpath.
     *
     * @param typeAliasesPackage comma-separated package names to scan
     * @return comma-separated list of packages in which at least one class was found
     * @throws RuntimeException when none of the given packages contain any class
     */
    public static String setTypeAliasesPackage(String typeAliasesPackage)
    {
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        MetadataReaderFactory metadataReaderFactory = new CachingMetadataReaderFactory(resolver);
        List<String> allResult = new ArrayList<String>();
        try
        {
            for (String aliasesPackage : typeAliasesPackage.split(","))
            {
                List<String> result = new ArrayList<String>();
                aliasesPackage = ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX
                        + ClassUtils.convertClassNameToResourcePath(aliasesPackage.trim()) + "/" + DEFAULT_RESOURCE_PATTERN;
                Resource[] resources = resolver.getResources(aliasesPackage);
                if (resources != null && resources.length > 0)
                {
                    for (Resource resource : resources)
                    {
                        if (resource.isReadable())
                        {
                            MetadataReader metadataReader = metadataReaderFactory.getMetadataReader(resource);
                            try
                            {
                                // Record the package of every loadable class found under the alias package.
                                result.add(Class.forName(metadataReader.getClassMetadata().getClassName()).getPackage().getName());
                            }
                            catch (ClassNotFoundException e)
                            {
                                e.printStackTrace();
                            }
                        }
                    }
                }
                if (!result.isEmpty())
                {
                    // De-duplicate package names before accumulating.
                    allResult.addAll(new HashSet<String>(result));
                }
            }
            if (!allResult.isEmpty())
            {
                typeAliasesPackage = String.join(",", allResult);
            }
            else
            {
                throw new RuntimeException("mybatis typeAliasesPackage 路径扫描错误,参数typeAliasesPackage:" + typeAliasesPackage + "未找到任何包");
            }
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
        return typeAliasesPackage;
    }

    /**
     * Expands mapper-location patterns (e.g. {@code classpath*:mapper/**}{@code /*.xml})
     * into concrete {@link Resource}s.
     *
     * @param mapperLocations location patterns; may be {@code null}
     * @return the resolved resources; empty when nothing matches or input is {@code null}
     */
    public Resource[] resolveMapperLocations(String[] mapperLocations)
    {
        ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver();
        List<Resource> resources = new ArrayList<Resource>();
        if (mapperLocations != null)
        {
            for (String mapperLocation : mapperLocations)
            {
                try
                {
                    resources.addAll(Arrays.asList(resourceResolver.getResources(mapperLocation)));
                }
                catch (IOException e)
                {
                    // Unresolvable location patterns are skipped deliberately.
                }
            }
        }
        return resources.toArray(new Resource[0]);
    }

    /**
     * Builds the MyBatis {@link SqlSessionFactory} from the
     * {@code mybatis.typeAliasesPackage}, {@code mybatis.mapperLocations} and
     * {@code mybatis.configLocation} properties.
     *
     * @param dataSource the application datasource the factory is bound to
     * @return the fully configured session factory
     * @throws Exception if the factory cannot be built
     */
    @Bean
    public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception
    {
        String typeAliasesPackage = env.getProperty("mybatis.typeAliasesPackage");
        String mapperLocations = env.getProperty("mybatis.mapperLocations");
        String configLocation = env.getProperty("mybatis.configLocation");
        typeAliasesPackage = setTypeAliasesPackage(typeAliasesPackage);
        // Register the Spring Boot VFS so classpath scanning works inside fat jars.
        VFS.addImplClass(SpringBootVFS.class);

        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(dataSource);
        sessionFactory.setTypeAliasesPackage(typeAliasesPackage);
        sessionFactory.setMapperLocations(resolveMapperLocations(StringUtils.split(mapperLocations, ",")));
        sessionFactory.setConfigLocation(new DefaultResourceLoader().getResource(configLocation));
        return sessionFactory.getObject();
    }
}

View File

@ -23,6 +23,754 @@
<artifactId>czsj-common</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.oracle</groupId>-->
<!-- <artifactId>ojdbc6</artifactId>-->
<!-- <version>11.2.0.3</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/lib/ojdbc6-11.2.0.3.jar</systemPath>-->
<!-- </dependency>-->
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>sqljdbc4</artifactId>
<version>4.0</version>
<scope>system</scope>
<systemPath>${basedir}/src/main/lib/sqljdbc4-4.0.jar</systemPath>
</dependency>
<dependency>
<groupId>ch.ethz.ganymed</groupId>
<artifactId>ganymed-ssh2</artifactId>
<version>262</version>
</dependency>
<dependency>
<groupId>com.czsj</groupId>
<artifactId>czsj-core</artifactId>
<version>3.8.8</version>
<scope>compile</scope>
</dependency>
<!-- Mybatis Plus -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatisplus.version}</version>
<exclusions>
<exclusion>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-generator</artifactId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatisplus.version}</version>
</dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<version>1.4.7</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.12.0</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<version>1.6.2</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-extension</artifactId>
<!-- NOTE(review): pinned to 3.3.1 while the other MyBatis-Plus artifacts use ${mybatisplus.version} — confirm the version mismatch is intentional -->
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>com.sun.jersey</artifactId>
<groupId>jersey-json</groupId>
</exclusion>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>jettison</artifactId>
<groupId>org.codehaus.jettison</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>commons-collections</artifactId>
<groupId>commons-collections</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang</artifactId>
<groupId>commons-lang</groupId>
</exclusion>
<exclusion>
<artifactId>curator-framework</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
<exclusions>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>jettison</artifactId>
<groupId>org.codehaus.jettison</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>curator-client</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>commons-compress</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-hdfs</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>snappy</artifactId>
<groupId>org.iq80.snappy</groupId>
</exclusion>
<exclusion>
<artifactId>antlr-runtime</artifactId>
<groupId>org.antlr</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-client</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>libthrift</artifactId>
<groupId>org.apache.thrift</groupId>
</exclusion>
<exclusion>
<artifactId>twill-common</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-core</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-discovery-api</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-discovery-core</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-zookeeper</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>avro</artifactId>
<groupId>org.apache.avro</groupId>
</exclusion>
<exclusion>
<artifactId>curator-recipes</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-common</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop2-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>curator-framework</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>guice-servlet</artifactId>
<groupId>com.google.inject.extensions</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-client</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-api</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-jaxrs</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-xc</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-client</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jamon-runtime</artifactId>
<groupId>org.jamon</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>commons-collections</artifactId>
<groupId>commons-collections</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-guice</artifactId>
<groupId>com.sun.jersey.contribs</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-slf4j-impl</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>hive-shims-common</artifactId>
<groupId>org.apache.hive.shims</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet</artifactId>
<groupId>org.eclipse.jetty.orbit</groupId>
</exclusion>
<exclusion>
<artifactId>jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>jasper-compiler</artifactId>
<groupId>tomcat</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-all</artifactId>
<groupId>org.eclipse.jetty.aggregate</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-auth</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-annotations</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-protocol</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>${phoenix.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>avro</artifactId>
<groupId>org.apache.avro</groupId>
</exclusion>
<exclusion>
<artifactId>guice</artifactId>
<groupId>com.google.inject</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-api</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-auth</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-core</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>guice-servlet</artifactId>
<groupId>com.google.inject.extensions</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-server</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-json</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-client</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>commons-io</artifactId>
<groupId>commons-io</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>zookeeper</artifactId>
<groupId>org.apache.zookeeper</groupId>
</exclusion>
<exclusion>
<artifactId>commons-math3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-hdfs</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-client</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-server-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>javax.ws.rs-api</artifactId>
<groupId>javax.ws.rs</groupId>
</exclusion>
<exclusion>
<artifactId>htrace-core</artifactId>
<groupId>org.apache.htrace</groupId>
</exclusion>
<exclusion>
<artifactId>jline</artifactId>
<groupId>jline</groupId>
</exclusion>
<exclusion>
<artifactId>fastutil</artifactId>
<groupId>it.unimi.dsi</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang</artifactId>
<groupId>commons-lang</groupId>
</exclusion>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-common</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet.jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-io</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-http</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-security</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-server</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlet</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>${mongo-java-driver.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.2.4</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt</artifactId>
<version>${jjwt.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>29.0-jre</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>3.0.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>net.sourceforge.jtds</groupId>
<artifactId>jtds</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.2.8</version>
<scope>compile</scope>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.czsj</groupId>-->
<!-- <artifactId>czsj-flink-web-common</artifactId>-->
<!-- <version>3.8.2</version>-->
<!-- <scope>compile</scope>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.czsj</groupId>-->
<!-- <artifactId>czsj-flink-alarm</artifactId>-->
<!-- <version>3.8.2</version>-->
<!-- <scope>compile</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>com.alibaba.nacos</groupId>
<artifactId>nacos-api</artifactId>
<version>2.0.4</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.czsj</groupId>-->
<!-- <artifactId>czsj-flink-web-config</artifactId>-->
<!-- <version>3.8.2</version>-->
<!-- <scope>compile</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
</dependency>
<dependency>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct</artifactId>
<version>1.3.1.Final</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-openfeign-core</artifactId>
<version>3.0.3</version>
</dependency>
<dependency>
<groupId>org.springframework.security.oauth</groupId>
<artifactId>spring-security-oauth2</artifactId>
<version>2.3.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
<version>2.2.12.RELEASE</version>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
<filtering>true</filtering>
</resource>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
<include>**/templates/**</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
</build>
</project>

View File

@ -0,0 +1,45 @@
package com.czsj.bigdata.config;
import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
import com.baomidou.mybatisplus.core.injector.ISqlInjector;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
*
* @Author: czsj
* @Date: 2022/9/16 11:14
* @Description:
**/
@EnableTransactionManagement
@Configuration
@MapperScan("com.czsj.bigdata.mapper")
public class MybatisPlusConfig {

    /**
     * Registers the MyBatis-Plus pagination interceptor with overflow
     * handling enabled ({@code setOverflow(true)}).
     */
    @Bean
    public PaginationInterceptor paginationInterceptor() {
        return new PaginationInterceptor().setOverflow(true);
    }

    /**
     * SQL injector for MyBatis-Plus logic delete (enable it in the yml config).
     * In version 3.0.7.1 {@code LogicSqlInjector} merely extends
     * {@link DefaultSqlInjector}; later versions removed it entirely,
     * so the default injector is used here.
     *
     * @return the default SQL injector
     */
    @Bean
    public ISqlInjector sqlInjector() {
        return new DefaultSqlInjector();
    }
}

View File

@ -0,0 +1,83 @@
package com.czsj.bigdata.core.conf;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/**
*
*
* @Date: 2022/1/20 22:52
* @Description:
**/
@Component
public class ExcecutorConfig implements InitializingBean, DisposableBean {

    // Datasource settings injected from application properties.
    @Value("${spring.datasource.url}")
    private String url;

    @Value("${spring.datasource.driver-class-name}")
    private String driverClassname;

    @Value("${spring.datasource.username}")
    private String username;

    @Value("${spring.datasource.password}")
    private String password;

    // Static handle to the Spring-managed bean so non-bean code can read the config.
    private static ExcecutorConfig instance = null;

    public static ExcecutorConfig getExcecutorConfig() {
        return instance;
    }

    public static void setExcecutorConfig(ExcecutorConfig excecutorConfig) {
        ExcecutorConfig.instance = excecutorConfig;
    }

    /** Publishes this bean through the static handle once properties are bound. */
    @Override
    public void afterPropertiesSet() throws Exception {
        instance = this;
    }

    /** Nothing to release on shutdown. */
    @Override
    public void destroy() throws Exception {
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getDriverClassname() {
        return driverClassname;
    }

    public void setDriverClassname(String driverClassname) {
        this.driverClassname = driverClassname;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}

View File

@ -0,0 +1,166 @@
package com.czsj.bigdata.core.conf;
import com.czsj.bigdata.core.scheduler.JobScheduler;
import com.czsj.bigdata.core.util.EmailUtil;
import com.czsj.bigdata.mapper.*;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.beans.factory.annotation.Autowired;
import javax.sql.DataSource;
/**
* xxl-job config
*
* @author xuxueli 2017-04-28
*/
@Component
public class JobAdminConfig implements InitializingBean, DisposableBean {

    // Static handle to the Spring-managed singleton, set in afterPropertiesSet.
    private static JobAdminConfig adminConfig = null;

    public static JobAdminConfig getAdminConfig() {
        return adminConfig;
    }

    // ---------------------- XxlJobScheduler ----------------------

    private JobScheduler xxlJobScheduler;

    /** Publishes the singleton handle and starts the embedded job scheduler. */
    @Override
    public void afterPropertiesSet() throws Exception {
        adminConfig = this;

        xxlJobScheduler = new JobScheduler();
        xxlJobScheduler.init();
    }

    /** Stops the embedded job scheduler on shutdown. */
    @Override
    public void destroy() throws Exception {
        xxlJobScheduler.destroy();
    }

    // ---------------------- XxlJobScheduler ----------------------

    // conf
    @Value("${datax.job.i18n}")
    private String i18n;

    @Value("${datax.job.accessToken}")
    private String accessToken;

    @Value("${spring.mail.username}")
    private String emailUserName;

    @Value("${spring.mail.password}")
    private String emailPassword;

    @Value("${spring.mail.authorization}")
    private String emailAuthorization;

    @Value("${datax.job.triggerpool.fast.max}")
    private int triggerPoolFastMax;

    @Value("${datax.job.triggerpool.slow.max}")
    private int triggerPoolSlowMax;

    @Value("${datax.job.logretentiondays}")
    private int logretentiondays;

    @Value("${datasource.aes.key}")
    private String dataSourceAESKey;

    // dao, service
    @Autowired
    private JobLogMapper jobLogMapper;
    @Autowired
    private JobInfoMapper jobInfoMapper;
    @Autowired
    private JobRegistryMapper jobRegistryMapper;
    @Autowired
    private JobGroupMapper jobGroupMapper;
    @Autowired
    private JobLogReportMapper jobLogReportMapper;
    @Autowired
    private DataSource dataSource;
    @Autowired
    private JobDatasourceMapper jobDatasourceMapper;

    public String getI18n() {
        return i18n;
    }

    public String getAccessToken() {
        return accessToken;
    }

    public String getEmailUserName() {
        return emailUserName;
    }

    /** @return the configured fast-trigger pool size, floored at 200 */
    public int getTriggerPoolFastMax() {
        return Math.max(200, triggerPoolFastMax);
    }

    /** @return the configured slow-trigger pool size, floored at 100 */
    public int getTriggerPoolSlowMax() {
        return Math.max(100, triggerPoolSlowMax);
    }

    /** @return configured log retention in days, or -1 (keep forever) when below 7 */
    public int getLogretentiondays() {
        return logretentiondays < 7 ? -1 : logretentiondays;
    }

    public JobLogMapper getJobLogMapper() {
        return jobLogMapper;
    }

    public JobInfoMapper getJobInfoMapper() {
        return jobInfoMapper;
    }

    public JobRegistryMapper getJobRegistryMapper() {
        return jobRegistryMapper;
    }

    public JobGroupMapper getJobGroupMapper() {
        return jobGroupMapper;
    }

    public JobLogReportMapper getJobLogReportMapper() {
        return jobLogReportMapper;
    }

    public String getEmailPassword() {
        return emailPassword;
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public JobDatasourceMapper getJobDatasourceMapper() {
        return jobDatasourceMapper;
    }

    public String getDataSourceAESKey() {
        return dataSourceAESKey;
    }

    public void setDataSourceAESKey(String dataSourceAESKey) {
        this.dataSourceAESKey = dataSourceAESKey;
    }

    public String getEmailAuthorization() {
        return emailAuthorization;
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,50 @@
package com.czsj.bigdata.core.handler;
import com.czsj.bigdata.util.AESUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* @author water
* @date 20-03-17 下午5:38
*/
@MappedTypes({String.class})
public class AESEncryptHandler extends BaseTypeHandler<String> {

    /**
     * Writes the parameter AES-encrypted; blank values are stored as SQL NULL
     * instead of being encrypted.
     */
    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, String parameter, JdbcType jdbcType) throws SQLException {
        if (StringUtils.isBlank(parameter)) {
            ps.setString(i, null);
        } else {
            ps.setString(i, AESUtil.encrypt(parameter));
        }
    }

    /** Reads the named column and returns its AES-decrypted value. */
    @Override
    public String getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnName));
    }

    /** Reads the indexed column and returns its AES-decrypted value. */
    @Override
    public String getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnIndex));
    }

    /** Reads the indexed output parameter and returns its AES-decrypted value. */
    @Override
    public String getNullableResult(CallableStatement cs, int columnIndex)
            throws SQLException {
        return AESUtil.decrypt(cs.getString(columnIndex));
    }
}

View File

@ -0,0 +1,42 @@
package com.czsj.bigdata.core.kill;
import com.czsj.bigdata.core.trigger.JobTrigger;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import com.czsj.core.enums.ExecutorBlockStrategyEnum;
import com.czsj.core.glue.GlueTypeEnum;
import java.util.Date;
/**
* datax-job trigger
* Created by jingwk on 2019/12/15.
*/
public class KillJob {

    /**
     * Build and dispatch a kill request ("killJobHandler") to one executor.
     *
     * @param logId       id of the job-log entry whose run should be killed
     * @param triggerTime timestamp recorded as the trigger's log date-time
     * @param address     target executor address; when null, fails immediately
     * @param processId   process id to terminate on the executor
     * @return the executor's response, or FAIL when no address is available
     */
    public static ReturnT<String> trigger(long logId, Date triggerTime, String address, String processId) {
        if (address == null) {
            return new ReturnT<>(ReturnT.FAIL_CODE, null);
        }
        TriggerParam killParam = new TriggerParam();
        killParam.setJobId(-1);
        killParam.setExecutorHandler("killJobHandler");
        killParam.setProcessId(processId);
        killParam.setLogId(logId);
        killParam.setGlueType(GlueTypeEnum.BEAN.getDesc());
        killParam.setExecutorBlockStrategy(ExecutorBlockStrategyEnum.SERIAL_EXECUTION.getTitle());
        killParam.setLogDateTime(triggerTime.getTime());
        return JobTrigger.runExecutor(killParam, address);
    }
}

View File

@ -0,0 +1,49 @@
package com.czsj.bigdata.core.route;
import com.czsj.bigdata.core.route.strategy.*;
import com.czsj.bigdata.core.util.I18nUtil;
/**
* Created by xuxueli on 17/3/10.
*/
public enum ExecutorRouteStrategyEnum {

    FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()),
    LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()),
    ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()),
    RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()),
    CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()),
    LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()),
    LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()),
    FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()),
    BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()),
    SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null);

    /** Localized display name of the strategy. */
    private final String title;
    /** Router implementation; null for SHARDING_BROADCAST. */
    private final ExecutorRouter router;

    ExecutorRouteStrategyEnum(String title, ExecutorRouter router) {
        this.title = title;
        this.router = router;
    }

    public String getTitle() {
        return title;
    }

    public ExecutorRouter getRouter() {
        return router;
    }

    /**
     * Look up a strategy by its enum name, falling back to {@code defaultItem}
     * (instead of throwing) for null or unknown names.
     */
    public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem){
        if (name == null) {
            return defaultItem;
        }
        for (ExecutorRouteStrategyEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return defaultItem;
    }
}

View File

@ -0,0 +1,24 @@
package com.czsj.bigdata.core.route;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
/** Base class for executor routing strategies; implementations pick one address from a list. */
public abstract class ExecutorRouter {
    // Shared by all subclasses; note it logs under ExecutorRouter, not the concrete class.
    protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class);

    /**
     * route address
     *
     * @param triggerParam the trigger being routed (e.g. jobId is used by hash/LFU/LRU strategies)
     * @param addressList registered executor addresses to choose from
     * @return ReturnT.content=address
     */
    public abstract ReturnT<String> route(TriggerParam triggerParam, List<String> addressList);
}

View File

@ -0,0 +1,49 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.bigdata.core.scheduler.JobScheduler;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.core.biz.ExecutorBiz;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteBusyover extends ExecutorRouter {

    /**
     * Probe each executor with an idle-beat and route to the first idle one.
     * When none is idle, returns FAIL carrying the accumulated probe log.
     */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuilder probeLog = new StringBuilder();
        for (String candidate : addressList) {
            // idle-beat probe; a thrown exception counts as a failed probe
            ReturnT<String> probeResult;
            try {
                ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(candidate);
                probeResult = executorBiz.idleBeat(triggerParam.getJobId());
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                probeResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e);
            }
            if (probeLog.length() > 0) {
                probeLog.append("<br><br>");
            }
            probeLog.append(I18nUtil.getString("jobconf_idleBeat"))
                    .append("<br>address").append(candidate)
                    .append("<br>code").append(probeResult.getCode())
                    .append("<br>msg").append(probeResult.getMsg());
            // first idle executor wins
            if (probeResult.getCode() == ReturnT.SUCCESS_CODE) {
                probeResult.setMsg(probeLog.toString());
                probeResult.setContent(candidate);
                return probeResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, probeLog.toString());
    }
}

View File

@ -0,0 +1,86 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* 分组下机器地址相同不同JOB均匀散列在不同机器上保证分组下机器分配JOB平均且每个JOB固定调度其中一台机器
* avirtual node解决不均衡问题
* bhash method replace hashCodeString的hashCode可能重复需要进一步扩大hashCode的取值范围
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteConsistentHash extends ExecutorRouter {

    /** Virtual nodes per physical address, smoothing distribution on the ring. */
    private static final int VIRTUAL_NODE_NUM = 100;

    /**
     * get hash code on 2^32 ring (MD5-based hash).
     * Uses MD5 instead of String.hashCode to widen the value range and reduce collisions.
     *
     * @param key value to hash
     * @return hash in [0, 2^32)
     */
    private static long hash(String key) {
        MessageDigest md5;
        try {
            md5 = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("MD5 not supported", e);
        }
        md5.reset();
        // StandardCharsets.UTF_8 yields the same bytes as getBytes("UTF-8")
        // without the impossible UnsupportedEncodingException path.
        md5.update(key.getBytes(StandardCharsets.UTF_8));
        byte[] digest = md5.digest();

        // Combine four digest bytes and truncate to 32 bits (unsigned).
        long hashCode = ((long) (digest[3] & 0xFF) << 24)
                | ((long) (digest[2] & 0xFF) << 16)
                | ((long) (digest[1] & 0xFF) << 8)
                | (digest[0] & 0xFF);
        return hashCode & 0xffffffffL;
    }

    /**
     * Map a job onto the consistent-hash ring built from the address list and
     * return the first address clockwise from the job's hash.
     *
     * @param jobId       job identifier (each job sticks to one address)
     * @param addressList registered executor addresses
     * @return the selected address
     */
    public String hashJob(int jobId, List<String> addressList) {

        // ------A1------A2-------A3------
        // -----------J1------------------
        TreeMap<Long, String> addressRing = new TreeMap<Long, String>();
        for (String address: addressList) {
            for (int i = 0; i < VIRTUAL_NODE_NUM; i++) {
                long addressHash = hash("SHARD-" + address + "-NODE-" + i);
                addressRing.put(addressHash, address);
            }
        }

        long jobHash = hash(String.valueOf(jobId));
        // first node at or after the job's position; wrap to the ring start if none
        SortedMap<Long, String> lastRing = addressRing.tailMap(jobHash);
        if (!lastRing.isEmpty()) {
            return lastRing.get(lastRing.firstKey());
        }
        return addressRing.firstEntry().getValue();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = hashJob(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@ -0,0 +1,50 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.bigdata.core.scheduler.JobScheduler;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.core.biz.ExecutorBiz;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteFailover extends ExecutorRouter {

    /**
     * Heartbeat each executor in order and route to the first one that responds.
     * When none responds, returns FAIL carrying the accumulated beat log.
     */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuilder beatLog = new StringBuilder();
        for (String candidate : addressList) {
            // heartbeat probe; a thrown exception counts as a failed beat
            ReturnT<String> beatResult;
            try {
                ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(candidate);
                beatResult = executorBiz.beat();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e);
            }
            if (beatLog.length() > 0) {
                beatLog.append("<br><br>");
            }
            beatLog.append(I18nUtil.getString("jobconf_beat"))
                    .append("<br>address").append(candidate)
                    .append("<br>code").append(beatResult.getCode())
                    .append("<br>msg").append(beatResult.getMsg());
            // first live executor wins
            if (beatResult.getCode() == ReturnT.SUCCESS_CODE) {
                beatResult.setMsg(beatLog.toString());
                beatResult.setContent(candidate);
                return beatResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, beatLog.toString());
    }
}

View File

@ -0,0 +1,21 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteFirst extends ExecutorRouter {

    /** Always route to the first registered executor address. */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList){
        String first = addressList.get(0);
        return new ReturnT<String>(first);
    }
}

View File

@ -0,0 +1,79 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* 单个JOB对应的每个执行器使用频率最低的优先被选举
* a(*)LFU(Least Frequently Used)最不经常使用频率/次数
* bLRU(Least Recently Used)最近最久未使用时间
*
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLFU extends ExecutorRouter {

    /** Per-job usage counters: jobId -> (address -> times selected). Reset daily. */
    private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
    private static long CACHE_VALID_TIME = 0;

    /**
     * Pick the least-frequently-used address for the job and bump its counter.
     *
     * @param jobId       job whose LFU table is consulted
     * @param addressList currently registered executor addresses
     * @return the address with the lowest usage count
     */
    public String route(int jobId, List<String> addressList) {

        // cache clear: drop all counters once a day to bound memory growth
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLfuMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // lfu item init
        HashMap<String, Integer> lfuItemMap = jobLfuMap.get(jobId);
        if (lfuItemMap == null) {
            lfuItemMap = new HashMap<String, Integer>();
            // use the map that actually won the race so all threads share one table
            // (the original ignored putIfAbsent's return and could update an orphan map)
            HashMap<String, Integer> existing = jobLfuMap.putIfAbsent(jobId, lfuItemMap);
            if (existing != null) {
                lfuItemMap = existing;
            }
        }

        // put new: seed unseen (or overflowed) addresses with a small random count
        // to spread load right after startup/reset
        Random seed = new Random(); // hoisted: one PRNG per call instead of one per address
        for (String address: addressList) {
            if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) > 1000000) {
                lfuItemMap.put(address, seed.nextInt(addressList.size()));
            }
        }

        // remove old: drop addresses that are no longer registered
        List<String> delKeys = new ArrayList<>();
        for (String existKey: lfuItemMap.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        for (String delKey: delKeys) {
            lfuItemMap.remove(delKey);
        }

        // load least-used address: sort entries by count ascending and take the head
        List<Map.Entry<String, Integer>> lfuItemList = new ArrayList<Map.Entry<String, Integer>>(lfuItemMap.entrySet());
        Collections.sort(lfuItemList, new Comparator<Map.Entry<String, Integer>>() {
            @Override
            public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
                return o1.getValue().compareTo(o2.getValue());
            }
        });

        Map.Entry<String, Integer> leastUsed = lfuItemList.get(0); // removed the unused minAddress local
        leastUsed.setValue(leastUsed.getValue() + 1);
        return leastUsed.getKey();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@ -0,0 +1,76 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* 单个JOB对应的每个执行器最久为使用的优先被选举
* aLFU(Least Frequently Used)最不经常使用频率/次数
* b(*)LRU(Least Recently Used)最近最久未使用时间
*
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLRU extends ExecutorRouter {

    // Per-job LRU table: jobId -> access-ordered LinkedHashMap of address -> address.
    private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
    // Epoch millis after which all LRU tables are discarded (daily reset).
    private static long CACHE_VALID_TIME = 0;

    /**
     * Pick the least-recently-used executor address for the given job.
     *
     * @param jobId       job whose LRU table is consulted
     * @param addressList currently registered executor addresses
     * @return the least-recently-used address (which becomes most-recently-used)
     */
    public String route(int jobId, List<String> addressList) {

        // cache clear: drop all tables once a day to bound memory growth
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLRUMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // init lru
        LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
        if (lruItem == null) {
            /**
             * LinkedHashMap
             * a. accessOrder: true = order by access (get/put re-sorts), false = insertion order.
             * b. removeEldestEntry: called on insert; returning true evicts the eldest entry —
             *    overriding it with a max-capacity check yields a fixed-size LRU map.
             */
            lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
            jobLRUMap.putIfAbsent(jobId, lruItem);
            // NOTE(review): putIfAbsent's return value is ignored; a racing thread may
            // keep using a map that lost the race. Same pattern exists in the LFU router.
        }

        // put new: register addresses not yet tracked
        for (String address: addressList) {
            if (!lruItem.containsKey(address)) {
                lruItem.put(address, address);
            }
        }

        // remove old: drop addresses that are no longer registered
        List<String> delKeys = new ArrayList<>();
        for (String existKey: lruItem.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        if (delKeys.size() > 0) {
            for (String delKey: delKeys) {
                lruItem.remove(delKey);
            }
        }

        // load: head of an access-ordered map is the least-recently-used entry.
        // The get() below is NOT redundant — it touches the entry, promoting it to
        // most-recently-used so the next call picks a different address.
        String eldestKey = lruItem.entrySet().iterator().next().getKey();
        String eldestValue = lruItem.get(eldestKey);
        return eldestValue;
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@ -0,0 +1,21 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteLast extends ExecutorRouter {

    /** Always route to the last registered executor address. */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int lastIndex = addressList.size() - 1;
        return new ReturnT<String>(addressList.get(lastIndex));
    }
}

View File

@ -0,0 +1,23 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
import java.util.Random;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteRandom extends ExecutorRouter {

    /** Shared PRNG; java.util.Random is thread-safe, so one instance suffices. */
    private static Random localRandom = new Random();

    /** Route to a uniformly random address from the list. */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int pick = localRandom.nextInt(addressList.size());
        return new ReturnT<String>(addressList.get(pick));
    }
}

View File

@ -0,0 +1,40 @@
package com.czsj.bigdata.core.route.strategy;
import com.czsj.bigdata.core.route.ExecutorRouter;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Created by xuxueli on 17/3/10.
*/
public class ExecutorRouteRound extends ExecutorRouter {

    /** Per-job invocation counter used for round-robin selection; reset daily. */
    private static ConcurrentMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>();
    private static long CACHE_VALID_TIME = 0;

    /**
     * Advance and return the round-robin counter for a job.
     * Seeds with a small random value (and re-seeds past 1,000,000) to spread
     * first-call load across executors.
     */
    private static int count(int jobId) {
        // cache clear: drop all counters once a day to bound memory growth
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            routeCountEachJob.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // compute() makes the read-increment-write atomic; the original
        // get-then-put pair could lose increments under concurrent triggers
        return routeCountEachJob.compute(jobId, (key, current) ->
                (current == null || current > 1000000) ? new Random().nextInt(100) : current + 1);
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = addressList.get(count(triggerParam.getJobId())%addressList.size());
        return new ReturnT<String>(address);
    }
}

View File

@ -0,0 +1,114 @@
package com.czsj.bigdata.core.scheduler;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.core.thread.*;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.core.biz.ExecutorBiz;
import com.czsj.core.enums.ExecutorBlockStrategyEnum;
import com.czsj.rpc.remoting.invoker.call.CallType;
import com.czsj.rpc.remoting.invoker.reference.XxlRpcReferenceBean;
import com.czsj.rpc.remoting.invoker.route.LoadBalance;
import com.czsj.rpc.remoting.net.impl.netty_http.client.NettyHttpClient;
import com.czsj.rpc.serialize.impl.HessianSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author xuxueli 2018-10-28 00:18:17
*/
public class JobScheduler {
    private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);

    /**
     * Boot the admin scheduler: i18n first, then the monitor/report helper
     * threads and the trigger pool, and finally the schedule loop.
     */
    public void init() throws Exception {
        // init i18n
        initI18n();

        // admin registry monitor run
        JobRegistryMonitorHelper.getInstance().start();

        // admin monitor run
        JobFailMonitorHelper.getInstance().start();

        // admin trigger pool start
        JobTriggerPoolHelper.toStart();

        // admin log report start
        JobLogReportHelper.getInstance().start();

        // start-schedule
        JobScheduleHelper.getInstance().start();

        logger.info(">>>>>>>>> init czsj-ground admin success.");
    }

    /**
     * Shut everything down in the reverse order of {@link #init()}.
     */
    public void destroy() throws Exception {

        // stop-schedule
        JobScheduleHelper.getInstance().toStop();

        // admin log report stop
        JobLogReportHelper.getInstance().toStop();

        // admin trigger pool stop
        JobTriggerPoolHelper.toStop();

        // admin monitor stop
        JobFailMonitorHelper.getInstance().toStop();

        // admin registry stop
        JobRegistryMonitorHelper.getInstance().toStop();
    }

    // ---------------------- I18n ----------------------

    // Localize the block-strategy titles once at startup.
    private void initI18n() {
        for (ExecutorBlockStrategyEnum item : ExecutorBlockStrategyEnum.values()) {
            item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
        }
    }

    // ---------------------- executor-client ----------------------

    // Cache of RPC proxies keyed by executor address.
    private static ConcurrentMap<String, ExecutorBiz> executorBizRepository = new ConcurrentHashMap<>();

    /**
     * Get (or lazily build and cache) an RPC client proxy for the given executor address.
     *
     * @param address executor address; null/blank yields null
     * @return cached or newly created ExecutorBiz proxy, or null for a blank address
     */
    public static ExecutorBiz getExecutorBiz(String address) throws Exception {
        // valid
        if (address == null || address.trim().length() == 0) {
            return null;
        }

        // load-cache
        address = address.trim();
        ExecutorBiz executorBiz = executorBizRepository.get(address);
        if (executorBiz != null) {
            return executorBiz;
        }

        // set-cache: synchronous Netty-HTTP client with Hessian serialization, 3s timeout.
        // NOTE(review): concurrent callers may each build a proxy; the last put wins —
        // presumably harmless, but confirm proxies hold no exclusive resources.
        XxlRpcReferenceBean referenceBean = new XxlRpcReferenceBean();
        referenceBean.setClient(NettyHttpClient.class);
        referenceBean.setSerializer(HessianSerializer.class);
        referenceBean.setCallType(CallType.SYNC);
        referenceBean.setLoadBalance(LoadBalance.ROUND);
        referenceBean.setIface(ExecutorBiz.class);
        referenceBean.setVersion(null);
        referenceBean.setTimeout(3000);
        referenceBean.setAddress(address);
        referenceBean.setAccessToken(JobAdminConfig.getAdminConfig().getAccessToken());
        referenceBean.setInvokeCallback(null);
        referenceBean.setInvokerFactory(null);

        executorBiz = (ExecutorBiz) referenceBean.getObject();

        executorBizRepository.put(address, executorBiz);
        return executorBiz;
    }
}

View File

@ -0,0 +1,201 @@
package com.czsj.bigdata.core.thread;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.core.trigger.TriggerTypeEnum;
import com.czsj.bigdata.core.util.EmailUtil;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.bigdata.entity.JobGroup;
import com.czsj.bigdata.entity.JobInfo;
import com.czsj.bigdata.entity.JobLog;
import com.czsj.core.biz.model.ReturnT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.internet.MimeMessage;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* job monitor instance
*
* @author xuxueli 2015-9-1 18:05:56
*/
public class JobFailMonitorHelper {
    private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class);

    // Eagerly initialized singleton.
    private static JobFailMonitorHelper instance = new JobFailMonitorHelper();
    public static JobFailMonitorHelper getInstance(){
        return instance;
    }

    // ---------------------- monitor ----------------------

    private Thread monitorThread;
    // Cooperative stop flag; volatile so the monitor loop observes toStop().
    private volatile boolean toStop = false;

    /**
     * Start the fail-monitor daemon: every 10s it claims failed job logs,
     * re-triggers runs that still have retry budget, and sends alarm e-mails.
     */
    public void start(){
        monitorThread = new Thread(new Runnable() {

            @Override
            public void run() {

                // monitor
                while (!toStop) {
                    try {

                        List<Long> failLogIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findFailJobLogIds(1000);
                        if (failLogIds!=null && !failLogIds.isEmpty()) {
                            for (long failLogId: failLogIds) {

                                // lock log: claim the entry by flipping alarm status 0 -> -1;
                                // skip when another admin instance claimed it first
                                int lockRet = JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, 0, -1);
                                if (lockRet < 1) {
                                    continue;
                                }
                                JobLog log = JobAdminConfig.getAdminConfig().getJobLogMapper().load(failLogId);
                                JobInfo info = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(log.getJobId());

                                // 1. fail retry monitor: re-trigger with one less retry in the budget
                                if (log.getExecutorFailRetryCount() > 0) {
                                    JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam());
                                    String retryMsg = "<br><br><span style=\"color:#F39C12;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<< </span><br>";
                                    log.setTriggerMsg(log.getTriggerMsg() + retryMsg);
                                    JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(log);
                                }

                                // 2. fail alarm monitor
                                int newAlarmStatus = 0; // alarm status: 0=default, -1=locked, 1=no alarm needed, 2=alarm sent, 3=alarm failed
                                if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {
                                    boolean alarmResult = true;
                                    try {
                                        alarmResult = failAlarm(info, log);
                                    } catch (Exception e) {
                                        alarmResult = false;
                                        logger.error(e.getMessage(), e);
                                    }
                                    newAlarmStatus = alarmResult?2:3;
                                } else {
                                    newAlarmStatus = 1;
                                }

                                // release the lock by moving -1 to the final status
                                JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, -1, newAlarmStatus);
                            }
                        }

                    } catch (Exception e) {
                        if (!toStop) {
                            // NOTE(review): "{0}" is MessageFormat syntax; SLF4J expects "{}" —
                            // the placeholder is never substituted (stack trace still logs).
                            logger.error(">>>>>>>>>>> czsj-ground, job fail monitor thread error:{0}", e);
                        }
                    }

                    try {
                        TimeUnit.SECONDS.sleep(10);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }

                }

                logger.info(">>>>>>>>>>> czsj-ground, job fail monitor thread stop");

            }
        });
        monitorThread.setDaemon(true);
        monitorThread.setName("czsj-ground, admin JobFailMonitorHelper");
        monitorThread.start();
    }

    /** Stop the monitor thread and wait for it to finish. */
    public void toStop(){
        toStop = true;
        // interrupt and wait
        monitorThread.interrupt();
        try {
            monitorThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

    // ---------------------- alarm ----------------------

    // email alarm template
    private static final String mailBodyTemplate = "<h5>" + I18nUtil.getString("jobconf_monitor_detail") + "</span>" +
            "<table border=\"1\" cellpadding=\"3\" style=\"border-collapse:collapse; width:80%;\" >\n" +
            " <thead style=\"font-weight: bold;color: #ffffff;background-color: #ff8c00;\" >" +
            " <tr>\n" +
            " <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobgroup") +"</td>\n" +
            " <td width=\"10%\" >"+ I18nUtil.getString("jobinfo_field_id") +"</td>\n" +
            " <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobdesc") +"</td>\n" +
            " <td width=\"10%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_title") +"</td>\n" +
            " <td width=\"40%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_content") +"</td>\n" +
            " </tr>\n" +
            " </thead>\n" +
            " <tbody>\n" +
            " <tr>\n" +
            " <td>{0}</td>\n" +
            " <td>{1}</td>\n" +
            " <td>{2}</td>\n" +
            " <td>"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"</td>\n" +
            " <td>{3}</td>\n" +
            " </tr>\n" +
            " </tbody>\n" +
            "</table>";

    /**
     * fail alarm
     *
     * @param info   the job whose run failed (alarm recipients come from its alarmEmail)
     * @param jobLog the failed run's log entry
     * @return true when every alarm e-mail was sent (or none was needed)
     */
    private boolean failAlarm(JobInfo info, JobLog jobLog){
        boolean alarmResult = true;

        // send monitor email
        if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {

            // alarmContent
            String alarmContent = "Alarm Job LogId=" + jobLog.getId();
            if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
            }
            if (jobLog.getHandleCode()>0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>HandleCode=" + jobLog.getHandleMsg();
            }

            // email info
            JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(Integer.valueOf(info.getJobGroup()));
            String personal = I18nUtil.getString("admin_name_full");
            String title = I18nUtil.getString("jobconf_monitor");
            String content = MessageFormat.format(mailBodyTemplate,
                    group!=null?group.getTitle():"null",
                    info.getId(),
                    info.getJobDesc(),
                    alarmContent);

            Set<String> emailSet = new HashSet<String>(Arrays.asList(info.getAlarmEmail().split(",")));
            for (String email: emailSet) {

                // make mail
                try {
                    EmailUtil.send(JobAdminConfig.getAdminConfig().getEmailUserName(), JobAdminConfig.getAdminConfig().getEmailPassword(),JobAdminConfig.getAdminConfig().getEmailAuthorization(),email,title,content);
                } catch (Exception e) {
                    logger.error(">>>>>>>>>>> czsj-ground, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e);

                    alarmResult = false;
                }

            }
        }

        // do something, custom alarm strategy, such as sms
        return alarmResult;
    }

}

View File

@ -0,0 +1,153 @@
package com.czsj.bigdata.core.thread;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.entity.JobLogReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* job log report helper
*
* @author xuxueli 2019-11-22
*/
public class JobLogReportHelper {
    private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class);

    // Eagerly initialized singleton.
    private static JobLogReportHelper instance = new JobLogReportHelper();
    public static JobLogReportHelper getInstance(){
        return instance;
    }

    private Thread logrThread;
    // Cooperative stop flag; volatile so the loop observes toStop().
    private volatile boolean toStop = false;

    /**
     * Start the log-report daemon: every minute it refreshes the per-day
     * success/fail/running counts for the last 3 days, and once a day deletes
     * logs older than the configured retention period.
     */
    public void start(){
        logrThread = new Thread(new Runnable() {

            @Override
            public void run() {

                // last clean log time
                long lastCleanLogTime = 0;

                while (!toStop) {

                    // 1. log-report refresh: refresh log report in 3 days
                    try {

                        for (int i = 0; i < 3; i++) {

                            // compute [00:00:00.000, 23:59:59.999] bounds of day (today - i)
                            Calendar itemDay = Calendar.getInstance();
                            itemDay.add(Calendar.DAY_OF_MONTH, -i);
                            itemDay.set(Calendar.HOUR_OF_DAY, 0);
                            itemDay.set(Calendar.MINUTE, 0);
                            itemDay.set(Calendar.SECOND, 0);
                            itemDay.set(Calendar.MILLISECOND, 0);

                            Date todayFrom = itemDay.getTime();

                            itemDay.set(Calendar.HOUR_OF_DAY, 23);
                            itemDay.set(Calendar.MINUTE, 59);
                            itemDay.set(Calendar.SECOND, 59);
                            itemDay.set(Calendar.MILLISECOND, 999);

                            Date todayTo = itemDay.getTime();

                            // refresh log-report every minute
                            JobLogReport xxlJobLogReport = new JobLogReport();
                            xxlJobLogReport.setTriggerDay(todayFrom);
                            xxlJobLogReport.setRunningCount(0);
                            xxlJobLogReport.setSucCount(0);
                            xxlJobLogReport.setFailCount(0);

                            Map<String, Object> triggerCountMap = JobAdminConfig.getAdminConfig().getJobLogMapper().findLogReport(todayFrom, todayTo);
                            if (triggerCountMap!=null && triggerCountMap.size()>0) {
                                int triggerDayCount = triggerCountMap.containsKey("triggerDayCount")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))):0;
                                int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))):0;
                                int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))):0;
                                // fail = total - running - success (no explicit fail count in the query result)
                                int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc;

                                xxlJobLogReport.setRunningCount(triggerDayCountRunning);
                                xxlJobLogReport.setSucCount(triggerDayCountSuc);
                                xxlJobLogReport.setFailCount(triggerDayCountFail);
                            }

                            // do refresh: update in place, insert when the row does not exist yet
                            int ret = JobAdminConfig.getAdminConfig().getJobLogReportMapper().update(xxlJobLogReport);
                            if (ret < 1) {
                                JobAdminConfig.getAdminConfig().getJobLogReportMapper().save(xxlJobLogReport);
                            }
                        }

                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(">>>>>>>>>>> czsj-ground, job log report thread error:{}", e);
                        }
                    }

                    // 2. log-clean: switch open & once each day
                    if (JobAdminConfig.getAdminConfig().getLogretentiondays()>0
                            && System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) {

                        // expire-time: midnight of (today - retention days)
                        Calendar expiredDay = Calendar.getInstance();
                        expiredDay.add(Calendar.DAY_OF_MONTH, -1 * JobAdminConfig.getAdminConfig().getLogretentiondays());
                        expiredDay.set(Calendar.HOUR_OF_DAY, 0);
                        expiredDay.set(Calendar.MINUTE, 0);
                        expiredDay.set(Calendar.SECOND, 0);
                        expiredDay.set(Calendar.MILLISECOND, 0);
                        Date clearBeforeTime = expiredDay.getTime();

                        // clean expired log, in batches of 1000 until none remain
                        List<Long> logIds = null;
                        do {
                            logIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findClearLogIds(0, 0, clearBeforeTime, 0, 1000);
                            if (logIds!=null && logIds.size()>0) {
                                JobAdminConfig.getAdminConfig().getJobLogMapper().clearLog(logIds);
                            }
                        } while (logIds!=null && logIds.size()>0);

                        // update clean time
                        lastCleanLogTime = System.currentTimeMillis();
                    }

                    try {
                        TimeUnit.MINUTES.sleep(1);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }

                }

                logger.info(">>>>>>>>>>> czsj-ground, job log report thread stop");

            }
        });
        logrThread.setDaemon(true);
        logrThread.setName("czsj-ground, admin JobLogReportHelper");
        logrThread.start();
    }

    /** Stop the report thread and wait for it to finish. */
    public void toStop(){
        toStop = true;
        // interrupt and wait
        logrThread.interrupt();
        try {
            logrThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

}

View File

@ -0,0 +1,109 @@
package com.czsj.bigdata.core.thread;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.entity.JobGroup;
import com.czsj.bigdata.entity.JobRegistry;
import com.czsj.core.enums.RegistryConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* job registry instance
* @author xuxueli 2016-10-02 19:10:24
*/
public class JobRegistryMonitorHelper {
private static Logger logger = LoggerFactory.getLogger(JobRegistryMonitorHelper.class);
private static JobRegistryMonitorHelper instance = new JobRegistryMonitorHelper();
public static JobRegistryMonitorHelper getInstance(){
return instance;
}
private Thread registryThread;
private volatile boolean toStop = false;
public void start(){
registryThread = new Thread(() -> {
while (!toStop) {
try {
// auto registry group
List<JobGroup> groupList = JobAdminConfig.getAdminConfig().getJobGroupMapper().findByAddressType(0);
if (groupList!=null && !groupList.isEmpty()) {
// remove dead address (admin/executor)
List<Integer> ids = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findDead(RegistryConfig.DEAD_TIMEOUT, new Date());
if (ids!=null && ids.size()>0) {
JobAdminConfig.getAdminConfig().getJobRegistryMapper().removeDead(ids);
}
// fresh online address (admin/executor)
HashMap<String, List<String>> appAddressMap = new HashMap<>();
List<JobRegistry> list = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
if (list != null) {
for (JobRegistry item: list) {
if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
String appName = item.getRegistryKey();
List<String> registryList = appAddressMap.get(appName);
if (registryList == null) {
registryList = new ArrayList<>();
}
if (!registryList.contains(item.getRegistryValue())) {
registryList.add(item.getRegistryValue());
}
appAddressMap.put(appName, registryList);
}
}
}
// fresh group address
for (JobGroup group: groupList) {
List<String> registryList = appAddressMap.get(group.getAppName());
String addressListStr = null;
if (registryList!=null && !registryList.isEmpty()) {
Collections.sort(registryList);
addressListStr = "";
for (String item:registryList) {
addressListStr += item + ",";
}
addressListStr = addressListStr.substring(0, addressListStr.length()-1);
}
group.setAddressList(addressListStr);
JobAdminConfig.getAdminConfig().getJobGroupMapper().update(group);
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(">>>>>>>>>>> czsj-ground, job registry monitor thread error:{}", e);
}
}
try {
TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
} catch (InterruptedException e) {
if (!toStop) {
logger.error(">>>>>>>>>>> czsj-ground, job registry monitor thread error:{}", e);
}
}
}
logger.info(">>>>>>>>>>> czsj-ground, job registry monitor thread stop");
});
registryThread.setDaemon(true);
registryThread.setName("czsj-ground, admin JobRegistryMonitorHelper");
registryThread.start();
}
/**
 * Stop the registry monitor: raise the stop flag, interrupt the daemon
 * thread (which may be sleeping for BEAT_TIMEOUT seconds) and wait for it
 * to terminate before returning.
 */
public void toStop(){
    toStop = true;
    // interrupt and wait
    registryThread.interrupt();
    try {
        registryThread.join();
    } catch (InterruptedException e) {
        // NOTE(review): interrupt status is not restored here — confirm callers do not rely on it
        logger.error(e.getMessage(), e);
    }
}
}

View File

@ -0,0 +1,350 @@
package com.czsj.bigdata.core.thread;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.core.cron.CronExpression;
import com.czsj.bigdata.core.trigger.TriggerTypeEnum;
import com.czsj.bigdata.entity.JobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
* @author xuxueli 2019-05-21
*/
public class JobScheduleHelper {
    private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class);
    // Eagerly-created singleton; the class only holds scheduler state.
    private static JobScheduleHelper instance = new JobScheduleHelper();
    public static JobScheduleHelper getInstance() {
        return instance;
    }
    public static final long PRE_READ_MS = 5000; // pre-read window: jobs due within the next 5s are pre-loaded
    // DB-scanning thread: pre-reads due jobs and either fires them or parks them on the time ring.
    private Thread scheduleThread;
    // Per-second consumer of the time ring.
    private Thread ringThread;
    private volatile boolean scheduleThreadToStop = false;
    private volatile boolean ringThreadToStop = false;
    // time ring: second-of-minute slot (0..59) -> job ids to trigger at that tick
    private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>();
    /**
     * Start both scheduler daemons: the DB scan thread and the time-ring thread.
     */
    public void start() {
        // schedule thread
        scheduleThread = new Thread(new Runnable() {
            @Override
            public void run() {
                // align the first scan to roughly a 5s boundary
                try {
                    TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis() % 1000);
                } catch (InterruptedException e) {
                    if (!scheduleThreadToStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
                logger.info(">>>>>>>>> init czsj-ground admin scheduler success.");
                // pre-read count: threadpool-size * trigger-qps (each trigger cost 50ms, qps = 1000/50 = 20)
                int preReadCount = (JobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20;
                while (!scheduleThreadToStop) {
                    // Scan Job
                    long start = System.currentTimeMillis();
                    Connection conn = null;
                    Boolean connAutoCommit = null;
                    PreparedStatement preparedStatement = null;
                    boolean preReadSuc = true;
                    try {
                        conn = JobAdminConfig.getAdminConfig().getDataSource().getConnection();
                        connAutoCommit = conn.getAutoCommit();
                        conn.setAutoCommit(false);
                        // row lock on job_lock serializes scheduling across multiple admin nodes
                        preparedStatement = conn.prepareStatement("select * from job_lock where lock_name = 'schedule_lock' for update");
                        preparedStatement.execute();
                        // tx start
                        // 1) pre-read: load jobs due before now + PRE_READ_MS
                        long nowTime = System.currentTimeMillis();
                        List<JobInfo> scheduleList = JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount);
                        if (scheduleList != null && scheduleList.size() > 0) {
                            // 2) push to time ring / fire directly, per how overdue each job is
                            for (JobInfo jobInfo : scheduleList) {
                                // time-ring jump
                                if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) {
                                    // 2.1) trigger expired by more than 5s: misfire -> skip, just compute next time
                                    logger.warn(">>>>>>>>>>> czsj-ground, schedule misfire, jobId = " + jobInfo.getId());
                                    // fresh next
                                    refreshNextValidTime(jobInfo, new Date());
                                } else if (nowTime > jobInfo.getTriggerNextTime()) {
                                    // 2.2) trigger expired by less than 5s: fire now, then compute next time
                                    // a) trigger
                                    JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null);
                                    logger.debug(">>>>>>>>>>> czsj-ground, schedule push trigger : jobId = " + jobInfo.getId());
                                    // b) fresh next
                                    refreshNextValidTime(jobInfo, new Date());
                                    // next-trigger-time in 5s, pre-read again
                                    if (jobInfo.getTriggerStatus() == 1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) {
                                        // a) make ring second
                                        int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
                                        // b) push time ring
                                        pushTimeRing(ringSecond, jobInfo.getId());
                                        // c) fresh next
                                        refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
                                    }
                                } else {
                                    // 2.3) not yet due (within pre-read window): park on the time ring
                                    // a) make ring second
                                    int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
                                    // b) push time ring
                                    pushTimeRing(ringSecond, jobInfo.getId());
                                    // c) fresh next
                                    refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
                                }
                            }
                            // 3) persist updated trigger bookkeeping
                            for (JobInfo jobInfo : scheduleList) {
                                JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleUpdate(jobInfo);
                            }
                        } else {
                            preReadSuc = false;
                        }
                        // tx stop
                    } catch (Exception e) {
                        if (!scheduleThreadToStop) {
                            // NOTE(review): "{}" with only a Throwable argument — SLF4J logs the stack
                            // trace but leaves the placeholder literal in the message
                            logger.error(">>>>>>>>>>> czsj-ground, JobScheduleHelper#scheduleThread error:{}", e);
                        }
                    } finally {
                        // commit, restore autocommit, close — each step isolated so one failure
                        // does not prevent the others
                        if (conn != null) {
                            try {
                                conn.commit();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                            try {
                                // NOTE(review): connAutoCommit could still be null if getAutoCommit()
                                // threw after the connection was obtained — unboxing would NPE; confirm
                                conn.setAutoCommit(connAutoCommit);
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                            try {
                                conn.close();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                        }
                        // close PreparedStatement
                        if (null != preparedStatement) {
                            try {
                                preparedStatement.close();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                        }
                    }
                    long cost = System.currentTimeMillis() - start;
                    // Wait seconds, align second
                    if (cost < 1000) { // scan-overtime, not wait
                        try {
                            // pre-read period: success > scan each second; fail > skip this period;
                            TimeUnit.MILLISECONDS.sleep((preReadSuc ? 1000 : PRE_READ_MS) - System.currentTimeMillis() % 1000);
                        } catch (InterruptedException e) {
                            if (!scheduleThreadToStop) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }
                }
                logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper#scheduleThread stop");
            }
        });
        scheduleThread.setDaemon(true);
        scheduleThread.setName("czsj-ground, admin JobScheduleHelper#scheduleThread");
        scheduleThread.start();
        // ring thread
        ringThread = new Thread(() -> {
            // align second
            try {
                TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
            } catch (InterruptedException e) {
                if (!ringThreadToStop) {
                    logger.error(e.getMessage(), e);
                }
            }
            while (!ringThreadToStop) {
                try {
                    // second data
                    List<Integer> ringItemData = new ArrayList<>();
                    int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // also drain the previous slot, in case a beat overran and skipped one tick
                    for (int i = 0; i < 2; i++) {
                        List<Integer> tmpData = ringData.remove((nowSecond + 60 - i) % 60);
                        if (tmpData != null) {
                            ringItemData.addAll(tmpData);
                        }
                    }
                    // ring trigger
                    logger.debug(">>>>>>>>>>> czsj-ground, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData));
                    if (ringItemData.size() > 0) {
                        // do trigger
                        for (int jobId : ringItemData) {
                            // do trigger
                            JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
                        }
                        // clear
                        ringItemData.clear();
                    }
                } catch (Exception e) {
                    if (!ringThreadToStop) {
                        logger.error(">>>>>>>>>>> czsj-ground, JobScheduleHelper#ringThread error:{}", e);
                    }
                }
                // next second, align second
                try {
                    TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
                } catch (InterruptedException e) {
                    if (!ringThreadToStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper#ringThread stop");
        });
        ringThread.setDaemon(true);
        ringThread.setName("czsj-ground, admin JobScheduleHelper#ringThread");
        ringThread.start();
    }
    /**
     * Advance the job's trigger bookkeeping to the next cron occurrence after fromTime.
     * When the cron expression yields no further occurrence the job is disabled
     * (trigger status 0, last/next times reset).
     */
    private void refreshNextValidTime(JobInfo jobInfo, Date fromTime) throws ParseException {
        Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(fromTime);
        if (nextValidTime != null) {
            jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
            jobInfo.setTriggerNextTime(nextValidTime.getTime());
        } else {
            jobInfo.setTriggerStatus(0);
            jobInfo.setTriggerLastTime(0);
            jobInfo.setTriggerNextTime(0);
        }
    }
    /**
     * Park a job id on the time ring at the given second-of-minute slot.
     */
    private void pushTimeRing(int ringSecond, int jobId) {
        // push async ring
        List<Integer> ringItemData = ringData.get(ringSecond);
        if (ringItemData == null) {
            ringItemData = new ArrayList<Integer>();
            ringData.put(ringSecond, ringItemData);
        }
        ringItemData.add(jobId);
        logger.debug(">>>>>>>>>>> czsj-ground, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData));
    }
    /**
     * Stop both scheduler threads: raise flags, grace-sleep, interrupt and join.
     * Waits up to 8s for pending ring data to drain before stopping the ring thread.
     */
    public void toStop() {
        // 1) stop schedule thread
        scheduleThreadToStop = true;
        try {
            TimeUnit.SECONDS.sleep(1); // wait
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
        if (scheduleThread.getState() != Thread.State.TERMINATED) {
            // interrupt and wait
            scheduleThread.interrupt();
            try {
                scheduleThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
        // if has ring data
        boolean hasRingData = false;
        if (!ringData.isEmpty()) {
            for (int second : ringData.keySet()) {
                List<Integer> tmpData = ringData.get(second);
                if (tmpData != null && tmpData.size() > 0) {
                    hasRingData = true;
                    break;
                }
            }
        }
        if (hasRingData) {
            try {
                TimeUnit.SECONDS.sleep(8);
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
        // stop ring (wait job-in-memory stop)
        ringThreadToStop = true;
        try {
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
        if (ringThread.getState() != Thread.State.TERMINATED) {
            // interrupt and wait
            ringThread.interrupt();
            try {
                ringThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
        logger.info(">>>>>>>>>>> czsj-ground, JobScheduleHelper stop");
    }
}

View File

@ -0,0 +1,133 @@
package com.czsj.bigdata.core.thread;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.core.trigger.JobTrigger;
import com.czsj.bigdata.core.trigger.TriggerTypeEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* job trigger thread pool helper
*
* @author xuxueli 2018-07-03 21:08:07
*/
public class JobTriggerPoolHelper {
    private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
    // ---------------------- trigger pool ----------------------
    // fast/slow thread pool: jobs that repeatedly run slow are demoted to the
    // slow pool so they cannot starve well-behaved jobs.
    private ThreadPoolExecutor fastTriggerPool = null;
    private ThreadPoolExecutor slowTriggerPool = null;
    /**
     * Create both trigger pools (core 10 threads, max from admin config,
     * bounded queues of 1000/2000).
     */
    public void start() {
        fastTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(1000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
                    }
                });
        slowTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(2000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "czsj-ground, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
                    }
                });
    }
    /** Immediately shut both pools down, abandoning queued triggers. */
    public void stop() {
        //triggerPool.shutdown();
        fastTriggerPool.shutdownNow();
        slowTriggerPool.shutdownNow();
        logger.info(">>>>>>>>> czsj-ground trigger thread pool shutdown success.");
    }
    // job timeout count, reset every minute
    private volatile long minTim = System.currentTimeMillis() / 60000; // ms > min
    private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
    /**
     * add trigger: pick the fast or slow pool for this job and submit the
     * trigger asynchronously, tracking per-job slow-run counts.
     */
    public void addTrigger(final int jobId, final TriggerTypeEnum triggerType, final int failRetryCount, final String executorShardingParam, final String executorParam) {
        // choose thread pool
        ThreadPoolExecutor triggerPool_ = fastTriggerPool;
        AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
        if (jobTimeoutCount != null && jobTimeoutCount.get() > 10) { // job-timeout 10 times in 1 min
            triggerPool_ = slowTriggerPool;
        }
        // trigger
        triggerPool_.execute(() -> {
            long start = System.currentTimeMillis();
            try {
                // do trigger
                JobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            } finally {
                // check timeout-count-map: reset the counters at each minute boundary.
                // NOTE(review): check-then-set on volatile minTim is racy across pool
                // threads; worst case the map is cleared twice — harmless for a heuristic.
                long minTim_now = System.currentTimeMillis() / 60000;
                if (minTim != minTim_now) {
                    minTim = minTim_now;
                    jobTimeoutCountMap.clear();
                }
                // incr timeout-count-map
                long cost = System.currentTimeMillis() - start;
                if (cost > 500) { // job-timeout threshold 500ms
                    AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
                    if (timeoutCount != null) {
                        timeoutCount.incrementAndGet();
                    }
                }
            }
        });
    }
    // ---------------------- helper ----------------------
    private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();
    public static void toStart() {
        helper.start();
    }
    public static void toStop() {
        helper.stop();
    }
    /**
     * @param jobId job to trigger
     * @param triggerType what initiated the trigger (cron/manual/retry/...)
     * @param failRetryCount >=0: use this param
     *                       <0: use param from job info config
     * @param executorShardingParam sharding override, "index/total" or null
     * @param executorParam null: use job param
     *                      not null: cover job param
     */
    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
        helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
    }
}

View File

@ -0,0 +1,263 @@
package com.czsj.bigdata.core.trigger;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import com.czsj.bigdata.core.route.ExecutorRouteStrategyEnum;
import com.czsj.bigdata.core.scheduler.JobScheduler;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.bigdata.entity.JobDatasource;
import com.czsj.bigdata.entity.JobGroup;
import com.czsj.bigdata.entity.JobInfo;
import com.czsj.bigdata.entity.JobLog;
import com.czsj.bigdata.tool.query.BaseQueryTool;
import com.czsj.bigdata.tool.query.QueryToolFactory;
import com.czsj.bigdata.util.JSONUtils;
import com.czsj.core.biz.ExecutorBiz;
import com.czsj.core.biz.model.ReturnT;
import com.czsj.core.biz.model.TriggerParam;
import com.czsj.core.enums.ExecutorBlockStrategyEnum;
import com.czsj.core.enums.IncrementTypeEnum;
import com.czsj.core.glue.GlueTypeEnum;
import com.czsj.rpc.util.IpUtil;
import com.czsj.rpc.util.ThrowableUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
/**
* xxl-job trigger
* Created by xuxueli on 17/7/13.
*/
public class JobTrigger {
    private static Logger logger = LoggerFactory.getLogger(JobTrigger.class);
    /**
     * trigger job: load the job, resolve retry count and sharding, then fire
     * one trigger per shard (broadcast) or a single trigger.
     *
     * @param jobId job to trigger
     * @param triggerType what initiated the trigger
     * @param failRetryCount >=0: use this param
     *                       <0: use param from job info config
     * @param executorShardingParam sharding override, "index/total" or null
     * @param executorParam null: use job param
     *                      not null: cover job param
     */
    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
        JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
        if (jobInfo == null) {
            logger.warn(">>>>>>>>>>>> trigger fail, jobId invalidjobId={}", jobId);
            return;
        }
        if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) {
            // decrypt datasource account/password embedded in the job json
            String json = JSONUtils.changeJson(jobInfo.getJobJson(), JSONUtils.decrypt);
            jobInfo.setJobJson(json);
        }
        if (StringUtils.isNotBlank(executorParam)) {
            jobInfo.setExecutorParam(executorParam);
        }
        // caller-supplied retry count wins over the job's configured one
        int finalFailRetryCount = failRetryCount >= 0 ? failRetryCount : jobInfo.getExecutorFailRetryCount();
        JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(jobInfo.getJobGroup());
        // sharding param, parsed from "index/total"
        int[] shardingParam = null;
        if (executorShardingParam != null) {
            String[] shardingArr = executorShardingParam.split("/");
            if (shardingArr.length == 2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
                shardingParam = new int[2];
                shardingParam[0] = Integer.valueOf(shardingArr[0]);
                shardingParam[1] = Integer.valueOf(shardingArr[1]);
            }
        }
        // broadcast: fan out one trigger per registered executor; otherwise single trigger
        if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
                && group.getRegistryList() != null && !group.getRegistryList().isEmpty()
                && shardingParam == null) {
            for (int i = 0; i < group.getRegistryList().size(); i++) {
                processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
            }
        } else {
            if (shardingParam == null) {
                shardingParam = new int[]{0, 1};
            }
            processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
        }
    }
    // true when str parses as a signed 32-bit integer
    private static boolean isNumeric(String str) {
        try {
            int result = Integer.valueOf(str);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }
    /**
     * Fire one trigger for one shard: persist a stub log row, build the
     * trigger param, route to an executor address, call it, then persist the
     * collected trigger result back onto the log row.
     *
     * @param group job group, registry list may be empty
     * @param jobInfo job definition being fired
     * @param finalFailRetryCount resolved retry count for this trigger
     * @param triggerType what initiated the trigger
     * @param index sharding index
     * @param total sharding total
     */
    private static void processTrigger(JobGroup group, JobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total) {
        TriggerParam triggerParam = new TriggerParam();
        // param
        ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION); // block strategy
        ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null); // route strategy
        String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) ? String.valueOf(index).concat("/").concat(String.valueOf(total)) : null;
        // 1) save log row first so its generated id can go into the trigger param
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date());
        calendar.set(Calendar.MILLISECOND, 0); // truncate trigger time to whole seconds
        Date triggerTime = calendar.getTime();
        JobLog jobLog = new JobLog();
        jobLog.setJobGroup(jobInfo.getJobGroup());
        jobLog.setJobId(jobInfo.getId());
        jobLog.setTriggerTime(triggerTime);
        jobLog.setJobDesc(jobInfo.getJobDesc());
        JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
        logger.debug(">>>>>>>>>>> czsj-ground trigger start, jobId:{}", jobLog.getId());
        // 2) init trigger-param
        triggerParam.setJobId(jobInfo.getId());
        triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
        triggerParam.setExecutorParams(jobInfo.getExecutorParam());
        triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
        triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
        triggerParam.setLogId(jobLog.getId());
        triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
        triggerParam.setGlueType(jobInfo.getGlueType());
        triggerParam.setGlueSource(jobInfo.getGlueSource());
        triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
        triggerParam.setBroadcastIndex(index);
        triggerParam.setBroadcastTotal(total);
        triggerParam.setJobJson(jobInfo.getJobJson());
        // increment parameter: ID / TIME / PARTITION incremental sync settings
        Integer incrementType = jobInfo.getIncrementType();
        if (incrementType != null) {
            triggerParam.setIncrementType(incrementType);
            if (IncrementTypeEnum.ID.getCode() == incrementType) {
                long maxId = getMaxId(jobInfo);
                jobLog.setMaxId(maxId);
                triggerParam.setEndId(maxId);
                if(maxId != 0){
                    triggerParam.setStartId(maxId);
                    jobInfo.setIncStartId(maxId);
                }else{
                    // fall back to the previously recorded start id
                    triggerParam.setStartId(jobInfo.getIncStartId());
                }
            } else if (IncrementTypeEnum.TIME.getCode() == incrementType) {
                triggerParam.setStartTime(jobInfo.getIncStartTime());
                triggerParam.setTriggerTime(triggerTime);
                triggerParam.setReplaceParamType(jobInfo.getReplaceParamType());
            } else if (IncrementTypeEnum.PARTITION.getCode() == incrementType) {
                triggerParam.setPartitionInfo(jobInfo.getPartitionInfo());
            }
            triggerParam.setReplaceParam(jobInfo.getReplaceParam());
        }
        // jvm parameter
        triggerParam.setJvmParam(jobInfo.getJvmParam());
        // 3) init address: pick an executor via sharding index or route strategy
        String address = null;
        ReturnT<String> routeAddressResult = null;
        if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) {
            if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
                if (index < group.getRegistryList().size()) {
                    address = group.getRegistryList().get(index);
                } else {
                    address = group.getRegistryList().get(0);
                }
            } else {
                routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
                if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
                    address = routeAddressResult.getContent();
                }
            }
        } else {
            routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
        }
        // 4) trigger remote executor
        ReturnT<String> triggerResult = null;
        if (address != null) {
            triggerResult = runExecutor(triggerParam, address);
        } else {
            triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
        }
        // 5) collect trigger info into an HTML message for the log row
        StringBuilder triggerMsgSb = new StringBuilder();
        triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append("").append(triggerType.getTitle());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append("").append(IpUtil.getIp());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append("")
                .append((group.getAddressType() == 0) ? I18nUtil.getString("jobgroup_field_addressType_0") : I18nUtil.getString("jobgroup_field_addressType_1"));
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append("").append(group.getRegistryList());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append("").append(executorRouteStrategyEnum.getTitle());
        if (shardingParam != null) {
            triggerMsgSb.append("(" + shardingParam + ")");
        }
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append("").append(blockStrategy.getTitle());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append("").append(jobInfo.getExecutorTimeout());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append("").append(finalFailRetryCount);
        triggerMsgSb.append("<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_run") + "<<<<<<<<<<< </span><br>")
                .append((routeAddressResult != null && routeAddressResult.getMsg() != null) ? routeAddressResult.getMsg() + "<br><br>" : "").append(triggerResult.getMsg() != null ? triggerResult.getMsg() : "");
        // 6) save trigger-info back onto the log row (and persist updated inc state)
        jobLog.setExecutorAddress(address);
        jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
        jobLog.setExecutorParam(jobInfo.getExecutorParam());
        jobLog.setExecutorShardingParam(shardingParam);
        jobLog.setExecutorFailRetryCount(finalFailRetryCount);
        jobLog.setTriggerCode(triggerResult.getCode());
        jobLog.setTriggerMsg(triggerMsgSb.toString());
        JobAdminConfig.getAdminConfig().getJobInfoMapper().update(jobInfo);
        JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(jobLog);
        logger.debug(">>>>>>>>>>> czsj-ground trigger end, jobId:{}", jobLog.getId());
    }
    // current max primary-key value of the reader table, used for id-incremental sync
    private static long getMaxId(JobInfo jobInfo) {
        JobDatasource datasource = JobAdminConfig.getAdminConfig().getJobDatasourceMapper().selectById(jobInfo.getDatasourceId());
        BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource);
        return qTool.getMaxIdVal(jobInfo.getReaderTable(), jobInfo.getPrimaryKey());
    }
    /**
     * run executor: invoke the remote executor and normalize any exception
     * into a FAIL ReturnT with the stack trace as its message.
     *
     * @param triggerParam fully-populated trigger parameters
     * @param address executor address to call
     * @return the executor's result (never null), message annotated with address/code/msg
     */
    public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address) {
        ReturnT<String> runResult = null;
        try {
            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
            runResult = executorBiz.run(triggerParam);
        } catch (Exception e) {
            logger.error(">>>>>>>>>>> czsj-ground trigger error, please check if the executor[{}] is running.", address, e);
            runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.toString(e));
        }
        StringBuilder runResultSB = new StringBuilder(I18nUtil.getString("jobconf_trigger_run") + "");
        runResultSB.append("<br>address").append(address);
        runResultSB.append("<br>code").append(runResult.getCode());
        runResultSB.append("<br>msg").append(runResult.getMsg());
        runResult.setMsg(runResultSB.toString());
        return runResult;
    }
}

View File

@ -0,0 +1,27 @@
package com.czsj.bigdata.core.trigger;
import com.czsj.bigdata.core.util.I18nUtil;
/**
* trigger type enum
*
* @author xuxueli 2018-09-16 04:56:41
*/
public enum TriggerTypeEnum {
    MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")),
    CRON(I18nUtil.getString("jobconf_trigger_type_cron")),
    RETRY(I18nUtil.getString("jobconf_trigger_type_retry")),
    PARENT(I18nUtil.getString("jobconf_trigger_type_parent")),
    API(I18nUtil.getString("jobconf_trigger_type_api"));

    /** Localized display title, resolved once when the enum class initializes. */
    private final String title;

    TriggerTypeEnum(String title) {
        this.title = title;
    }

    /** @return the localized, human-readable title of this trigger type */
    public String getTitle() {
        return title;
    }
}

View File

@ -0,0 +1,56 @@
package com.czsj.bigdata.core.util;
import com.sun.mail.util.MailSSLSocketFactory;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import java.util.Properties;
public class EmailUtil {
    /**
     * Send an HTML mail through QQ's SMTP server (smtp.qq.com over SSL).
     *
     * @param emailUserName      sender account (also used as the From address)
     * @param emailPassword      credential passed to transport.connect
     * @param emailAuthorization SMTP authorization code used by the Authenticator
     * @param SJemailUserName    recipient address
     * @param title              mail subject
     * @param msg                mail body, sent as text/html in UTF-8
     * @throws Exception on any SSL/mail failure
     */
    public static void send(String emailUserName,String emailPassword,String emailAuthorization,String SJemailUserName,String title,String msg) throws Exception{
        Properties properties = new Properties();
        // QQ mail server
        properties.setProperty("mail.host","smtp.qq.com");
        // mail transport protocol
        properties.setProperty("mail.transport.protocol","smtp");
        // username/password authentication required
        properties.setProperty("mail.smtp.auth","true");
        // enable SSL; trusts ALL hosts, i.e. no certificate validation
        MailSSLSocketFactory mailSSLSocketFactory = new MailSSLSocketFactory();
        mailSSLSocketFactory.setTrustAllHosts(true);
        properties.put("mail.smtp.ssl.enable","true");
        properties.put("mail.smtp.ssl.socketFactory",mailSSLSocketFactory);
        // Five steps of sending mail with JavaMail
        // 1. create the Session carrying the environment configuration
        Session session = Session.getDefaultInstance(properties, new Authenticator() {
            @Override
            public PasswordAuthentication getPasswordAuthentication() {
                // sender username + authorization code
                // NOTE(review): the Authenticator uses emailAuthorization while
                // transport.connect below uses emailPassword — confirm which
                // credential the server actually expects
                return new PasswordAuthentication(emailUserName,emailAuthorization);
            }
        });
        // enable debug mode to trace the SMTP conversation in the logs
        session.setDebug(true);
        // 2. obtain the transport from the session
        Transport transport = session.getTransport();
        // 3. connect to the mail server with username and password
        transport.connect("smtp.qq.com",emailUserName,emailPassword);
        // 4. compose the message (needs the session)
        MimeMessage message = new MimeMessage(session);
        // sender of the mail
        message.setFrom(new InternetAddress(emailUserName));
        // recipient of the mail
        message.setRecipient(Message.RecipientType.TO , new InternetAddress(SJemailUserName));
        message.setSubject(title);
        message.setContent(msg,"text/html;charset=UTF-8");
        // 5. send the mail
        transport.sendMessage(message,message.getAllRecipients());
        // 6. close the connection
        transport.close();
    }
}

View File

@ -0,0 +1,80 @@
package com.czsj.bigdata.core.util;
import com.czsj.bigdata.core.conf.JobAdminConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* i18n util
*
* @author xuxueli 2018-01-17 20:39:06
*/
public class I18nUtil {
    private static Logger logger = LoggerFactory.getLogger(I18nUtil.class);
    // lazily-loaded i18n bundle, cached for the lifetime of the JVM
    private static Properties prop = null;
    /**
     * Load (once) and return the i18n property bundle selected by the admin
     * config: "i18n/message.properties" or "i18n/message_&lt;lang&gt;.properties".
     * NOTE(review): lazy init is unsynchronized — concurrent first calls may
     * each load the file; harmless since the result is identical. Returns null
     * if loading fails (the failure is logged).
     */
    public static Properties loadI18nProp(){
        if (prop != null) {
            return prop;
        }
        try {
            // build i18n prop
            String i18n = JobAdminConfig.getAdminConfig().getI18n();
            i18n = (i18n!=null && i18n.trim().length()>0)?("_"+i18n):i18n;
            String i18nFile = MessageFormat.format("i18n/message{0}.properties", i18n);
            // load prop
            Resource resource = new ClassPathResource(i18nFile);
            EncodedResource encodedResource = new EncodedResource(resource,"UTF-8");
            prop = PropertiesLoaderUtils.loadProperties(encodedResource);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return prop;
    }
    /**
     * get val of i18n key
     *
     * @param key message key
     * @return localized value, or null when the key is absent
     */
    public static String getString(String key) {
        return loadI18nProp().getProperty(key);
    }
    /**
     * get mult val of i18n mult key, as json
     *
     * @param keys keys to include; when empty/null, every key in the bundle is returned
     * @return JSON object string mapping key -> localized value
     */
    public static String getMultString(String... keys) {
        Map<String, String> map = new HashMap<String, String>();
        Properties prop = loadI18nProp();
        if (keys!=null && keys.length>0) {
            for (String key: keys) {
                map.put(key, prop.getProperty(key));
            }
        } else {
            for (String key: prop.stringPropertyNames()) {
                map.put(key, prop.getProperty(key));
            }
        }
        String json = JacksonUtil.writeValueAsString(map);
        return json;
    }
}

View File

@ -0,0 +1,92 @@
package com.czsj.bigdata.core.util;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
* Jackson util
*
* 1obj need private and set/get
* 2do not support inner class
*
* @author xuxueli 2015-9-25 18:02:56
*/
/**
 * Thin wrapper around a single shared Jackson {@link ObjectMapper}.
 *
 * (1) beans need private fields with get/set accessors
 * (2) non-static inner classes are not supported
 */
public class JacksonUtil {
    private static Logger logger = LoggerFactory.getLogger(JacksonUtil.class);
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /** @return the shared, thread-safe mapper instance */
    public static ObjectMapper getInstance() {
        return objectMapper;
    }

    /**
     * Serialize a bean/array/List/Map to a JSON string.
     *
     * @param obj value to serialize
     * @return json string, or null when serialization fails (failure is logged)
     */
    public static String writeValueAsString(Object obj) {
        try {
            return getInstance().writeValueAsString(obj);
        } catch (IOException e) {
            // JsonGenerationException and JsonMappingException both extend
            // IOException, and every failure is handled identically: log, return null.
            logger.error(e.getMessage(), e);
            return null;
        }
    }

    /**
     * Deserialize a JSON string into the given class (bean/Map/List/array).
     *
     * @param jsonStr json input
     * @param clazz   target type
     * @return parsed object, or null when parsing fails (failure is logged)
     */
    public static <T> T readValue(String jsonStr, Class<T> clazz) {
        try {
            return getInstance().readValue(jsonStr, clazz);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
            return null;
        }
    }

    /**
     * Deserialize a JSON string into a parameterized type, e.g. List&lt;Bean&gt;.
     *
     * @param jsonStr          json input
     * @param parametrized     container type
     * @param parameterClasses element type(s)
     * @return parsed object, or null when parsing fails (failure is logged)
     */
    public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
        try {
            JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
            return getInstance().readValue(jsonStr, javaType);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
            return null;
        }
    }
}

View File

@ -0,0 +1,133 @@
package com.czsj.bigdata.core.util;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* local cache tool
*
* @author xuxueli 2018-01-22 21:37:34
*/
/**
 * Minimal in-memory key/value cache with absolute per-entry expiry.
 * Backed by a ConcurrentHashMap; expired entries are purged lazily on
 * reads and before writes.
 */
public class LocalCacheUtil {
    // cache store, keyed by cache key (an abstract parent type would be more flexible)
    private static ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();
    /** One cache entry: key, value and its absolute expiry timestamp (epoch ms). */
    private static class LocalCacheData{
        private String key;
        private Object val;
        private long timeoutTime;
        public LocalCacheData() {
        }
        public LocalCacheData(String key, Object val, long timeoutTime) {
            this.key = key;
            this.val = val;
            this.timeoutTime = timeoutTime;
        }
        public String getKey() {
            return key;
        }
        public void setKey(String key) {
            this.key = key;
        }
        public Object getVal() {
            return val;
        }
        public void setVal(Object val) {
            this.val = val;
        }
        public long getTimeoutTime() {
            return timeoutTime;
        }
        public void setTimeoutTime(long timeoutTime) {
            this.timeoutTime = timeoutTime;
        }
    }
    /**
     * set cache
     *
     * @param key       cache key; blank/null keys are rejected
     * @param val       value to store; null evicts the key instead of storing
     * @param cacheTime time-to-live in ms; non-positive values evict the key
     * @return true when the entry was stored, false otherwise
     */
    public static boolean set(String key, Object val, long cacheTime){
        // clean timeout cache, before set new cache (avoid cache too much)
        cleanTimeoutCache();
        // set new cache
        if (key==null || key.trim().length()==0) {
            return false;
        }
        if (val == null) {
            // BUGFIX: previously fell through and stored a null-valued entry
            // (indistinguishable from a miss) while still returning true.
            remove(key);
            return false;
        }
        if (cacheTime <= 0) {
            // BUGFIX: previously fell through and stored an already-expired entry.
            remove(key);
            return false;
        }
        long timeoutTime = System.currentTimeMillis() + cacheTime;
        LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
        cacheRepository.put(localCacheData.getKey(), localCacheData);
        return true;
    }
    /**
     * remove cache
     *
     * @param key cache key; blank/null keys are rejected
     * @return true when the key was valid (whether or not an entry existed)
     */
    public static boolean remove(String key){
        if (key==null || key.trim().length()==0) {
            return false;
        }
        cacheRepository.remove(key);
        return true;
    }
    /**
     * get cache
     *
     * @param key cache key
     * @return cached value, or null when absent/expired (expired entries are evicted)
     */
    public static Object get(String key){
        if (key==null || key.trim().length()==0) {
            return null;
        }
        LocalCacheData localCacheData = cacheRepository.get(key);
        if (localCacheData!=null && System.currentTimeMillis()<localCacheData.getTimeoutTime()) {
            return localCacheData.getVal();
        } else {
            remove(key);
            return null;
        }
    }
    /**
     * clean timeout cache: evict every entry whose expiry has passed.
     * Iterating keySet() of a ConcurrentHashMap while removing is safe.
     *
     * @return always true
     */
    public static boolean cleanTimeoutCache(){
        if (!cacheRepository.keySet().isEmpty()) {
            for (String key: cacheRepository.keySet()) {
                LocalCacheData localCacheData = cacheRepository.get(key);
                if (localCacheData!=null && System.currentTimeMillis()>=localCacheData.getTimeoutTime()) {
                    cacheRepository.remove(key);
                }
            }
        }
        return true;
    }
}

View File

@ -0,0 +1,16 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * ClickHouse reader options used when building sync-job JSON.
 * Currently empty: the type exists so {@code JsonBuildDto} can carry a
 * dedicated, typed slot for future ClickHouse-specific reader settings.
 *
 * @author gavin
 * @ClassName clickhouse reader dto
 * @Version 2.0
 * @since 2022/9/29
 */
@Data
public class ClickhouseReaderDto implements Serializable {
    // Intentionally empty — placeholder for ClickHouse-specific reader options.
}

View File

@ -0,0 +1,16 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * ClickHouse writer options used when building sync-job JSON.
 * Currently empty: the type exists so {@code JsonBuildDto} can carry a
 * dedicated, typed slot for future ClickHouse-specific writer settings.
 *
 * @author gavin
 * @ClassName clickhouse write dto
 * @Version 2.0
 * @since 2022/9/29
 */
@Data
public class ClickhouseWriterDto implements Serializable {
    // Intentionally empty — placeholder for ClickHouse-specific writer options.
}

View File

@ -0,0 +1,32 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for batch-building DataX job JSON: pairs reader tables with
 * writer tables for one source/target datasource pair, using a job template.
 *
 * @author jingwk
 * @ClassName DataXJsonDto
 * @Version 2.1.2
 * @since 2022/05/05 17:15
 */
@Data
public class DataXBatchJsonBuildDto implements Serializable {
    // Source datasource id.
    private Long readerDatasourceId;
    // Tables to read from; presumably paired index-wise with writerTables — TODO confirm against caller.
    private List<String> readerTables;
    // Target datasource id.
    private Long writerDatasourceId;
    // Tables to write to.
    private List<String> writerTables;
    // Job template id to base the generated jobs on.
    private int templateId;
    // RDBMS-specific reader options (split key, where clause, query SQL).
    private RdbmsReaderDto rdbmsReader;
    // RDBMS-specific writer options (pre/post SQL).
    private RdbmsWriterDto rdbmsWriter;
}

View File

@ -0,0 +1,46 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for building a single DataX job JSON: identifies the reader and
 * writer datasources, the tables/columns to move, and per-technology option
 * blocks (only the block matching the datasource type is expected to be set).
 *
 * @author jingwk
 * @ClassName DataxJsonDto
 * @Version 2.1.1
 * @since 2022/03/14 07:15
 */
@Data
public class DataXJsonBuildDto implements Serializable {
    private Long readerDatasourceId;
    private List<String> readerTables;
    private List<String> readerColumns;
    private Long writerDatasourceId;
    private List<String> writerTables;
    private List<String> writerColumns;
    // Technology-specific option blocks; unused ones stay null.
    private HiveReaderDto hiveReader;
    private HiveWriterDto hiveWriter;
    private HbaseReaderDto hbaseReader;
    private HbaseWriterDto hbaseWriter;
    private RdbmsReaderDto rdbmsReader;
    private RdbmsWriterDto rdbmsWriter;
    private MongoDBReaderDto mongoDBReader;
    private MongoDBWriterDto mongoDBWriter;
}

View File

@ -0,0 +1,21 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import javax.validation.constraints.NotBlank;
import java.io.Serializable;
/**
 * Request DTO carrying a Flink SQL statement string to execute.
 *
 * @author fei
 * @date 2022-01-27
 *
 **/
@Data
public class FlinkSqlDto implements Serializable{
    private static final long serialVersionUID = 1L;
    // The SQL text; validated as mandatory (message is user-facing, in Chinese).
    @NotBlank(message = "SQL 字符串必传!")
    private String sqlStr;
}

View File

@ -0,0 +1,32 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for batch-building FlinkX job JSON: pairs reader tables with
 * writer tables for one source/target datasource pair, using a job template.
 * Mirrors {@code DataXBatchJsonBuildDto} for the FlinkX engine.
 *
 * @author jingwk
 * @ClassName FlinkXJsonDto
 * @Version 2.1.2
 * @since 2022/05/05 17:15
 */
@Data
public class FlinkXBatchJsonBuildDto implements Serializable {
    private Long readerDatasourceId;
    private List<String> readerTables;
    private Long writerDatasourceId;
    private List<String> writerTables;
    // Job template id to base the generated jobs on.
    private int templateId;
    private RdbmsReaderDto rdbmsReader;
    private RdbmsWriterDto rdbmsWriter;
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * HBase-specific reader options for job JSON building.
 */
@Data
public class HbaseReaderDto implements Serializable {
    // Maximum number of cell versions to read.
    private String readerMaxVersion;
    // Read mode; exact accepted values are defined by the consuming builder — TODO confirm.
    private String readerMode;
    // Rowkey scan range (start/end rowkey, binary flag).
    private Range readerRange;
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * HBase-specific writer options for job JSON building.
 */
@Data
public class HbaseWriterDto implements Serializable {
    // How null values are handled on write.
    private String writeNullMode;
    // Write mode; exact accepted values are defined by the consuming builder — TODO confirm.
    private String writerMode;
    // Which column(s) form the rowkey.
    private String writerRowkeyColumn;
    // Column supplying the cell version (index + value).
    private VersionColumn writerVersionColumn;
}

View File

@ -0,0 +1,28 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Hive/HDFS-specific reader options for job JSON building.
 *
 * @author jingwk
 * @ClassName hive reader
 * @Version 2.0
 * @since 2022/01/11 17:15
 */
@Data
public class HiveReaderDto implements Serializable {
    // HDFS path to read.
    private String readerPath;
    // HDFS defaultFS URI (e.g. namenode address) — presumably, verify against builder.
    private String readerDefaultFS;
    // File format of the source files.
    private String readerFileType;
    // Field delimiter within the source files.
    private String readerFieldDelimiter;
    // Whether to skip a header row.
    private Boolean readerSkipHeader;
}

View File

@ -0,0 +1,29 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Hive/HDFS-specific writer options for job JSON building.
 *
 * @author jingwk
 * @ClassName hive write dto
 * @Version 2.0
 * @since 2022/01/11 17:15
 */
@Data
public class HiveWriterDto implements Serializable {
    // HDFS defaultFS URI of the target cluster.
    private String writerDefaultFS;
    // File format of the written files.
    private String writerFileType;
    // Target HDFS directory.
    private String writerPath;
    // Target file name.
    private String writerFileName;
    // Write mode (append/overwrite etc. — defined by the consuming builder).
    private String writeMode;
    // Field delimiter for the written files.
    private String writeFieldDelimiter;
}

View File

@ -0,0 +1,52 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for building a sync-job JSON (FlinkX flavor): reader/writer
 * datasources, tables/columns, per-technology option blocks, and an engine
 * {@code type} discriminator. Superset of {@code DataXJsonBuildDto} with
 * ClickHouse blocks added.
 *
 * @author jingwk
 * @ClassName FlinkxJsonDto
 * @Version 2.1.1
 * @since 2022/03/14 07:15
 */
@Data
public class JsonBuildDto implements Serializable {
    private Long readerDatasourceId;
    private List<String> readerTables;
    private List<String> readerColumns;
    private Long writerDatasourceId;
    private List<String> writerTables;
    private List<String> writerColumns;
    // Technology-specific option blocks; unused ones stay null.
    private HiveReaderDto hiveReader;
    private HiveWriterDto hiveWriter;
    private HbaseReaderDto hbaseReader;
    private HbaseWriterDto hbaseWriter;
    private RdbmsReaderDto rdbmsReader;
    private RdbmsWriterDto rdbmsWriter;
    private MongoDBReaderDto mongoDBReader;
    private MongoDBWriterDto mongoDBWriter;
    private ClickhouseReaderDto clickhouseReader;
    private ClickhouseWriterDto clickhouseWriter;
    // Engine/job type discriminator; accepted values defined by the consumer — TODO confirm.
    private String type;
}

View File

@ -0,0 +1,19 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * MongoDB reader options for job JSON building.
 * Currently empty: kept so {@code JsonBuildDto} can carry a typed slot for
 * future MongoDB-specific reader settings.
 *
 * @author jingwk
 * @ClassName mongodb reader
 * @Version 2.1.1
 * @since 2022/03/14 07:15
 */
@Data
public class MongoDBReaderDto implements Serializable {
    // Intentionally empty — placeholder for MongoDB-specific reader options.
}

View File

@ -0,0 +1,20 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * MongoDB writer options for job JSON building.
 *
 * @author jingwk
 * @ClassName mongodb write dto
 * @Version 2.1.1
 * @since 2022/03/14 07:15
 */
@Data
public class MongoDBWriterDto implements Serializable {
    // Upsert behavior (whether to upsert and on which business key).
    private UpsertInfo upsertInfo;
}

View File

@ -0,0 +1,15 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * HBase rowkey scan range: [startRowkey, endRowkey], with a flag marking
 * whether the rowkeys are binary-encoded.
 */
@Data
public class Range implements Serializable {
    private String startRowkey;
    private String endRowkey;
    private Boolean isBinaryRowkey;
}

View File

@ -0,0 +1,23 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Relational-database reader options for job JSON building.
 *
 * @author jingwk
 * @ClassName RdbmsReaderDto
 * @Version 2.0
 * @since 2022/01/11 17:15
 */
@Data
public class RdbmsReaderDto implements Serializable {
    // Column used to split the read into parallel slices.
    private String readerSplitPk;
    // WHERE-clause filter appended to the generated query.
    private String whereParams;
    // Full custom query; presumably overrides table/column selection when set — TODO confirm.
    private String querySql;
}

View File

@ -0,0 +1,21 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Relational-database writer options for job JSON building.
 *
 * @author jingwk
 * @ClassName RdbmsWriteDto
 * @Version 2.0
 * @since 2022/01/11 17:15
 */
@Data
public class RdbmsWriterDto implements Serializable {
    // SQL executed before the write starts.
    private String preSql;
    // SQL executed after the write completes.
    private String postSql;
}

View File

@ -0,0 +1,86 @@
package com.czsj.bigdata.dto;
/**
 * Schedule settings for a job, as picked in the UI; used to derive a CRON-like
 * schedule. NOTE(review): fields are package-private — presumably unintended;
 * confirm before tightening to private.
 */
public class TaskScheduleDto {
    /**
     * Selected job type:
     * 1 -> daily
     * 2 -> monthly
     * 3 -> weekly
     */
    Integer jobType;
    /**
     * Which days of the week (used when jobType = weekly).
     */
    Integer[] dayOfWeeks;
    /**
     * Which days of the month (used when jobType = monthly).
     */
    Integer[] dayOfMonths;
    /**
     * Second of the scheduled time.
     */
    Integer second;
    /**
     * Minute of the scheduled time.
     */
    Integer minute;
    /**
     * Hour of the scheduled time.
     */
    Integer hour;
    public Integer getJobType() {
        return jobType;
    }
    public void setJobType(Integer jobType) {
        this.jobType = jobType;
    }
    public Integer[] getDayOfWeeks() {
        return dayOfWeeks;
    }
    public void setDayOfWeeks(Integer[] dayOfWeeks) {
        this.dayOfWeeks = dayOfWeeks;
    }
    public Integer[] getDayOfMonths() {
        return dayOfMonths;
    }
    public void setDayOfMonths(Integer[] dayOfMonths) {
        this.dayOfMonths = dayOfMonths;
    }
    public Integer getSecond() {
        return second;
    }
    public void setSecond(Integer second) {
        this.second = second;
    }
    public Integer getMinute() {
        return minute;
    }
    public void setMinute(Integer minute) {
        this.minute = minute;
    }
    public Integer getHour() {
        return hour;
    }
    public void setHour(Integer hour) {
        this.hour = hour;
    }
}

View File

@ -0,0 +1,21 @@
package com.czsj.bigdata.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Request DTO for triggering a job run: the job id plus an optional
 * executor parameter string.
 *
 * @author jingwk
 * @ClassName TriggerJobDto
 * @Version 1.0
 * @since 2019/12/01 16:12
 */
@Data
public class TriggerJobDto implements Serializable {
    // Parameter string passed to the executor for this run.
    private String executorParam;
    // Id of the job to trigger.
    private int jobId;
}

View File

@ -0,0 +1,18 @@
package com.czsj.bigdata.dto;
import lombok.Data;
/**
 * Upsert settings for the MongoDB writer.
 *
 * Created by mac on 2022/3/16.
 */
@Data
public class UpsertInfo {
    /**
     * When true, records matching the same upsertKey are updated instead of inserted.
     */
    private Boolean isUpsert;
    /**
     * Business key of each record, used to match existing documents on update.
     */
    private String upsertKey;
}

View File

@ -0,0 +1,11 @@
package com.czsj.bigdata.dto;
import lombok.Data;
/**
 * HBase writer cell-version source: either a column index or a fixed value.
 */
@Data
public class VersionColumn {
    private Integer index;
    private String value;
}

View File

@ -0,0 +1,24 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Authorization link between an API token and an API group.
 * NOTE(review): snake_case field names presumably mirror DB column names —
 * keep as-is for the ORM mapping.
 */
@Data
public class APIAuth {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("请求token的ID")
    private String token_id;
    @ApiModelProperty("请求分组的ID")
    private String group_id;
    @ApiModelProperty("更新时间")
    private String update_time;
}

View File

@ -0,0 +1,20 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Join entity linking an API authorization record to an API config record.
 */
@Data
public class APIAuthConfig {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("权限的id")
    private String auth_id;
    @ApiModelProperty("配置的id")
    private String config_id;
}

View File

@ -0,0 +1,41 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Definition of a published data-service API: request path, group, backing
 * datasource, request parameters and the SQL it executes.
 */
@Data
public class APIConfig {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("请求路径")
    private String path;
    @ApiModelProperty("名称")
    private String name;
    @ApiModelProperty("API分组")
    private String group_id;
    // NOTE(review): "describe" is a legal but unusual field name; kept for the DB mapping.
    @ApiModelProperty("描述")
    private String describe;
    @ApiModelProperty("数据源ID")
    private Long datasource_id;
    @ApiModelProperty("请求参数")
    private String params;
    @ApiModelProperty("创建时间")
    private String create_time;
    @ApiModelProperty("更新时间")
    private String update_time;
    @ApiModelProperty("执行的SQL语句")
    private String sql_text;
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Named grouping of data-service APIs.
 */
@Data
public class APIGroup {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("分组名称")
    private String name;
}

View File

@ -0,0 +1,20 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * SQL statement attached to a data-service API.
 */
@Data
public class APISQL {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("api的Id")
    private String api_id;
    @ApiModelProperty("api的执行SQL")
    private String sql_text;
}

View File

@ -0,0 +1,27 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Access token for the data-service APIs, with description and expiry.
 */
@Data
public class APIToken {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("请求Token")
    private String token;
    // NOTE(review): "describe" is a legal but unusual field name; kept for the DB mapping.
    @ApiModelProperty("描述")
    private String describe;
    @ApiModelProperty("过期时间")
    private String expire;
    @ApiModelProperty("创建时间")
    private String create_time;
}

View File

@ -0,0 +1,252 @@
package com.czsj.bigdata.entity;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.czsj.bigdata.util.PageUtils;
import com.czsj.bigdata.util.ServletUtils;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServletRequest;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Base helper that snapshots the current HTTP request's parameters into a map
 * on construction and derives paging/sorting settings from them (for both
 * MyBatis-Plus and PageHelper style queries).
 *
 * @Author: czsj
 * @Date: 2022/9/16 11:14
 * @Description: base request-parameter helper class
 **/
@Slf4j
public class BaseForm {
    /**
     * Collected query parameters (insertion-ordered).
     */
    protected Map<String, Object> values = new LinkedHashMap<>();
    /**
     * Current page number; defaults to 1.
     */
    private Long current = 1L;
    /**
     * Page size; defaults to 10.
     */
    private Long size = 10L;
    /**
     * Constructor: copies every parameter of the current request into
     * {@link #values} (trimmed, then URL-decoded as UTF-8) and parses
     * paging/sort parameters. Any failure is logged and swallowed.
     */
    public BaseForm() {
        try {
            HttpServletRequest request = ServletUtils.getRequest();
            Enumeration<String> params = request.getParameterNames();
            while (params.hasMoreElements()) {
                String name = params.nextElement();
                String value = StrUtil.trim(request.getParameter(name));
                this.set(name, URLDecoder.decode(value, "UTF-8"));
            }
            this.parsePagingQueryParams();
        } catch (Exception e) {
            e.printStackTrace();
            log.error("BaseControlForm initialize parameters setting error" + e);
        }
    }
    /**
     * Get the page number from the "current" parameter (falls back to the
     * default of 1 when absent or non-numeric).
     *
     * @return page number
     */
    public Long getPageNo() {
        String pageNum = StrUtil.toString(this.get("current"));
        if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) {
            this.current = Long.parseLong(pageNum);
        }
        return this.current;
    }
    /**
     * Get the page size from the "size" parameter (falls back to the default
     * of 10 when absent, non-numeric, or the literal string "null").
     *
     * @return page size
     */
    public Long getPageSize() {
        String pageSize = StrUtil.toString(this.get("size"));
        if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) {
            this.size = Long.parseLong(pageSize);
        }
        return this.size;
    }
    /**
     * Get the full parameter map.
     *
     * @return parameter map
     */
    public Map<String, Object> getParameters() {
        return values;
    }
    /**
     * Get a parameter value by name.
     *
     * @param name parameter name
     * @return value, or null when absent
     */
    public Object get(String name) {
        if (values == null) {
            values = new LinkedHashMap<>();
            return null;
        }
        return this.values.get(name);
    }
    /**
     * Get a parameter value by name as a String.
     *
     * @param key parameter name
     * @return String value (hutool's StrUtil.toString conversion)
     */
    public String getString(String key) {
        return StrUtil.toString(get(key));
    }
    /**
     * Get the sort-field parameter.
     *
     * @return value of "sort"
     */
    public String getSort() {
        return StrUtil.toString(this.values.get("sort"));
    }
    /**
     * Get the sort-direction parameter.
     *
     * @return value of "order"
     */
    public String getOrder() {
        return StrUtil.toString(this.values.get("order"));
    }
    /**
     * Get the combined order-by parameter.
     *
     * @return value of "orderby"
     */
    public String getOrderby() {
        return StrUtil.toString(this.values.get("orderby"));
    }
    /**
     * Build a MyBatis-Plus {@link Page} from the collected parameters.
     * Count query is controlled by the optional "ifCount" parameter
     * (defaults to true).
     */
    public Page getPlusPagingQueryEntity() {
        Page page = new Page();
        // NOTE(review): the original comment claimed "returns 1000 rows when no
        // 'current' is given"; the actual defaults are current=1, size=10.
        page.setCurrent(this.getPageNo());
        page.setSize(this.getPageSize());
        if (ObjectUtil.isNotNull(this.get("ifCount"))) {
            page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount")));
        } else {
            // default to running the count query
            page.setSearchCount(true);
        }
        return page;
    }
    /**
     * Parse PageHelper-style sort parameters: when "orderby" is absent but
     * "sort" is present, synthesize "orderby" as "&lt;sort&gt; &lt;order&gt;",
     * defaulting the direction to "asc" when it is neither asc nor desc.
     */
    public void parsePagingQueryParams() {
        // sort-field parsing
        String orderBy = StrUtil.toString(this.get("orderby")).trim();
        String sortName = StrUtil.toString(this.get("sort")).trim();
        String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase();
        if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) {
            if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) {
                sortOrder = "asc";
            }
            this.set("orderby", sortName + " " + sortOrder);
        }
    }
    /**
     * Set a parameter; null values are silently ignored.
     *
     * @param name  parameter name
     * @param value parameter value
     */
    public void set(String name, Object value) {
        if (ObjectUtil.isNotNull(value)) {
            this.values.put(name, value);
        }
    }
    /**
     * Remove a parameter.
     *
     * @param name parameter name
     */
    public void remove(String name) {
        this.values.remove(name);
    }
    /**
     * Clear all parameters.
     */
    public void clear() {
        if (values != null) {
            values.clear();
        }
    }
    /**
     * Custom query assembly: applies paging-related sort params (ascs/descs)
     * and column filters from the map onto the given wrapper. Column names are
     * converted to snake_case; "pluginName"/"datasourceName" use LIKE, all
     * other columns use equality.
     *
     * @param map raw request parameters
     * @return the same wrapper, with conditions applied
     */
    public QueryWrapper<?> pageQueryWrapperCustom(Map<String, Object> map, QueryWrapper<?> queryWrapper) {
        // MyBatis-Plus paging-related parameters
        Map<String, Object> pageParams = PageUtils.filterPageParams(map);
        // filter out empty values / paging params, keeping column-query params
        Map<String, Object> colQueryMap = PageUtils.filterColumnQueryParams(map);
        // sorting
        pageParams.forEach((k, v) -> {
            switch (k) {
                case "ascs":
                    queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
                case "descs":
                    queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
            }
        });
        // assemble per-column query conditions
        colQueryMap.forEach((k, v) -> {
            switch (k) {
                case "pluginName":
                case "datasourceName":
                    queryWrapper.like(StrUtil.toUnderlineCase(k), v);
                    break;
                default:
                    queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
            }
        });
        return queryWrapper;
    }
}

View File

@ -0,0 +1,37 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * A deployable resource (e.g. a file on a remote server) with its location
 * and the server credentials needed to reach it.
 * NOTE(review): serverPassword is stored/exposed as plain text here — confirm
 * it is protected elsewhere before logging or serializing this entity.
 */
@Data
public class BaseResource {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("资源名称")
    private String name;
    @ApiModelProperty("资源地址")
    private String resource_address;
    @ApiModelProperty("更新时间")
    private String update_time;
    @ApiModelProperty("服务器IP")
    private String serverIp;
    @ApiModelProperty("服务器用户名")
    private String serverUser;
    @ApiModelProperty("服务器密码")
    private String serverPassword;
    @ApiModelProperty("资源类型")
    private String type;
}

View File

@ -0,0 +1,14 @@
package com.czsj.bigdata.entity;
/**
 * Describes a single database column: its type, name, and owning table.
 * NOTE(review): fields are private with no accessors or lombok annotation —
 * presumably populated via reflection (e.g. by an ORM); confirm before adding
 * getters.
 *
 * @Date: 2022/4/4 9:09
 * @Description:
 **/
public class ColumnClass {
    private String columnType;
    private String columnName;
    private String tableName;
}

View File

@ -0,0 +1,14 @@
package com.czsj.bigdata.entity;
/**
 * Shared constants for the application.
 * NOTE(review): PORT/SERVERPORT/SERVERCONTEXTPATH are mutable public statics
 * (not final) — presumably overwritten at startup from configuration; confirm
 * before marking them final.
 *
 * @Author: czsj
 * @Date: 2022/10/7 11:21
 * @Description: constant definitions
 **/
public class Common {
    // Path of the API documentation page (knife4j/swagger doc UI).
    public static final String DOCPAGE = "/doc.html";
    // Default HTTP port.
    public static String PORT ="8080";
    // Spring property key for the server port.
    public static String SERVERPORT ="server.port";
    // Spring property key for the server context path.
    public static String SERVERCONTEXTPATH ="server.contextPath";
}

View File

@ -0,0 +1,32 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * View of a deployed (Flink-style) job: id, name, timing and status.
 * All fields are kept as Strings, matching the upstream REST payload.
 */
@Data
public class DeployTask {
    @ApiModelProperty("作业ID")
    private String jid;
    @ApiModelProperty("作业名称")
    private String name;
    @ApiModelProperty("开始时间")
    private String begintime;
    @ApiModelProperty("持续时间")
    private String duration;
    @ApiModelProperty("结束时间")
    private String endtime;
    @ApiModelProperty("任务数")
    private String tasknumber;
    @ApiModelProperty("状态")
    private String status;
}

View File

@ -0,0 +1,55 @@
package com.czsj.bigdata.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Development-environment setting: a named property value owned by a user,
 * plus a set of environment URLs (upload/deploy/show/offline).
 * NOTE(review): the four *url fields are typed Boolean despite "URL" names —
 * presumably flags for whether each URL action is enabled; confirm.
 */
@Data
public class DevEnvSetting {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("属性名称")
    private String name;
    @ApiModelProperty("属性值")
    private String propValue;
    @ApiModelProperty("属性描述")
    private String description;
    @ApiModelProperty("用户Id")
    private Long userId;
    @ApiModelProperty("标记")
    private Boolean flag;
    @ApiModelProperty("上传的URL")
    private Boolean uploadurl;
    @ApiModelProperty("部署的URL")
    private Boolean deployurl;
    @ApiModelProperty("展示的URL")
    private Boolean showurl;
    @ApiModelProperty("下线的URL")
    private Boolean offlineurl;
    @ApiModelProperty("创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date createTime;
    @ApiModelProperty("更新时间")
    private Date updateTime;
    // Joined display field, not a DB column.
    @TableField(exist=false)
    private String userName;
}

View File

@ -0,0 +1,44 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
 * Development task definition: name, type, run configuration (parameters,
 * jar path or SQL) and audit timestamps.
 */
@Data
public class DevTask {
    @ApiModelProperty("属性Id")
    private int id;
    @ApiModelProperty("任务名称")
    private String name;
    @ApiModelProperty("任务类型")
    private String tasktype;
    @ApiModelProperty("运行类型")
    private String runtype;
    @ApiModelProperty("运行参数")
    private String run_param;
    @ApiModelProperty("JAR包路径")
    private String jarpath;
    @ApiModelProperty("任务的SQL")
    private String sql_text;
    @ApiModelProperty("任务描述")
    private String task_describe;
    @ApiModelProperty("创建时间")
    private String create_time;
    @ApiModelProperty("更新时间")
    private String update_time;
    @ApiModelProperty("类型")
    private String type;
}

View File

@ -0,0 +1,9 @@
package com.czsj.bigdata.entity;
import lombok.Data;
/**
 * One dashboard counter: a count value labeled by its type.
 */
@Data
public class InfoReport {
    private int resultCount;
    private String countType;
}

View File

@ -0,0 +1,149 @@
package com.czsj.bigdata.entity;
import com.alibaba.fastjson.annotation.JSONField;
import com.baomidou.mybatisplus.annotation.*;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import com.czsj.bigdata.core.handler.AESEncryptHandler;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * JDBC datasource configuration entity (table job_jdbc_datasource).
 * Credentials are AES-encrypted at rest via {@code AESEncryptHandler}.
 *
 * @author zhouhongfa@gz-yibo.com
 * @version v1.0
 * @since 2019-07-30
 */
@Data
@ApiModel
@TableName("job_jdbc_datasource")
public class JobDatasource extends Model<JobDatasource> {
    /**
     * Auto-increment primary key.
     */
    @TableId
    @ApiModelProperty(value = "自增主键")
    private Long id;
    /**
     * Datasource name.
     */
    @ApiModelProperty(value = "数据源名称")
    private String datasourceName;
    /**
     * Datasource type.
     */
    @ApiModelProperty(value = "数据源")
    private String datasource;
    /**
     * Datasource group.
     */
    @ApiModelProperty(value = "数据源分组")
    private String datasourceGroup;
    /**
     * Username.
     * AESEncryptHandler encrypts/decrypts the column value.
     * MyBatis Plus before 3.0.7.1 has no typeHandler attribute; requires at least 3.1.2.
     */
    @ApiModelProperty(value = "用户名")
    @TableField(typeHandler = AESEncryptHandler.class)
    private String jdbcUsername;
    /**
     * Password (AES-encrypted in the DB column).
     */
    @TableField(typeHandler = AESEncryptHandler.class)
    @ApiModelProperty(value = "密码")
    private String jdbcPassword;
    /**
     * jdbc url
     */
    @ApiModelProperty(value = "jdbc url")
    private String jdbcUrl;
    /**
     * JDBC driver class name.
     */
    @ApiModelProperty(value = "jdbc驱动类")
    private String jdbcDriverClass;
    /**
     * Status: 0 deleted, 1 enabled, 2 disabled (logical delete column).
     */
    @TableLogic
    @ApiModelProperty(value = "状态0删除 1启用 2禁用")
    private Integer status;
    /**
     * Creator (auto-filled on insert).
     */
    @TableField(fill = FieldFill.INSERT)
    @ApiModelProperty(value = "创建人", hidden = true)
    private String createBy;
    /**
     * Creation time (auto-filled on insert).
     */
    @TableField(fill = FieldFill.INSERT)
    @JSONField(format = "yyyy/MM/dd")
    @ApiModelProperty(value = "创建时间", hidden = true)
    private Date createDate;
    /**
     * Last modifier (auto-filled on insert/update).
     */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @ApiModelProperty(value = "更新人", hidden = true)
    private String updateBy;
    /**
     * Last update time (auto-filled on insert/update).
     */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @JSONField(format = "yyyy/MM/dd")
    @ApiModelProperty(value = "更新时间", hidden = true)
    private Date updateDate;
    /**
     * Remarks.
     */
    @ApiModelProperty(value = "备注", hidden = true)
    private String comments;
    /**
     * ZooKeeper address (for ZK-backed datasources).
     */
    @ApiModelProperty(value = "zookeeper地址", hidden = true)
    private String zkAdress;
    /**
     * Database name.
     */
    @ApiModelProperty(value = "数据库名", hidden = true)
    private String databaseName;
    /**
     * ORC schema name (original comment said "database name" — copy/paste).
     */
    @ApiModelProperty(value = "orc库名", hidden = true)
    private String orcschema;
    /**
     * Primary key value for ActiveRecord operations.
     *
     * @return primary key value
     */
    @Override
    protected Serializable pkVal() {
        return this.id;
    }
}

View File

@ -0,0 +1,84 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Executor group: a named set of executor instances, addressed either by
 * automatic registration or a manually entered address list.
 *
 * Created by jingwk on 2019/11/17
 */
public class JobGroup {
    @ApiModelProperty("执行器Id")
    private int id;
    @ApiModelProperty("执行器AppName")
    private String appName;
    @ApiModelProperty("执行器名称")
    private String title;
    @ApiModelProperty("排序")
    private int order;
    @ApiModelProperty("执行器地址类型0=自动注册、1=手动录入")
    private int addressType;
    @ApiModelProperty("执行器地址列表,多地址逗号分隔(手动录入)")
    private String addressList;
    // registry list
    private List<String> registryList; // executor address list (system-registered)
    /**
     * Returns the executor addresses. Side effect: when a non-blank manual
     * {@link #addressList} is set, it is split on commas and OVERWRITES
     * {@link #registryList}; otherwise the previously set list (possibly null)
     * is returned unchanged.
     */
    public List<String> getRegistryList() {
        if (addressList!=null && addressList.trim().length()>0) {
            registryList = new ArrayList<>(Arrays.asList(addressList.split(",")));
        }
        return registryList;
    }
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getAppName() {
        return appName;
    }
    public void setAppName(String appName) {
        this.appName = appName;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public int getOrder() {
        return order;
    }
    public void setOrder(int order) {
        this.order = order;
    }
    public int getAddressType() {
        return addressType;
    }
    public void setAddressType(int addressType) {
        this.addressType = addressType;
    }
    public String getAddressList() {
        return addressList;
    }
    public void setAddressList(String addressList) {
        this.addressList = addressList;
    }
}

View File

@ -0,0 +1,125 @@
package com.czsj.bigdata.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Scheduled job definition (xxl-job style): scheduling, executor routing,
 * GLUE source, trigger state, and DataX-specific run configuration.
 *
 * @author jingwk 2019-11-17 14:25:49
 */
@Data
public class JobInfo {
    @ApiModelProperty("主键ID")
    private int id;
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务执行CRON表达式")
    private String jobCron;
    // NOTE(review): annotation text "排序" ("sort") looks copy-pasted — the field is the job description.
    @ApiModelProperty("排序")
    private String jobDesc;
    private Date addTime;
    private Date updateTime;
    @ApiModelProperty("修改用户")
    private Long userId;
    @ApiModelProperty("报警邮件")
    private String alarmEmail;
    @ApiModelProperty("执行器路由策略")
    private String executorRouteStrategy;
    @ApiModelProperty("执行器任务Handler名称")
    private String executorHandler;
    @ApiModelProperty("执行器,任务参数")
    private String executorParam;
    @ApiModelProperty("阻塞处理策略")
    private String executorBlockStrategy;
    @ApiModelProperty("任务执行超时时间,单位秒")
    private int executorTimeout;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    @ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    @ApiModelProperty("GLUE更新时间")
    private Date glueUpdatetime;
    @ApiModelProperty("子任务ID")
    private String childJobId;
    @ApiModelProperty("调度状态0-停止1-运行")
    private int triggerStatus;
    @ApiModelProperty("上次调度时间")
    private long triggerLastTime;
    @ApiModelProperty("下次调度时间")
    private long triggerNextTime;
    @ApiModelProperty("datax运行json")
    private String jobJson;
    @ApiModelProperty("脚本动态参数")
    private String replaceParam;
    @ApiModelProperty("增量日期格式")
    private String replaceParamType;
    @ApiModelProperty("jvm参数")
    private String jvmParam;
    @ApiModelProperty("增量初始时间")
    private Date incStartTime;
    @ApiModelProperty("分区信息")
    private String partitionInfo;
    @ApiModelProperty("最近一次执行状态")
    private int lastHandleCode;
    @ApiModelProperty("所属项目Id")
    private int projectId;
    @ApiModelProperty("主键字段")
    private String primaryKey;
    @ApiModelProperty("增量初始id")
    private Long incStartId;
    @ApiModelProperty("增量方式")
    private int incrementType;
    @ApiModelProperty("datax的读表")
    private String readerTable;
    @ApiModelProperty("数据源id")
    private int datasourceId;
    // Joined display fields, not DB columns.
    @TableField(exist=false)
    private String projectName;
    @TableField(exist=false)
    private String userName;
}

View File

@ -0,0 +1,66 @@
package com.czsj.bigdata.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Execution log record, used to track one trigger-and-handle cycle of a job.
 *
 * @author jingwk 2019-11-17 22:08:11
 */
@Data
public class JobLog {
    private long id;
    // job info
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务主键ID")
    private int jobId;
    @ApiModelProperty("任务描述")
    private String jobDesc;
    // execute info
    @ApiModelProperty("执行器地址,本次执行的地址")
    private String executorAddress;
    @ApiModelProperty("执行器任务handler")
    private String executorHandler;
    @ApiModelProperty("执行器任务参数")
    private String executorParam;
    @ApiModelProperty("执行器任务分片参数,格式如 1/2")
    private String executorShardingParam;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    // trigger info
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @ApiModelProperty("调度-时间")
    private Date triggerTime;
    @ApiModelProperty("调度-结果")
    private int triggerCode;
    @ApiModelProperty("调度-日志")
    private String triggerMsg;
    // handle info
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @ApiModelProperty("执行-时间")
    private Date handleTime;
    @ApiModelProperty("执行-状态")
    private int handleCode;
    @ApiModelProperty("执行-日志")
    private String handleMsg;
    // alarm info
    @ApiModelProperty("告警状态0-默认、1-无需告警、2-告警成功、3-告警失败")
    private int alarmStatus;
    @ApiModelProperty("DataX进程Id")
    private String processId;
    @ApiModelProperty("增量最大id")
    private Long maxId;
}

View File

@ -0,0 +1,34 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * GLUE source-code history record for a job, used to track code changes.
 *
 * @author xuxueli 2016-5-19 17:57:46
 */
@Data
public class JobLogGlue {
    private int id;
    @ApiModelProperty("任务主键ID")
    private int jobId;
    @ApiModelProperty("GLUE类型\t#com.xxl.job.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    private Date addTime;
    private Date updateTime;
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.entity;
import lombok.Data;
import java.util.Date;
/**
 * Daily aggregate of job runs: running/success/fail counts for one day.
 */
@Data
public class JobLogReport {
    private int id;
    // The day being aggregated.
    private Date triggerDay;
    private int runningCount;
    private int sucCount;
    private int failCount;
}

View File

@ -0,0 +1,57 @@
package com.czsj.bigdata.entity;
/**
 * Permission entity: a named permission node bound to a URL and arranged in a
 * parent/child tree via {@code pid}.
 */
public class JobPermission {

    /** Primary key. */
    private int id;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    /** Permission name. */
    private String name;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * Permission description. The historical misspelling "descritpion" is kept
     * on purpose: the field and accessor names are part of the public API.
     */
    private String descritpion;

    public String getDescritpion() {
        return descritpion;
    }

    public void setDescritpion(String descritpion) {
        this.descritpion = descritpion;
    }

    /** Authorized URL. */
    private String url;

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    /** Parent node id. */
    private int pid;

    public int getPid() {
        return pid;
    }

    public void setPid(int pid) {
        this.pid = pid;
    }
}

View File

@ -0,0 +1,41 @@
package com.czsj.bigdata.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Project entity: a named grouping of jobs owned by a user.
 *
 * Created by jingwk on 2022/05/24
 */
@Data
public class JobProject {
    @ApiModelProperty("项目Id")
    private int id;
    @ApiModelProperty("项目名称")
    private String name;
    @ApiModelProperty("项目描述")
    private String description;
    @ApiModelProperty("用户Id")
    private Long userId;
    @ApiModelProperty("标记")
    private Boolean flag;
    @ApiModelProperty("创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date createTime;
    @ApiModelProperty("更新时间")
    private Date updateTime;
    // Joined display field, not a DB column.
    @TableField(exist=false)
    private String userName;
}

View File

@ -0,0 +1,23 @@
package com.czsj.bigdata.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import java.util.Date;
/**
 * Executor registration record: which executor registered under which
 * group/key, its address, and its last reported resource usage.
 *
 * Created by xuxueli on 16/9/30.
 */
@Data
public class JobRegistry {
    private int id;
    private String registryGroup;
    private String registryKey;
    // Registered address value.
    private String registryValue;
    // Last reported machine metrics.
    private double cpuUsage;
    private double memoryUsage;
    private double loadAverage;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date updateTime;
}

View File

@ -0,0 +1,30 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
/**
 * Role entity: an id plus a role name.
 *
 * @author xuxueli 2019-05-04 16:43:12
 */
public class JobRole {
    private int id;
    @ApiModelProperty("账号")
    private String name;
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

View File

@ -0,0 +1,92 @@
package com.czsj.bigdata.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Job template: a reusable job definition (scheduling, executor routing, GLUE
 * source, DataX JSON) that batch job creation is based on. Mirrors
 * {@code JobInfo} minus the per-job trigger/increment state.
 *
 * @author jingwk 2019-11-17 14:25:49
 */
@Data
public class JobTemplate {
    @ApiModelProperty("主键ID")
    private int id;
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务执行CRON表达式")
    private String jobCron;
    // NOTE(review): annotation text "排序" ("sort") looks copy-pasted — the field is the job description.
    @ApiModelProperty("排序")
    private String jobDesc;
    private Date addTime;
    private Date updateTime;
    @ApiModelProperty("修改用户")
    private Long userId;
    @ApiModelProperty("报警邮件")
    private String alarmEmail;
    @ApiModelProperty("执行器路由策略")
    private String executorRouteStrategy;
    @ApiModelProperty("执行器任务Handler名称")
    private String executorHandler;
    @ApiModelProperty("执行器,任务参数")
    private String executorParam;
    @ApiModelProperty("阻塞处理策略")
    private String executorBlockStrategy;
    @ApiModelProperty("任务执行超时时间,单位秒")
    private int executorTimeout;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    @ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    @ApiModelProperty("GLUE更新时间")
    private Date glueUpdatetime;
    @ApiModelProperty("子任务ID")
    private String childJobId;
    @ApiModelProperty("上次调度时间")
    private long triggerLastTime;
    @ApiModelProperty("下次调度时间")
    private long triggerNextTime;
    @ApiModelProperty("datax运行json")
    private String jobJson;
    @ApiModelProperty("jvm参数")
    private String jvmParam;
    @ApiModelProperty("所属项目")
    private int projectId;
    // Joined display fields, not DB columns.
    @TableField(exist=false)
    private String projectName;
    @TableField(exist=false)
    private String userName;
}

View File

@ -0,0 +1,78 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
/**
 * Console user account with a role and a comma-separated list of permitted
 * executor-group ids.
 *
 * @author xuxueli 2019-05-04 16:43:12
 */
public class JobUser {
    /** primary key */
    private int id;
    @ApiModelProperty("账号")
    private String username;
    @ApiModelProperty("密码")
    private String password;
    @ApiModelProperty("角色0-普通用户、1-管理员")
    private String role;
    @ApiModelProperty("权限执行器ID列表多个逗号分割")
    private String permission;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getPermission() {
        return permission;
    }

    public void setPermission(String permission) {
        this.permission = permission;
    }

    // plugin
    /**
     * Whether this user may act on the given executor group.
     * Admins (role "1") may act on everything; otherwise the group id must
     * appear in the comma-separated permission list.
     */
    public boolean validPermission(int jobGroup){
        if ("1".equals(this.role)) {
            return true;
        }
        if (!StringUtils.hasText(this.permission)) {
            return false;
        }
        String wanted = String.valueOf(jobGroup);
        for (String granted : this.permission.split(",")) {
            if (wanted.equals(granted)) {
                return true;
            }
        }
        return false;
    }
}

View File

@ -0,0 +1,84 @@
package com.czsj.bigdata.entity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
import java.util.Collections;
/**
 * Spring Security principal backed by a {@link JobUser}. The user's single
 * role string becomes its one granted authority; all account-state checks are
 * hard-coded to "valid".
 * Created by jingwk on 2019/11/17
 */
public class JwtUser implements UserDetails {
    private Integer id;
    private String username;
    private String password;
    private Collection<? extends GrantedAuthority> authorities;

    public JwtUser() {
    }

    // Constructor that builds a JwtUser directly from a JobUser.
    public JwtUser(JobUser user) {
        id = user.getId();
        username = user.getUsername();
        password = user.getPassword();
        // the role string is used verbatim as the single authority
        authorities = Collections.singleton(new SimpleGrantedAuthority(user.getRole()));
    }

    @Override
    public Collection<? extends GrantedAuthority> getAuthorities() {
        return authorities;
    }

    @Override
    public String getPassword() {
        return password;
    }

    @Override
    public String getUsername() {
        return username;
    }

    // Account-state flags below always report "valid": expiry/locking is not
    // tracked for these users.
    @Override
    public boolean isAccountNonExpired() {
        return true;
    }

    @Override
    public boolean isAccountNonLocked() {
        return true;
    }

    @Override
    public boolean isCredentialsNonExpired() {
        return true;
    }

    @Override
    public boolean isEnabled() {
        return true;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    // NOTE(review): toString includes the password — consider masking it if
    // these objects are ever logged.
    @Override
    public String toString() {
        return "JwtUser{" +
                "id=" + id +
                ", username='" + username + '\'' +
                ", password='" + password + '\'' +
                ", authorities=" + authorities +
                '}';
    }
}

View File

@ -0,0 +1,15 @@
package com.czsj.bigdata.entity;
import lombok.Data;
/**
 * Login request payload: credentials plus a remember-me flag.
 * Created by jingwk on 2019/11/17
 */
@Data
public class LoginUser {
    private String username;
    private String password;
    // remember-me flag; 1 means "remember" (see JWTAuthenticationFilter),
    // may be null when the client omits it
    private Integer rememberMe;
}

View File

@ -0,0 +1,18 @@
package com.czsj.bigdata.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
@Data
public class OperLog {
    /** primary key */
    private long id;
    /** operation performed */
    @ApiModelProperty("操作")
    private String operate;
    /** user who performed it */
    @ApiModelProperty("用户名")
    private String user;
    /** client address */
    @ApiModelProperty("地址")
    private String address;
    /** creation time, stored as text — format not visible here; confirm against the writer */
    @ApiModelProperty("创建时间")
    private String createtime;
}

View File

@ -0,0 +1,73 @@
package com.czsj.bigdata.entity;
import com.alibaba.fastjson.annotation.JSONField;
import com.alibaba.fastjson.serializer.SerializerFeature;
/**
 * Generic API response envelope: a success flag, an optional message and an
 * optional payload. Instances are created through the static factories below.
 *
 * @Date: 2022/2/1 12:34
 * @Description:
 **/
public class ResponseData {
    String msg;
    boolean success;
    /** payload; serialized even when null (WriteMapNullValue) */
    @JSONField(serialzeFeatures = {SerializerFeature.WriteMapNullValue})
    Object data;

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Object getData() {
        return data;
    }

    public void setData(Object data) {
        this.data = data;
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    /** Shared builder used by every public factory. */
    private static ResponseData of(boolean success, String msg, Object data) {
        ResponseData dto = new ResponseData();
        dto.setSuccess(success);
        dto.setMsg(msg);
        dto.setData(data);
        return dto;
    }

    /** Success envelope carrying a payload and no message. */
    public static ResponseData apiSuccess(Object data) {
        return of(true, null, data);
    }

    /** Success envelope carrying only a message. */
    public static ResponseData successWithMsg(String msg) {
        return of(true, msg, null);
    }

    /** Success envelope carrying a payload (alias of {@link #apiSuccess}). */
    public static ResponseData successWithData(Object data) {
        return of(true, null, data);
    }

    /** Failure envelope carrying an error message. */
    public static ResponseData fail(String msg) {
        return of(false, msg, null);
    }
}

View File

@ -0,0 +1,172 @@
package com.czsj.bigdata.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.czsj.bigdata.util.CronUtils;
import com.czsj.common.annotation.Excel;
import com.czsj.common.annotation.Excel.ColumnType;
import com.czsj.common.constant.ScheduleConstants;
import com.czsj.common.core.domain.BaseEntity;
import com.czsj.common.utils.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import java.util.Date;
/**
 * Scheduled-job entity for table sys_job.
 *
 * @author czsj
 */
public class SysJob extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** job id */
    @Excel(name = "任务序号", cellType = ColumnType.NUMERIC)
    private Long jobId;

    /** job name */
    @Excel(name = "任务名称")
    private String jobName;

    /** job group name */
    @Excel(name = "任务组名")
    private String jobGroup;

    /** invocation target string */
    @Excel(name = "调用目标字符串")
    private String invokeTarget;

    /** cron expression */
    @Excel(name = "执行表达式 ")
    private String cronExpression;

    /** misfire policy: 0=default, 1=fire immediately, 2=fire once, 3=do nothing */
    @Excel(name = "计划策略 ", readConverterExp = "0=默认,1=立即触发执行,2=触发一次执行,3=不触发立即执行")
    private String misfirePolicy = ScheduleConstants.MISFIRE_DEFAULT;

    /** concurrent execution: 0=allowed, 1=forbidden */
    @Excel(name = "并发执行", readConverterExp = "0=允许,1=禁止")
    private String concurrent;

    /** job status: 0=normal, 1=paused */
    @Excel(name = "任务状态", readConverterExp = "0=正常,1=暂停")
    private String status;

    public Long getJobId()
    {
        return jobId;
    }

    public void setJobId(Long jobId)
    {
        this.jobId = jobId;
    }

    // validation annotations live on the getters so bean validation runs on reads
    @NotBlank(message = "任务名称不能为空")
    @Size(min = 0, max = 64, message = "任务名称不能超过64个字符")
    public String getJobName()
    {
        return jobName;
    }

    public void setJobName(String jobName)
    {
        this.jobName = jobName;
    }

    public String getJobGroup()
    {
        return jobGroup;
    }

    public void setJobGroup(String jobGroup)
    {
        this.jobGroup = jobGroup;
    }

    @NotBlank(message = "调用目标字符串不能为空")
    @Size(min = 0, max = 500, message = "调用目标字符串长度不能超过500个字符")
    public String getInvokeTarget()
    {
        return invokeTarget;
    }

    public void setInvokeTarget(String invokeTarget)
    {
        this.invokeTarget = invokeTarget;
    }

    @NotBlank(message = "Cron执行表达式不能为空")
    @Size(min = 0, max = 255, message = "Cron执行表达式不能超过255个字符")
    public String getCronExpression()
    {
        return cronExpression;
    }

    public void setCronExpression(String cronExpression)
    {
        this.cronExpression = cronExpression;
    }

    /**
     * Derived (not persisted): the next fire time computed from the cron
     * expression, or null when no expression is set.
     */
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    public Date getNextValidTime()
    {
        if (StringUtils.isNotEmpty(cronExpression))
        {
            return CronUtils.getNextExecution(cronExpression);
        }
        return null;
    }

    public String getMisfirePolicy()
    {
        return misfirePolicy;
    }

    public void setMisfirePolicy(String misfirePolicy)
    {
        this.misfirePolicy = misfirePolicy;
    }

    public String getConcurrent()
    {
        return concurrent;
    }

    public void setConcurrent(String concurrent)
    {
        this.concurrent = concurrent;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("jobId", getJobId())
            .append("jobName", getJobName())
            .append("jobGroup", getJobGroup())
            .append("cronExpression", getCronExpression())
            .append("nextValidTime", getNextValidTime())
            .append("misfirePolicy", getMisfirePolicy())
            .append("concurrent", getConcurrent())
            .append("status", getStatus())
            .append("createBy", getCreateBy())
            .append("createTime", getCreateTime())
            .append("updateBy", getUpdateBy())
            .append("updateTime", getUpdateTime())
            .append("remark", getRemark())
            .toString();
    }
}

View File

@ -0,0 +1,156 @@
package com.czsj.bigdata.entity;
import com.czsj.common.annotation.Excel;
import com.czsj.common.core.domain.BaseEntity;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.Date;
/**
 * Scheduled-job execution log entity for table sys_job_log.
 *
 * @author czsj
 */
public class SysJobLog extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** log id */
    @Excel(name = "日志序号")
    private Long jobLogId;

    /** job name */
    @Excel(name = "任务名称")
    private String jobName;

    /** job group name */
    @Excel(name = "任务组名")
    private String jobGroup;

    /** invocation target string */
    @Excel(name = "调用目标字符串")
    private String invokeTarget;

    /** log message */
    @Excel(name = "日志信息")
    private String jobMessage;

    /** execution status: 0=success, 1=failed */
    @Excel(name = "执行状态", readConverterExp = "0=正常,1=失败")
    private String status;

    /** exception details, populated on failure */
    @Excel(name = "异常信息")
    private String exceptionInfo;

    /** execution start time */
    private Date startTime;

    /** execution stop time */
    private Date stopTime;

    public Long getJobLogId()
    {
        return jobLogId;
    }

    public void setJobLogId(Long jobLogId)
    {
        this.jobLogId = jobLogId;
    }

    public String getJobName()
    {
        return jobName;
    }

    public void setJobName(String jobName)
    {
        this.jobName = jobName;
    }

    public String getJobGroup()
    {
        return jobGroup;
    }

    public void setJobGroup(String jobGroup)
    {
        this.jobGroup = jobGroup;
    }

    public String getInvokeTarget()
    {
        return invokeTarget;
    }

    public void setInvokeTarget(String invokeTarget)
    {
        this.invokeTarget = invokeTarget;
    }

    public String getJobMessage()
    {
        return jobMessage;
    }

    public void setJobMessage(String jobMessage)
    {
        this.jobMessage = jobMessage;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    public String getExceptionInfo()
    {
        return exceptionInfo;
    }

    public void setExceptionInfo(String exceptionInfo)
    {
        this.exceptionInfo = exceptionInfo;
    }

    public Date getStartTime()
    {
        return startTime;
    }

    public void setStartTime(Date startTime)
    {
        this.startTime = startTime;
    }

    public Date getStopTime()
    {
        return stopTime;
    }

    public void setStopTime(Date stopTime)
    {
        this.stopTime = stopTime;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("jobLogId", getJobLogId())
            .append("jobName", getJobName())
            .append("jobGroup", getJobGroup())
            .append("jobMessage", getJobMessage())
            .append("status", getStatus())
            .append("exceptionInfo", getExceptionInfo())
            .append("startTime", getStartTime())
            .append("stopTime", getStopTime())
            .toString();
    }
}

View File

@ -0,0 +1,264 @@
package com.czsj.bigdata.entity;
import java.math.BigDecimal;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.czsj.common.annotation.Excel;
import com.czsj.common.core.domain.BaseEntity;
/**
 * Server/executor monitoring entity for table sys_servers. Mirrors the
 * runtime metrics collected in {@code SystemMonitor}, persisted per server.
 *
 * @author czsj
 * @date 2022-04-28
 */
public class SysServers extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** primary key (original comment was a code-generator placeholder) */
    private Long id;

    /** group name */
    @Excel(name = "分组名")
    private String groupname;

    /** group code */
    @Excel(name = "分组编码")
    private String groupcode;

    /** server address */
    @Excel(name = "服务器地址")
    private String serveraddress;

    /** operating system name */
    @Excel(name = "系统名")
    private String osname;

    /** process start time */
    @Excel(name = "程序启动时间")
    private String starttime;

    /** process id */
    @Excel(name = "pid")
    private String pid;

    /** number of CPU cores */
    @Excel(name = "cpu核心数")
    private Long cpucores;

    /** CPU utilization */
    @Excel(name = "cpu使用率")
    private BigDecimal cpuutilization;

    /** CPU idle rate */
    @Excel(name = "cpu空闲率")
    private BigDecimal cpurate;

    /** JVM initial memory */
    @Excel(name = "JVM初始内存")
    private BigDecimal jvminitialmemory;

    /** JVM max memory */
    @Excel(name = "JVM最大内存")
    private BigDecimal jvmmaxmemory;

    /** JVM used memory */
    @Excel(name = "JVM已用内存")
    private BigDecimal jvmusedmemory;

    /** total physical memory */
    @Excel(name = "总物理内存")
    private BigDecimal physicalmemory;

    /** free physical memory */
    @Excel(name = "剩余物理内存")
    private BigDecimal surplusmemory;

    /** used physical memory */
    @Excel(name = "已用物理内存")
    private BigDecimal usedmemory;

    /** disk status */
    @Excel(name = "磁盘状态")
    private String diskstatus;

    public void setId(Long id)
    {
        this.id = id;
    }

    public Long getId()
    {
        return id;
    }

    public void setGroupname(String groupname)
    {
        this.groupname = groupname;
    }

    public String getGroupname()
    {
        return groupname;
    }

    public void setGroupcode(String groupcode)
    {
        this.groupcode = groupcode;
    }

    public String getGroupcode()
    {
        return groupcode;
    }

    public void setServeraddress(String serveraddress)
    {
        this.serveraddress = serveraddress;
    }

    public String getServeraddress()
    {
        return serveraddress;
    }

    public void setOsname(String osname)
    {
        this.osname = osname;
    }

    public String getOsname()
    {
        return osname;
    }

    public void setStarttime(String starttime)
    {
        this.starttime = starttime;
    }

    public String getStarttime()
    {
        return starttime;
    }

    public void setPid(String pid)
    {
        this.pid = pid;
    }

    public String getPid()
    {
        return pid;
    }

    public void setCpucores(Long cpucores)
    {
        this.cpucores = cpucores;
    }

    public Long getCpucores()
    {
        return cpucores;
    }

    public void setCpuutilization(BigDecimal cpuutilization)
    {
        this.cpuutilization = cpuutilization;
    }

    public BigDecimal getCpuutilization()
    {
        return cpuutilization;
    }

    public void setCpurate(BigDecimal cpurate)
    {
        this.cpurate = cpurate;
    }

    public BigDecimal getCpurate()
    {
        return cpurate;
    }

    public void setJvminitialmemory(BigDecimal jvminitialmemory)
    {
        this.jvminitialmemory = jvminitialmemory;
    }

    public BigDecimal getJvminitialmemory()
    {
        return jvminitialmemory;
    }

    public void setJvmmaxmemory(BigDecimal jvmmaxmemory)
    {
        this.jvmmaxmemory = jvmmaxmemory;
    }

    public BigDecimal getJvmmaxmemory()
    {
        return jvmmaxmemory;
    }

    public void setJvmusedmemory(BigDecimal jvmusedmemory)
    {
        this.jvmusedmemory = jvmusedmemory;
    }

    public BigDecimal getJvmusedmemory()
    {
        return jvmusedmemory;
    }

    public void setPhysicalmemory(BigDecimal physicalmemory)
    {
        this.physicalmemory = physicalmemory;
    }

    public BigDecimal getPhysicalmemory()
    {
        return physicalmemory;
    }

    public void setSurplusmemory(BigDecimal surplusmemory)
    {
        this.surplusmemory = surplusmemory;
    }

    public BigDecimal getSurplusmemory()
    {
        return surplusmemory;
    }

    public void setUsedmemory(BigDecimal usedmemory)
    {
        this.usedmemory = usedmemory;
    }

    public BigDecimal getUsedmemory()
    {
        return usedmemory;
    }

    public void setDiskstatus(String diskstatus)
    {
        this.diskstatus = diskstatus;
    }

    public String getDiskstatus()
    {
        return diskstatus;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("id", getId())
            .append("groupname", getGroupname())
            .append("groupcode", getGroupcode())
            .append("serveraddress", getServeraddress())
            .append("osname", getOsname())
            .append("starttime", getStarttime())
            .append("pid", getPid())
            .append("cpucores", getCpucores())
            .append("cpuutilization", getCpuutilization())
            .append("cpurate", getCpurate())
            .append("jvminitialmemory", getJvminitialmemory())
            .append("jvmmaxmemory", getJvmmaxmemory())
            .append("jvmusedmemory", getJvmusedmemory())
            .append("physicalmemory", getPhysicalmemory())
            .append("surplusmemory", getSurplusmemory())
            .append("usedmemory", getUsedmemory())
            .append("diskstatus", getDiskstatus())
            .append("createTime", getCreateTime())
            .append("createBy", getCreateBy())
            .toString();
    }
}

View File

@ -0,0 +1,47 @@
package com.czsj.bigdata.entity;
import lombok.Data;
@Data
public class SystemMonitor {
    // operating system name
    private String osName;
    // process start time
    private String startTime;
    // process id
    private String pid;
    // number of CPU cores
    private Integer cpuCores;
    // CPU utilization
    private Double cpuUtilization;
    // CPU idle rate
    private Double cpuRate;
    // JVM initial memory
    private Double jvmInitialMemory;
    // JVM max memory
    private Double jvmMaxMemory;
    // JVM used memory
    private Double jvmUsedMemory;
    // total physical memory
    private Double physicalMemory;
    // free physical memory
    private Double surplusMemory;
    // used physical memory
    private Double usedMemory;
    // disk status
    private String diskStatus;
}

View File

@ -0,0 +1,27 @@
package com.czsj.bigdata.exception;
/**
 * Thrown when a JWT presented by a client has passed its expiry time.
 *
 * Mirrors the full {@link Exception} constructor set so callers can attach a
 * message, a cause, or both.
 *
 * @author jingwk
 * @date 2019/11/17 17:21
 */
public class TokenIsExpiredException extends Exception {

    /** No-detail variant. */
    public TokenIsExpiredException() {
        super();
    }

    /** Variant carrying only a human-readable message. */
    public TokenIsExpiredException(String message) {
        super(message);
    }

    /** Variant wrapping an underlying cause without its own message. */
    public TokenIsExpiredException(Throwable cause) {
        super(cause);
    }

    /** Variant carrying both a message and the underlying cause. */
    public TokenIsExpiredException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Full control over suppression and stack-trace capture. */
    public TokenIsExpiredException(String message, Throwable cause,
                                   boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}

View File

@ -0,0 +1,92 @@
package com.czsj.bigdata.filter;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.czsj.bigdata.core.util.I18nUtil;
import com.czsj.bigdata.entity.JwtUser;
import com.czsj.bigdata.entity.LoginUser;
import com.czsj.bigdata.util.JwtTokenUtils;
import com.czsj.core.biz.model.ReturnT;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static com.czsj.core.util.Constants.SPLIT_COMMA;
/**
 * Login filter: authenticates the JSON credentials posted to /api/auth/login
 * and, on success, issues a JWT via the "token" response header and a JSON
 * body containing the token and the user's roles.
 *
 * Created by jingwk on 2019/11/17
 */
@Slf4j
public class JWTAuthenticationFilter extends UsernamePasswordAuthenticationFilter {

    /**
     * Carries the login request's rememberMe flag from attemptAuthentication()
     * to successfulAuthentication()/unsuccessfulAuthentication(), which run on
     * the same request thread. Always cleared after use so a stale value cannot
     * leak to the next request served by the same pooled container thread.
     */
    private final ThreadLocal<Integer> rememberMe = new ThreadLocal<>();

    private final AuthenticationManager authenticationManager;

    public JWTAuthenticationFilter(AuthenticationManager authenticationManager) {
        this.authenticationManager = authenticationManager;
        super.setFilterProcessesUrl("/api/auth/login");
    }

    /**
     * Reads the JSON login payload from the request body and delegates to the
     * {@link AuthenticationManager}.
     *
     * @return the authentication result, or null when the body cannot be read
     *         (original behavior kept: Spring Security treats null as failure)
     */
    @Override
    public Authentication attemptAuthentication(HttpServletRequest request,
                                                HttpServletResponse response) throws AuthenticationException {
        // 从输入流中获取到登录的信息 — parse the login info from the request body
        try {
            LoginUser loginUser = new ObjectMapper().readValue(request.getInputStream(), LoginUser.class);
            rememberMe.set(loginUser.getRememberMe());
            return authenticationManager.authenticate(
                    new UsernamePasswordAuthenticationToken(loginUser.getUsername(), loginUser.getPassword(), new ArrayList<>())
            );
        } catch (IOException e) {
            logger.error("attemptAuthentication error :{}",e);
            return null;
        }
    }

    /**
     * Called after successful authentication: creates the JWT and writes it to
     * both the response header and a JSON body.
     */
    @Override
    protected void successfulAuthentication(HttpServletRequest request,
                                            HttpServletResponse response,
                                            FilterChain chain,
                                            Authentication authResult) throws IOException {
        JwtUser jwtUser = (JwtUser) authResult.getPrincipal();
        // FIX: the original unboxed rememberMe.get() directly ("== 1"), which
        // throws NullPointerException when the client omits the rememberMe
        // field; a null flag is now treated as "do not remember". The
        // ThreadLocal is also removed so no state leaks across requests.
        Integer remember = rememberMe.get();
        rememberMe.remove();
        boolean isRemember = remember != null && remember == 1;
        String role = "";
        // the user carries a single authority (see JwtUser); the loop keeps the last one
        Collection<? extends GrantedAuthority> authorities = jwtUser.getAuthorities();
        for (GrantedAuthority authority : authorities){
            role = authority.getAuthority();
        }
        String token = JwtTokenUtils.createToken(jwtUser.getId(),jwtUser.getUsername(), role, isRemember);
        response.setHeader("token", JwtTokenUtils.TOKEN_PREFIX + token);
        response.setCharacterEncoding("UTF-8");
        Map<String, Object> maps = new HashMap<>();
        maps.put("data", JwtTokenUtils.TOKEN_PREFIX + token);
        maps.put("roles", role.split(SPLIT_COMMA));
        response.getWriter().write(JSON.toJSONString(new ReturnT<>(maps)));
    }

    /**
     * Called after failed authentication: clears the ThreadLocal (it may have
     * been populated before authenticate() failed) and answers with a
     * localized error payload.
     */
    @Override
    protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException, ServletException {
        rememberMe.remove();
        response.setCharacterEncoding("UTF-8");
        response.getWriter().write(JSON.toJSON(new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("login_param_invalid"))).toString());
    }
}

View File

@ -0,0 +1,73 @@
package com.czsj.bigdata.filter;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.extension.api.R;
import com.czsj.bigdata.exception.TokenIsExpiredException;
import com.czsj.bigdata.util.JwtTokenUtils;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collections;
/**
 * Authorization filter: validates the JWT on each request and, when valid,
 * installs an Authentication into the SecurityContext.
 * Created by jingwk on 2019/11/17
 */
public class JWTAuthorizationFilter extends BasicAuthenticationFilter {

    public JWTAuthorizationFilter(AuthenticationManager authenticationManager) {
        super(authenticationManager);
    }

    @Override
    protected void doFilterInternal(HttpServletRequest request,
                                    HttpServletResponse response,
                                    FilterChain chain) throws IOException, ServletException {
        String tokenHeader = request.getHeader(JwtTokenUtils.TOKEN_HEADER);
        // No token header (or wrong prefix): pass the request straight through
        // and let downstream security rules decide.
        if (tokenHeader == null || !tokenHeader.startsWith(JwtTokenUtils.TOKEN_PREFIX)) {
            chain.doFilter(request, response);
            return;
        }
        // A token is present: parse it and install the authentication.
        try {
            SecurityContextHolder.getContext().setAuthentication(getAuthentication(tokenHeader));
        } catch (TokenIsExpiredException e) {
            // Expired token: answer immediately with a JSON error body and
            // stop the filter chain.
            response.setCharacterEncoding("UTF-8");
            response.setContentType("application/json; charset=utf-8");
            response.getWriter().write(JSON.toJSONString(R.failed(e.getMessage())));
            response.getWriter().flush();
            return;
        }
        super.doFilterInternal(request, response, chain);
    }

    /**
     * Extracts the user and role from the token and builds an authenticated
     * token carrying that single authority.
     *
     * @throws TokenIsExpiredException when the JWT has expired
     * @return the authentication, or null when no username can be extracted
     */
    private UsernamePasswordAuthenticationToken getAuthentication(String tokenHeader) throws TokenIsExpiredException {
        String token = tokenHeader.replace(JwtTokenUtils.TOKEN_PREFIX, "");
        boolean expiration = JwtTokenUtils.isExpiration(token);
        if (expiration) {
            throw new TokenIsExpiredException("登录时间过长,请退出重新登录");
        }
        else {
            String username = JwtTokenUtils.getUsername(token);
            String role = JwtTokenUtils.getUserRole(token);
            if (username != null) {
                return new UsernamePasswordAuthenticationToken(username, null,
                        Collections.singleton(new SimpleGrantedAuthority(role))
                );
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,24 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.APIAuth;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
// MyBatis mapper for APIAuth rows; generic CRUD comes from MyBatis-Plus
// BaseMapper, the methods below are custom statements mapped outside this file.
public interface APIAuthMapper extends BaseMapper<APIAuth>{

    /** Delete the row with the given id. */
    int delete(@Param("id") int id);

    /** All APIAuth rows. */
    List<APIAuth> findAll();

    /** Insert a new row. */
    int save(APIAuth apiAuth);

    /** Update an existing row. */
    int update(APIAuth entity);

    /** Load one row by id. */
    APIAuth getById(int id);
}

View File

@ -0,0 +1,30 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.BaseResource;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
// MyBatis mapper for BaseResource rows; custom statements are mapped outside
// this file.
public interface BaseResourceMapper extends BaseMapper<BaseResource>{

    /** Delete the row with the given id. */
    int delete(@Param("id") int id);

    /** Page of resources filtered by name (offset/pagesize paging). */
    List<BaseResource> findList(@Param("offset") int offset,
                                @Param("pagesize") int pagesize,
                                @Param("name") String name);

    /** Insert a new row (parameter name kept from a copy-paste; it is a BaseResource). */
    int save(BaseResource apiAuth);

    /** Update an existing row. */
    int update(BaseResource entity);

    /** Load one row by id. */
    BaseResource getById(int id);

    /** Resource listing — exact filter differs from getFileResource in the mapping XML; confirm there. */
    List<BaseResource> getResource();

    /** File-resource listing. */
    List<BaseResource> getFileResource();
}

View File

@ -0,0 +1,13 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.czsj.bigdata.entity.DevEnvSetting;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
@Mapper
// MyBatis mapper for DevEnvSetting rows.
public interface DevEnvSettingMapper extends BaseMapper<DevEnvSetting>{

    /** MyBatis-Plus style paged query filtered by name. */
    IPage<DevEnvSetting> getDevEnvSettingListPaging(IPage<DevEnvSetting> page,
                                                    @Param("searchName") String searchName);
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.DevTask;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper for the "jar" flavour of DevTask rows.
// NOTE(review): shares the DevTask entity with DevSQLMapper/DevTaskMapper —
// mapping XML presumably distinguishes them; confirm.
public interface DevJarMapper extends BaseMapper<DevTask>{

    /** Insert a new row. */
    int save(DevTask devJar);

    /** Update an existing row. */
    int update(DevTask entity);

    /** Load one row by id. */
    DevTask getById(int id);
}

View File

@ -0,0 +1,17 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.DevTask;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper for the "SQL" flavour of DevTask rows.
public interface DevSQLMapper extends BaseMapper<DevTask>{

    /** Insert a new row (parameter name kept from a copy-paste). */
    int save(DevTask devJar);

    /** Update an existing row. */
    int update(DevTask entity);

    /** Load one row by id. */
    DevTask getById(int id);
}

View File

@ -0,0 +1,28 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.DevEnvSetting;
import com.czsj.bigdata.entity.DevTask;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@Mapper
// MyBatis mapper for DevTask rows.
public interface DevTaskMapper extends BaseMapper<DevTask>{

    /** Delete the row with the given id. */
    int delete(@Param("id") int id);

    /** Insert a new row (parameter name kept from a copy-paste). */
    int save(DevTask devJar);

    /** Update an existing row. */
    int update(DevTask entity);

    /** Load one row by id. */
    DevTask getById(int id);

    /** Page of tasks filtered by type (offset/pagesize paging). */
    List<DevTask> findList(@Param("offset") int offset,
                           @Param("pagesize") int pagesize,
                           @Param("type") String type);

    /** Path configured for the given task type — semantics defined in the mapping XML; confirm there. */
    String findPath(@Param("tasktype") String tasktype);
}

View File

@ -0,0 +1,25 @@
package com.czsj.bigdata.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.czsj.bigdata.entity.JobDatasource;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
 * Data-access layer for the JDBC datasource configuration table.
 *
 * @author zhouhongfa@gz-yibo.com
 * @version v1.0
 * @since 2019-07-30
 */
@Mapper
public interface JobDatasourceMapper extends BaseMapper<JobDatasource> {

    /** Update an existing datasource row. */
    int update(JobDatasource datasource);

    /** Load one datasource by id. */
    JobDatasource getDataSourceById(Long id);

    /** Datasource rows for name listing — exact column set defined in the mapping XML. */
    List<JobDatasource> findDataSourceName();

    /** All datasource rows. */
    List<JobDatasource> getdataSourceAll();
}

View File

@ -0,0 +1,29 @@
package com.czsj.bigdata.mapper;
import com.czsj.bigdata.entity.JobGroup;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for executor groups (job_group).
 * Created by xuxueli on 16/9/30.
 */
@Mapper
public interface JobGroupMapper {

    /** All executor groups. */
    List<JobGroup> findAll();

    /** Groups filtered by app name / title / address list (null filters presumably skipped in the XML — confirm). */
    List<JobGroup> find(@Param("appName") String appName,
                        @Param("title") String title,
                        @Param("addressList") String addressList);

    /** Insert a new group. */
    int save(JobGroup jobGroup);

    /** Groups with the given address-registration type. */
    List<JobGroup> findByAddressType(@Param("addressType") int addressType);

    /** Update an existing group. */
    int update(JobGroup jobGroup);

    /** Delete the group with the given id. */
    int remove(@Param("id") int id);

    /** Load one group by id. */
    JobGroup load(@Param("id") int id);
}

View File

@ -0,0 +1,60 @@
package com.czsj.bigdata.mapper;
import com.czsj.bigdata.entity.JobInfo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * MyBatis mapper for job definitions (job_info).
 *
 * @author xuxueli 2016-1-12 18:03:45
 */
@Mapper
public interface JobInfoMapper {

    /** Paged job listing filtered by group/status/description/glue-type/user/projects. */
    List<JobInfo> pageList(@Param("offset") int offset,
                           @Param("pagesize") int pagesize,
                           @Param("jobGroup") int jobGroup,
                           @Param("triggerStatus") int triggerStatus,
                           @Param("jobDesc") String jobDesc,
                           @Param("glueType") String glueType,
                           @Param("userId") int userId,
                           @Param("projectIds") Integer[] projectIds);

    /** Total row count for the same filter as {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("jobGroup") int jobGroup,
                      @Param("triggerStatus") int triggerStatus,
                      @Param("jobDesc") String jobDesc,
                      @Param("glueType") String glueType,
                      @Param("userId") int userId,
                      @Param("projectIds") Integer[] projectIds);

    /** All jobs. */
    List<JobInfo> findAll();

    /** Insert a new job. */
    int save(JobInfo info);

    /** Load one job by id. */
    JobInfo loadById(@Param("id") int id);

    /** Update an existing job. */
    int update(JobInfo jobInfo);

    /** Delete the job with the given id. */
    int delete(@Param("id") long id);

    /** Jobs belonging to one executor group. */
    List<JobInfo> getJobsByGroup(@Param("jobGroup") int jobGroup);

    /** Total number of jobs. */
    int findAllCount();

    /** Jobs whose next trigger time is before maxNextTime, limited to pagesize rows (scheduler scan). */
    List<JobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize);

    /** Persist updated trigger times after scheduling. */
    int scheduleUpdate(JobInfo xxlJobInfo);

    /** Persist the incremental-sync start time for the job. */
    int incrementTimeUpdate(@Param("id") int id, @Param("incStartTime") Date incStartTime);

    /** Persist the last handle (execution result) code for the job. */
    public int updateLastHandleCode(@Param("id") int id,@Param("lastHandleCode")int lastHandleCode);

    /** Persist the incremental-sync start id for the job. */
    void incrementIdUpdate(@Param("id") int id, @Param("incStartId")Long incStartId);
}

View File

@ -0,0 +1,25 @@
package com.czsj.bigdata.mapper;
import com.czsj.bigdata.entity.JobLogGlue;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for GLUE source history (job log glue).
 *
 * @author xuxueli 2016-5-19 18:04:56
 */
@Mapper
public interface JobLogGlueMapper {

    /** Insert a new GLUE history row. */
    int save(JobLogGlue jobLogGlue);

    /** GLUE history rows for one job. */
    List<JobLogGlue> findByJobId(@Param("jobId") int jobId);

    /** Trim old history for one job, keeping at most {@code limit} rows — confirm retention semantics in the XML. */
    int removeOld(@Param("jobId") int jobId, @Param("limit") int limit);

    /** Delete all history rows of one job. */
    int deleteByJobId(@Param("jobId") int jobId);
}

View File

@ -0,0 +1,67 @@
package com.czsj.bigdata.mapper;
import com.czsj.bigdata.entity.JobLog;
import com.czsj.bigdata.entity.OperLog;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * MyBatis mapper for job execution logs (job_log).
 *
 * @author xuxueli 2016-1-12 18:03:06
 */
@Mapper
public interface JobLogMapper {

    // Per the original note: when jobId is set it is used for filtering
    // instead of jobGroup; otherwise jobGroup is used.
    /** Paged log listing filtered by group/job/time-window/status. */
    List<JobLog> pageList(@Param("offset") int offset,
                          @Param("pagesize") int pagesize,
                          @Param("jobGroup") int jobGroup,
                          @Param("jobId") int jobId,
                          @Param("triggerTimeStart") Date triggerTimeStart,
                          @Param("triggerTimeEnd") Date triggerTimeEnd,
                          @Param("logStatus") int logStatus);

    /** Total row count for the same filter as {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("jobGroup") int jobGroup,
                      @Param("jobId") int jobId,
                      @Param("triggerTimeStart") Date triggerTimeStart,
                      @Param("triggerTimeEnd") Date triggerTimeEnd,
                      @Param("logStatus") int logStatus);

    /** Load one log row by id. */
    JobLog load(@Param("id") long id);

    /** Insert a new log row; returns per the mapping XML (presumably the generated key or affected rows — confirm). */
    long save(JobLog jobLog);

    /** Persist trigger-phase fields of a log row. */
    int updateTriggerInfo(JobLog jobLog);

    /** Persist handle-phase (execution result) fields of a log row. */
    int updateHandleInfo(JobLog jobLog);

    /** Persist the OS process id attached to a log row. */
    int updateProcessId(@Param("id") long id,
                        @Param("processId") String processId);

    /** Delete all log rows of one job. */
    int delete(@Param("jobId") int jobId);

    /** Aggregated counts for the dashboard within [from, to]. */
    Map<String, Object> findLogReport(@Param("from") Date from,
                                      @Param("to") Date to);

    /** Ids of log rows eligible for cleanup (by age and/or retained count). */
    List<Long> findClearLogIds(@Param("jobGroup") int jobGroup,
                               @Param("jobId") int jobId,
                               @Param("clearBeforeTime") Date clearBeforeTime,
                               @Param("clearBeforeNum") int clearBeforeNum,
                               @Param("pagesize") int pagesize);

    /** Delete the log rows with the given ids. */
    int clearLog(@Param("logIds") List<Long> logIds);

    /** Ids of failed logs pending alarm handling, limited to pagesize rows. */
    List<Long> findFailJobLogIds(@Param("pagesize") int pagesize);

    /** Compare-and-set of a log row's alarm status (old value guards the update). */
    int updateAlarmStatus(@Param("logId") long logId,
                          @Param("oldAlarmStatus") int oldAlarmStatus,
                          @Param("newAlarmStatus") int newAlarmStatus);
}

View File

@ -0,0 +1,30 @@
package com.czsj.bigdata.mapper;
import com.czsj.bigdata.entity.InfoReport;
import com.czsj.bigdata.entity.JobLogReport;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * MyBatis mapper for daily log-report aggregates (job_log_report).
 *
 * @author xuxueli 2019-11-22
 */
@Mapper
public interface JobLogReportMapper {

    /** Insert a new daily aggregate row. */
    int save(JobLogReport xxlJobLogReport);

    /** Update an existing daily aggregate row. */
    int update(JobLogReport xxlJobLogReport);

    /** Daily aggregates whose trigger day falls in [triggerDayFrom, triggerDayTo]. */
    List<JobLogReport> queryLogReport(@Param("triggerDayFrom") Date triggerDayFrom,
                                      @Param("triggerDayTo") Date triggerDayTo);

    /** Summary counts for the info dashboard — columns defined in the mapping XML. */
    List<InfoReport> getInfoReportCount();

    /** Grand-total aggregate across all days. */
    JobLogReport queryLogReportTotal();
}

Some files were not shown because too many files have changed in this diff Show More