【feat】基于若水,调整common模块

This commit is contained in:
Kris 2024-12-31 11:18:26 +08:00
parent 6176293dea
commit ea7cd01080
45 changed files with 4681 additions and 1 deletions

View File

@ -124,6 +124,108 @@
<artifactId>javax.servlet-api</artifactId>
</dependency>
<dependency>
<groupId>ch.ethz.ganymed</groupId>
<artifactId>ganymed-ssh2</artifactId>
<version>build210</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatisplus-spring-boot-starter</artifactId>
<version>1.0.5</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-extension</artifactId>
<version>3.3.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.24</version>
<scope>compile</scope>
</dependency>
<!-- NOTE(review): commons-lang3 is declared twice in this POM — here with version
     3.3.2 and again further down without a version. Duplicate declarations trigger
     a Maven warning and which one wins is build-order dependent; keep exactly one. -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.3.2</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.2</version>
<scope>compile</scope>
</dependency>
<!-- No version — presumably managed by a parent <dependencyManagement>; verify. -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<!-- Second commons-lang3 declaration — see the duplicate flagged above. -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>com.dtstack.dtcenter</groupId>
<artifactId>common.loader.core</artifactId>
<version>1.8.0-RELEASE</version>
</dependency>
<!-- NOTE(review): hutool 4.5.1 is a 2019-era release with known CVEs in later
     advisories — consider upgrading to a current 5.x line. -->
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>4.5.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.czsj</groupId>
<artifactId>czsj-core</artifactId>
<version>3.8.2</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.aspose</groupId>
<artifactId>aspose-words</artifactId>
<version>20.3</version>
<classifier>jdk17</classifier>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>5.0.1</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>12.8.0.jre11</version> <!-- 使用适合你 Java 版本的驱动 -->
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.oracle</groupId>-->
<!-- <artifactId>ojdbc6</artifactId>-->
<!-- <version>11.2.0.1.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
</dependency>
</dependencies>
<!-- Custom repository required for the com.aspose artifacts above. -->
<repositories>
<repository>
<id>AsposeJavaAPI</id>
<name>Aspose Java API</name>
<url>https://releases.aspose.com/java/repo/</url>
</repository>
</repositories>
</project>

View File

@ -0,0 +1,24 @@
package com.czsj.common.database;
import com.czsj.common.database.constants.DbType;
import com.czsj.common.database.dialect.DialectRegistry;
import com.czsj.common.database.service.DbDialect;
/**
* 方言工厂类
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * Static factory for looking up a {@link DbDialect} by {@link DbType}.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class DialectFactory {

    /** Single shared registry; dialects are stateless, so one instance suffices. */
    private static final DialectRegistry DIALECT_REGISTRY = new DialectRegistry();

    /** Utility holder — not meant to be instantiated. */
    private DialectFactory() {
    }

    /**
     * Looks up the dialect registered for the given database type.
     *
     * @param dbType the database type
     * @return the matching dialect, or {@code null} if none is registered
     */
    public static DbDialect getDialect(DbType dbType) {
        return DIALECT_REGISTRY.getDialect(dbType);
    }
}

View File

@ -0,0 +1,123 @@
package com.czsj.common.database.cache;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * In-memory LRU SQL cache with per-entry TTL, backed by an access-ordered
 * {@link LinkedHashMap} and guarded by a {@link ReentrantReadWriteLock}.
 */
public class DefaultSqlCache extends LinkedHashMap<String, DefaultSqlCache.ExpireNode<Object>> implements SqlCache {

    /** Maximum number of entries before LRU eviction kicks in. */
    private final int capacity;
    /** Default time-to-live in milliseconds (used when put() is called with ttl == 0). */
    private final long expire;
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    /**
     * @param capacity maximum number of cached entries
     * @param expire   default TTL in milliseconds
     */
    public DefaultSqlCache(int capacity, long expire) {
        // accessOrder = true: iteration order tracks access recency, enabling LRU.
        super((int) Math.ceil(capacity / 0.75) + 1, 0.75f, true);
        this.capacity = capacity;
        this.expire = expire;
    }

    /**
     * Caches a value under the given key.
     *
     * @param ttl TTL in ms; 0 means "use the cache-wide default", negative means never expire
     */
    @Override
    public void put(String key, Object value, long ttl) {
        long expireTime = Long.MAX_VALUE;
        if (ttl >= 0) {
            expireTime = System.currentTimeMillis() + (ttl == 0 ? this.expire : ttl);
        }
        lock.writeLock().lock();
        try {
            // Wrap the value together with its absolute deadline.
            put(key, new ExpireNode<>(expireTime, value));
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Returns the cached value, lazily removing it if expired.
     *
     * <p>BUGFIX: the map was created with accessOrder = true, so even
     * {@code super.get()} structurally modifies it (moves the entry to the tail).
     * The previous read-lock-only implementation could corrupt the map under
     * concurrent access; the whole lookup now runs under the write lock.
     */
    @Override
    public Object get(String key) {
        lock.writeLock().lock();
        try {
            ExpireNode<Object> node = super.get(key);
            if (node == null) {
                return null;
            }
            // Lazy expiration: drop the entry on access once its deadline has passed.
            if (this.expire > -1L && node.expire < System.currentTimeMillis()) {
                super.remove(key);
                return null;
            }
            return node.value;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Removes a single entry. (The previous version scanned the whole map —
     * O(n) — for what is a plain keyed removal.)
     */
    @Override
    public void delete(String key) {
        lock.writeLock().lock();
        try {
            super.remove(key);
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * LinkedHashMap eviction hook, invoked from put() (write lock already held;
     * the lock is reentrant, so the nested locking in clean() is safe).
     */
    @Override
    protected boolean removeEldestEntry(Map.Entry<String, ExpireNode<Object>> eldest) {
        if (this.expire > -1L && size() > capacity) {
            // Reclaim expired entries first before resorting to LRU eviction.
            clean();
        }
        // Evict the eldest (least recently used) entry if still over capacity.
        return size() > this.capacity;
    }

    /**
     * Removes every entry whose deadline has passed.
     */
    private void clean() {
        lock.writeLock().lock();
        try {
            long now = System.currentTimeMillis();
            Iterator<Map.Entry<String, ExpireNode<Object>>> iterator = super.entrySet().iterator();
            while (iterator.hasNext()) {
                if (iterator.next().getValue().expire < now) {
                    iterator.remove();
                }
            }
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Value wrapper carrying the absolute expiration timestamp (epoch millis).
     */
    static class ExpireNode<V> {
        long expire;
        Object value;

        public ExpireNode(long expire, Object value) {
            this.expire = expire;
            this.value = value;
        }
    }
}

View File

@ -0,0 +1,39 @@
package com.czsj.common.database.cache;
import com.czsj.common.database.utils.MD5Util;
import java.util.Arrays;
/**
* SQL缓存接口
*/
/**
 * SQL result cache abstraction: stores query results keyed by a digest of the
 * SQL text plus its bind parameters.
 */
public interface SqlCache {
/**
 * Builds the cache key as the MD5 digest of {@code sql + ":" + args}.
 *
 * @param sql  the SQL statement text
 * @param args the bind parameters (may be null/empty)
 * @return hex digest uniquely identifying this statement + parameter combination
 */
default String buildSqlCacheKey(String sql, Object[] args) {
return MD5Util.encrypt(sql + ":" + Arrays.toString(args));
}
/**
 * Stores a value in the cache.
 *
 * @param key   cache key (see {@link #buildSqlCacheKey})
 * @param value the result to cache
 * @param ttl   time-to-live in milliseconds; semantics of 0/negative are
 *              implementation-defined — see the concrete cache
 */
void put(String key, Object value, long ttl);
/**
 * Retrieves a cached value.
 *
 * @param key cache key
 * @return the cached value, or {@code null} on miss/expiry
 */
<T> T get(String key);
/**
 * Removes the entry for the given key, if present.
 *
 * @param key cache key
 */
void delete(String key);
}

View File

@ -0,0 +1,37 @@
package com.czsj.common.database.constants;
import lombok.AllArgsConstructor;
import lombok.Data;
import org.springframework.util.StringUtils;
import java.io.Serializable;
@Data
@AllArgsConstructor
public class DbQueryProperty implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Database type code — matches {@code DbType.getDb()}. */
    private String dbType;
    private String host;
    private String username;
    private String password;
    private Integer port;
    private String dbName;
    /** Oracle SID; only consulted for the Oracle dialects. */
    private String sid;

    /**
     * 参数合法性校验
     *
     * <p>Kept under its original (misspelled) name for backward compatibility
     * with existing callers; prefer {@link #validate()}.
     */
    public void viald() {
        validate();
    }

    /**
     * Validates that the mandatory connection fields are present and the
     * database type is supported.
     *
     * @throws RuntimeException if a mandatory field is missing or the type is unsupported
     */
    public void validate() {
        // port is an Integer: an explicit null check replaces the deprecated
        // StringUtils.isEmpty(Object) call — same semantics, clearer intent.
        if (StringUtils.isEmpty(dbType) || StringUtils.isEmpty(host) ||
                StringUtils.isEmpty(username) || StringUtils.isEmpty(password) ||
                port == null) {
            throw new RuntimeException("参数不完整");
        }
        if (DbType.OTHER.getDb().equals(dbType)) {
            throw new RuntimeException("不支持的数据库类型");
        }
    }
}

View File

@ -0,0 +1,100 @@
package com.czsj.common.database.constants;
/**
* 数据库类型
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * Supported database types, each carrying a string code, a human-readable
 * description, and a JDBC URL template with {@code ${host}}/{@code ${port}}/
 * {@code ${dbName}}/{@code ${sid}} placeholders.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public enum DbType {

    /** MySQL. */
    MYSQL("1", "MySql数据库", "jdbc:mysql://${host}:${port}/${dbName}?serverTimezone=GMT%2B8&characterEncoding=UTF-8&useUnicode=true&useSSL=false"),

    /** MariaDB. */
    MARIADB("2", "MariaDB数据库", "jdbc:mariadb://${host}:${port}/${dbName}"),

    /** Oracle 11g and below. */
    ORACLE("3", "Oracle11g及以下数据库", "jdbc:oracle:thin:@${host}:${port}:${sid}"),

    /** Oracle 12c and newer (native OFFSET/FETCH pagination). */
    ORACLE_12C("4", "Oracle12c+数据库", "jdbc:oracle:thin:@${host}:${port}:${sid}"),

    /** PostgreSQL. */
    POSTGRE_SQL("5", "PostgreSql数据库", "jdbc:postgresql://${host}:${port}/${dbName}"),

    /** SQL Server 2008 and below. */
    SQL_SERVER2008("6", "SQLServer2008及以下数据库", "jdbc:sqlserver://${host}:${port};DatabaseName=${dbName}"),

    /** SQL Server 2012 and newer. */
    SQL_SERVER("7", "SQLServer2012+数据库", "jdbc:sqlserver://${host}:${port};DatabaseName=${dbName}"),

    /** Unknown / unsupported database (empty URL template). */
    OTHER("8", "其他数据库", ""),

    /** ClickHouse. */
    CLICKHOUSE("9", "Clickhouse数据库", "jdbc:clickhouse://${host}:${port}/${dbName}"),

    /** Hive. */
    HIVE("10", "Hive数据库", "jdbc:hive2://${host}:${port}/${dbName}");

    /** Type code persisted/compared as a string. */
    private final String db;
    /** Human-readable description. */
    private final String desc;
    /** JDBC URL template with placeholders. */
    private final String url;

    DbType(String db, String desc, String url) {
        this.db = db;
        this.desc = desc;
        this.url = url;
    }

    public String getDb() {
        return db;
    }

    public String getDesc() {
        return desc;
    }

    public String getUrl() {
        return url;
    }

    /**
     * Resolves a type code to its enum constant.
     *
     * @param dbType the type code string
     * @return the matching constant, or {@link #OTHER} when unrecognised
     */
    public static DbType getDbType(String dbType) {
        for (DbType candidate : values()) {
            if (candidate.db.equals(dbType)) {
                return candidate;
            }
        }
        return OTHER;
    }
}

View File

@ -0,0 +1,72 @@
package com.czsj.common.database.datasource;
import com.czsj.common.database.DialectFactory;
import com.czsj.common.database.constants.DbQueryProperty;
import com.czsj.common.database.constants.DbType;
import com.czsj.common.database.query.AbstractDbQueryFactory;
import com.czsj.common.database.query.CacheDbQueryFactoryBean;
import com.czsj.common.database.service.DataSourceFactory;
import com.czsj.common.database.service.DbDialect;
import com.czsj.common.database.service.DbQuery;
import com.zaxxer.hikari.HikariDataSource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.util.Properties;
@Component
public class AbstractDataSourceFactory implements DataSourceFactory {

    /**
     * Validates the connection properties and builds a ready-to-use DbQuery.
     */
    public DbQuery createDbQuery(DbQueryProperty property) {
        property.viald();
        DbType dbType = DbType.getDbType(property.getDbType());
        return createDbQuery(createDataSource(property), dbType);
    }

    /**
     * Wires a DbQuery around an existing DataSource using the dialect for dbType.
     */
    public static DbQuery createDbQuery(DataSource dataSource, DbType dbType) {
        DbDialect dialect = DialectFactory.getDialect(dbType);
        if (dialect == null) {
            throw new RuntimeException("该数据库类型正在开发中");
        }
        AbstractDbQueryFactory factory = new CacheDbQueryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setJdbcTemplate(new JdbcTemplate(dataSource));
        factory.setDbDialect(dialect);
        return factory;
    }

    /**
     * Creates a Hikari-backed DataSource from the given properties.
     */
    public DataSource createDataSource(DbQueryProperty property) {
        HikariDataSource ds = new HikariDataSource();
        if (DbType.ORACLE_12C.getDb().equals(property.getDbType())) {
            // Oracle 12c needs the thin driver type spelled out explicitly.
            Properties oracleProps = new Properties();
            oracleProps.put("driverType", "thin");
            ds.setDataSourceProperties(oracleProps);
        }
        ds.setJdbcUrl(trainToJdbcUrl(property));
        ds.setUsername(property.getUsername());
        ds.setPassword(property.getPassword());
        return ds;
    }

    /**
     * Expands the dialect's URL template, substituting ${host}, ${port} and
     * either ${sid} (Oracle) or ${dbName} (everything else).
     */
    protected String trainToJdbcUrl(DbQueryProperty property) {
        String template = DbType.getDbType(property.getDbType()).getUrl();
        if (StringUtils.isEmpty(template)) {
            throw new RuntimeException("无效数据库类型!");
        }
        String url = template
                .replace("${host}", property.getHost())
                .replace("${port}", String.valueOf(property.getPort()));
        boolean isOracle = DbType.ORACLE.getDb().equals(property.getDbType())
                || DbType.ORACLE_12C.getDb().equals(property.getDbType());
        return isOracle
                ? url.replace("${sid}", property.getSid())
                : url.replace("${dbName}", property.getDbName());
    }
}

View File

@ -0,0 +1,65 @@
package com.czsj.common.database.datasource;
import com.czsj.common.database.constants.DbQueryProperty;

import javax.sql.DataSource;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * DataSource factory that caches created pools so repeated requests for the
 * same host/port/user/database reuse one connection pool.
 */
public class CacheDataSourceFactoryBean extends AbstractDataSourceFactory {

    /**
     * Pool cache keyed by an MD5-derived digest of host:port:username:dbName.
     * Note: the password is deliberately not part of the key.
     */
    private static final Map<String, DataSource> DATA_SOURCE_MAP = new ConcurrentHashMap<>();

    /**
     * Returns a cached DataSource for these properties, creating it on first use.
     *
     * <p>BUGFIX: the previous double-checked locking re-checked a stale local
     * variable inside the synchronized block instead of re-reading the map, so
     * two threads could both create a pool. computeIfAbsent is atomic per key.
     */
    @Override
    public DataSource createDataSource(DbQueryProperty property) {
        String key = property.getHost() + ":" + property.getPort() + ":" + property.getUsername() + ":" + property.getDbName();
        return DATA_SOURCE_MAP.computeIfAbsent(compress(key), k -> super.createDataSource(property));
    }

    /**
     * Digests a string down to the middle 16 hex characters (upper case) of its
     * MD5 — same output shape as the original implementation.
     *
     * @param str input; returned as-is when null or empty
     */
    public static String compress(String str) {
        if (str == null || str.isEmpty()) {
            return str;
        }
        MessageDigest md;
        try {
            md = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // MD5 is mandatory on every JVM; if it is truly absent, fail loudly
            // instead of continuing with a null digest (previously an NPE).
            throw new IllegalStateException("MD5 digest unavailable", e);
        }
        // Pin the charset: the old getBytes() used the platform default, making
        // the cache key platform-dependent for non-ASCII input.
        byte[] digest = md.digest(str.getBytes(StandardCharsets.UTF_8));
        StringBuilder buf = new StringBuilder();
        for (byte value : digest) {
            int i = value & 0xff;
            if (i < 16) {
                buf.append('0');
            }
            buf.append(Integer.toHexString(i));
        }
        return buf.substring(8, 24).toUpperCase();
    }
}

View File

@ -0,0 +1,4 @@
package com.czsj.common.database.datasource;
/**
 * Non-caching DataSource factory: inherits the default creation logic from
 * {@code AbstractDataSourceFactory} unchanged, so every call builds a fresh pool
 * (contrast with {@code CacheDataSourceFactoryBean}).
 */
public class DefaultDataSourceFactoryBean extends AbstractDataSourceFactory {
}

View File

@ -0,0 +1,38 @@
package com.czsj.common.database.dialect;
import com.czsj.common.database.service.DbDialect;
/**
* 方言抽象类
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * Base dialect with MySQL-style defaults: metadata queries against
 * information_schema and LIMIT-offset pagination. Subclasses override whatever
 * differs for their database.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public abstract class AbstractDbDialect implements DbDialect {
// Returns the SQL listing column metadata for a table.
// SECURITY NOTE(review): dbName/tableName are concatenated straight into the
// SQL — safe only if callers pass trusted identifiers; confirm, or move to
// bind parameters in the executor.
@Override
public String columns(String dbName, String tableName) {
return "select column_name AS COLNAME, ordinal_position AS COLPOSITION, column_default AS DATADEFAULT, is_nullable AS NULLABLE, data_type AS DATATYPE, " +
"character_maximum_length AS DATALENGTH, numeric_precision AS DATAPRECISION, numeric_scale AS DATASCALE, column_key AS COLKEY, column_comment AS COLCOMMENT " +
"from information_schema.columns where table_schema = '" + dbName + "' and table_name = '" + tableName + "' order by ordinal_position ";
}
// Returns the SQL listing all tables (name + comment) in the schema.
// Same identifier-concatenation caveat as columns() above.
@Override
public String tables(String dbName) {
return "SELECT table_name AS TABLENAME, table_comment AS TABLECOMMENT FROM information_schema.tables where table_schema = '" + dbName + "' ";
}
// Appends MySQL-style "LIMIT offset, count" pagination to the query.
@Override
public String buildPaginationSql(String originalSql, long offset, long count) {
// 获取 分页实际条数
StringBuilder sqlBuilder = new StringBuilder(originalSql);
sqlBuilder.append(" LIMIT ").append(offset).append(" , ").append(count);
return sqlBuilder.toString();
}
// Wraps the query to count its total rows (for pagination totals).
@Override
public String count(String sql) {
return "SELECT COUNT(*) FROM ( " + sql + " ) TEMP";
}
}

View File

@ -0,0 +1,36 @@
package com.czsj.common.database.dialect;
import com.czsj.common.database.service.DbDialect;
/**
* 方言抽象类
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * ClickHouse dialect base: metadata comes from the system.columns /
 * system.tables tables instead of information_schema.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public abstract class CKAbstractDbDialect implements DbDialect {
// Column metadata from ClickHouse's system.columns; several fields that
// ClickHouse does not track (length/precision/scale/nullable) are returned
// as empty-string literals so the shared row mapper still finds its aliases.
// SECURITY NOTE(review): dbName/tableName are concatenated into the SQL —
// only safe for trusted identifiers; confirm at the call sites.
@Override
public String columns(String dbName, String tableName) {
return "select name COLNAME,type DATATYPE,'' DATALENGTH, '' DATAPRECISION,'' DATASCALE, is_in_primary_key COLKEY,'' NULLABLE,rowNumberInAllBlocks() COLPOSITION,default_expression DATADEFAULT,comment COLCOMMENT from system.columns where database = '" + dbName + "' and table = '" + tableName + "'";
}
// Table list from system.tables; ClickHouse table comments are not selected
// here, so TABLECOMMENT is always empty.
@Override
public String tables(String dbName) {
return "SELECT name AS TABLENAME, '' AS TABLECOMMENT FROM system.tables where database = '" + dbName + "' ";
}
// Appends "LIMIT offset, count" pagination (ClickHouse supports this form).
@Override
public String buildPaginationSql(String originalSql, long offset, long count) {
// 获取 分页实际条数
StringBuilder sqlBuilder = new StringBuilder(originalSql);
sqlBuilder.append(" LIMIT ").append(offset).append(" , ").append(count);
return sqlBuilder.toString();
}
// Wraps the query to count its total rows.
@Override
public String count(String sql) {
return "SELECT COUNT(*) FROM ( " + sql + " ) TEMP";
}
}

View File

@ -0,0 +1,45 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
/**
* clickhouse
*
* @author xinjingczsj
* @since 2023-05-30
*/
/**
 * ClickHouse dialect: maps the system.columns / system.tables result sets
 * produced by {@link CKAbstractDbDialect} onto the shared metadata beans.
 *
 * @author xinjingczsj
 * @since 2023-05-30
 */
public class ClickhouseDialect extends CKAbstractDbDialect {

    /** Maps one system.columns row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbColumn entity = new DbColumn();
            entity.setColName(rs.getString("COLNAME"));
            entity.setDataType(rs.getString("DATATYPE"));
            entity.setDataLength(rs.getString("DATALENGTH"));
            entity.setDataPrecision(rs.getString("DATAPRECISION"));
            entity.setDataScale(rs.getString("DATASCALE"));
            // Redundant "? true : false" ternaries removed — the comparison is
            // already a boolean.
            entity.setColKey("1".equals(rs.getString("COLKEY")));
            entity.setNullable("Y".equals(rs.getString("NULLABLE")));
            entity.setColPosition(rs.getInt("COLPOSITION"));
            // DATADEFAULT is selected by the query but intentionally left
            // unmapped (carried over from the original implementation).
            entity.setColComment(rs.getString("COLCOMMENT"));
            return entity;
        };
    }

    /** Maps one system.tables row onto a DbTable (comment is always empty). */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbTable entity = new DbTable();
            entity.setTableName(rs.getString("TABLENAME"));
            entity.setTableComment(rs.getString("TABLECOMMENT"));
            return entity;
        };
    }
}

View File

@ -0,0 +1,30 @@
package com.czsj.common.database.dialect;
import com.czsj.common.database.constants.DbType;
import com.czsj.common.database.service.DbDialect;
import java.util.EnumMap;
import java.util.Map;
/**
 * Registry mapping each {@link DbType} to its stateless {@link DbDialect}
 * implementation. Populated once at construction; lookups are read-only.
 */
public class DialectRegistry {

    // Renamed from snake_case "dialect_enum_map" to follow Java naming
    // conventions; private field, so no caller is affected.
    private final Map<DbType, DbDialect> dialectMap = new EnumMap<>(DbType.class);

    public DialectRegistry() {
        dialectMap.put(DbType.MARIADB, new MariaDBDialect());
        dialectMap.put(DbType.MYSQL, new MySqlDialect());
        dialectMap.put(DbType.ORACLE_12C, new Oracle12cDialect());
        dialectMap.put(DbType.ORACLE, new OracleDialect());
        dialectMap.put(DbType.POSTGRE_SQL, new PostgreDialect());
        dialectMap.put(DbType.SQL_SERVER2008, new SQLServer2008Dialect());
        dialectMap.put(DbType.SQL_SERVER, new SQLServerDialect());
        dialectMap.put(DbType.CLICKHOUSE, new ClickhouseDialect());
        dialectMap.put(DbType.HIVE, new HiveDialect());
        dialectMap.put(DbType.OTHER, new UnknownDialect());
    }

    /**
     * @param dbType the database type
     * @return the registered dialect, or {@code null} if the type is unmapped
     */
    public DbDialect getDialect(DbType dbType) {
        return dialectMap.get(dbType);
    }
}

View File

@ -0,0 +1,36 @@
package com.czsj.common.database.dialect;
import com.czsj.common.database.service.DbDialect;
/**
* 方言抽象类
*
* @author xinjingczsj
* @since 2023-08-14
*/
/**
 * Hive dialect base: metadata via DESC / SHOW TABLES statements rather than
 * information_schema queries.
 *
 * @author xinjingczsj
 * @since 2023-08-14
 */
public abstract class HiveAbstractDbDialect implements DbDialect {
// "desc db.table" — output columns are col_name / data_type / comment.
// NOTE(review): dbName/tableName are concatenated into the statement; safe
// only for trusted identifiers.
@Override
public String columns(String dbName, String tableName) {
return "desc "+dbName+"."+tableName;
}
// Lists table names in the given database.
@Override
public String tables(String dbName) {
return "show tables in "+dbName;
}
// Appends "LIMIT offset, count" pagination.
// NOTE(review): the two-argument LIMIT form presumably requires Hive 2.0+ —
// confirm against the deployed Hive version.
@Override
public String buildPaginationSql(String originalSql, long offset, long count) {
// 获取 分页实际条数
StringBuilder sqlBuilder = new StringBuilder(originalSql);
sqlBuilder.append(" LIMIT ").append(offset).append(" , ").append(count);
return sqlBuilder.toString();
}
// Wraps the query to count its total rows.
@Override
public String count(String sql) {
return "SELECT COUNT(1) FROM ( " + sql + " ) TEMP";
}
}

View File

@ -0,0 +1,30 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
/**
 * Hive dialect: maps the output of "desc db.table" and "show tables" onto the
 * shared metadata beans. Hive's DESC output only exposes name/type/comment,
 * so the remaining DbColumn fields stay unset.
 */
public class HiveDialect extends HiveAbstractDbDialect {

    /** Maps one "desc" output row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (rs, rowNum) -> {
            DbColumn column = new DbColumn();
            column.setColName(rs.getString("col_name"));
            column.setDataType(rs.getString("data_type"));
            column.setColComment(rs.getString("comment"));
            return column;
        };
    }

    /** Maps one "show tables" output row onto a DbTable. */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (rs, rowNum) -> {
            DbTable table = new DbTable();
            table.setTableName(rs.getString("tab_name"));
            return table;
        };
    }
}

View File

@ -0,0 +1,10 @@
package com.czsj.common.database.dialect;
/**
* MariaDB 数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * MariaDB dialect — identical to the MySQL dialect (same information_schema
 * layout and LIMIT pagination), hence an empty subclass.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class MariaDBDialect extends MySqlDialect {
}

View File

@ -0,0 +1,44 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
/**
* MySql 数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * MySQL dialect: maps the information_schema result sets produced by
 * {@link AbstractDbDialect} onto the shared metadata beans.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class MySqlDialect extends AbstractDbDialect {

    /** Maps one information_schema.columns row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbColumn entity = new DbColumn();
            entity.setColName(rs.getString("COLNAME"));
            entity.setDataType(rs.getString("DATATYPE"));
            entity.setDataLength(rs.getString("DATALENGTH"));
            entity.setDataPrecision(rs.getString("DATAPRECISION"));
            entity.setDataScale(rs.getString("DATASCALE"));
            // Redundant "? true : false" ternaries removed — the equals()
            // comparison is already a boolean.
            entity.setColKey("PRI".equals(rs.getString("COLKEY")));
            entity.setNullable("YES".equals(rs.getString("NULLABLE")));
            entity.setColPosition(rs.getInt("COLPOSITION"));
            entity.setDataDefault(rs.getString("DATADEFAULT"));
            entity.setColComment(rs.getString("COLCOMMENT"));
            return entity;
        };
    }

    /** Maps one information_schema.tables row onto a DbTable. */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbTable entity = new DbTable();
            entity.setTableName(rs.getString("TABLENAME"));
            entity.setTableComment(rs.getString("TABLECOMMENT"));
            return entity;
        };
    }
}

View File

@ -0,0 +1,17 @@
package com.czsj.common.database.dialect;
/**
* ORACLE Oracle12c+数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * Oracle 12c+ dialect: everything is inherited from the Oracle dialect except
 * pagination, which uses the native OFFSET ... FETCH syntax introduced in 12c.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class Oracle12cDialect extends OracleDialect {

    @Override
    public String buildPaginationSql(String originalSql, long offset, long count) {
        // Plain concatenation produces the exact same string as the previous
        // StringBuilder version.
        return originalSql + " OFFSET " + offset + " ROWS FETCH NEXT " + count + " ROWS ONLY ";
    }
}

View File

@ -0,0 +1,74 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
/**
* Oracle Oracle11g及以下数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * Oracle (11g and below) dialect: metadata from the sys.user_* views and
 * ROWNUM-based pagination.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class OracleDialect extends AbstractDbDialect {

    /**
     * Column metadata joined with comments and primary-key membership.
     * NOTE(review): the "AS colName" alias is unquoted, so Oracle folds it to
     * COLNAME — which is what the row mapper reads; confirm if quoting is ever added.
     * tableName is concatenated into the SQL — trusted identifiers only.
     */
    @Override
    public String columns(String dbName, String tableName) {
        return "select columns.column_name AS colName, columns.data_type AS DATATYPE, columns.data_length AS DATALENGTH, columns.data_precision AS DATAPRECISION, " +
                "columns.data_scale AS DATASCALE, columns.nullable AS NULLABLE, columns.column_id AS COLPOSITION, columns.data_default AS DATADEFAULT, comments.comments AS COLCOMMENT," +
                "case when t.column_name is null then 0 else 1 end as COLKEY " +
                "from sys.user_tab_columns columns LEFT JOIN sys.user_col_comments comments ON columns.table_name = comments.table_name AND columns.column_name = comments.column_name " +
                "left join ( " +
                "select col.column_name as column_name, con.table_name as table_name from user_constraints con, user_cons_columns col " +
                "where con.constraint_name = col.constraint_name and con.constraint_type = 'P' " +
                ") t on t.table_name = columns.table_name and columns.column_name = t.column_name " +
                "where columns.table_name = UPPER('" + tableName + "') order by columns.column_id ";
    }

    /** Lists the current user's tables with their comments (dbName unused — user-scoped views). */
    @Override
    public String tables(String dbName) {
        return "select tables.table_name AS TABLENAME, comments.comments AS TABLECOMMENT from sys.user_tables tables " +
                "LEFT JOIN sys.user_tab_comments comments ON tables.table_name = comments.table_name ";
    }

    /** Classic ROWNUM double-wrap pagination for pre-12c Oracle. */
    @Override
    public String buildPaginationSql(String originalSql, long offset, long count) {
        StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("SELECT * FROM ( SELECT TMP.*, ROWNUM ROW_ID FROM ( ");
        sqlBuilder.append(originalSql).append(" ) TMP WHERE ROWNUM <=").append((offset >= 1) ? (offset + count) : count);
        sqlBuilder.append(") WHERE ROW_ID > ").append(offset);
        return sqlBuilder.toString();
    }

    /** Maps one metadata row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbColumn entity = new DbColumn();
            entity.setColName(rs.getString("COLNAME"));
            entity.setDataType(rs.getString("DATATYPE"));
            entity.setDataLength(rs.getString("DATALENGTH"));
            entity.setDataPrecision(rs.getString("DATAPRECISION"));
            entity.setDataScale(rs.getString("DATASCALE"));
            // Redundant "? true : false" ternaries removed.
            entity.setColKey("1".equals(rs.getString("COLKEY")));
            entity.setNullable("Y".equals(rs.getString("NULLABLE")));
            entity.setColPosition(rs.getInt("COLPOSITION"));
            // DATADEFAULT is selected but intentionally left unmapped (LONG
            // column — carried over from the original implementation).
            entity.setColComment(rs.getString("COLCOMMENT"));
            return entity;
        };
    }

    /** Maps one table row onto a DbTable. */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbTable entity = new DbTable();
            entity.setTableName(rs.getString("TABLENAME"));
            entity.setTableComment(rs.getString("TABLECOMMENT"));
            return entity;
        };
    }
}

View File

@ -0,0 +1,72 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
/**
* Postgre 数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * PostgreSQL dialect: metadata from information_schema + pg_catalog and
 * LIMIT/OFFSET pagination.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class PostgreDialect extends AbstractDbDialect {

    /**
     * Column metadata with comments and primary-key membership (public schema).
     * NOTE(review): dbName/tableName are concatenated into the SQL — trusted
     * identifiers only.
     */
    @Override
    public String columns(String dbName, String tableName) {
        return "select col.column_name AS COLNAME, col.ordinal_position AS COLPOSITION, col.column_default AS DATADEFAULT, col.is_nullable AS NULLABLE, col.udt_name AS DATATYPE, " +
                "col.character_maximum_length AS DATALENGTH, col.numeric_precision AS DATAPRECISION, col.numeric_scale AS DATASCALE, des.description AS COLCOMMENT, " +
                "case when t.colname is null then 0 else 1 end as COLKEY " +
                "from information_schema.columns col left join pg_description des on col.table_name::regclass = des.objoid and col.ordinal_position = des.objsubid " +
                "left join ( " +
                "select pg_attribute.attname as colname from pg_constraint inner join pg_class on pg_constraint.conrelid = pg_class.oid " +
                "inner join pg_attribute on pg_attribute.attrelid = pg_class.oid and pg_attribute.attnum = any(pg_constraint.conkey) " +
                "where pg_class.relname = '" + tableName + "' and pg_constraint.contype = 'p' " +
                ") t on t.colname = col.column_name " +
                "where col.table_catalog = '" + dbName + "' and col.table_schema = 'public' and col.table_name = '" + tableName + "' order by col.ordinal_position ";
    }

    /** Lists public-schema tables owned by dbName, with comments. */
    @Override
    public String tables(String dbName) {
        return "select relname AS TABLENAME, cast(obj_description(relfilenode, 'pg_class') as varchar) AS TABLECOMMENT from pg_class " +
                "where relname in (select tablename from pg_tables where schemaname = 'public' and tableowner = '" + dbName + "' and position('_2' in tablename) = 0) ";
    }

    /** PostgreSQL uses "LIMIT count OFFSET offset" rather than MySQL's form. */
    @Override
    public String buildPaginationSql(String originalSql, long offset, long count) {
        StringBuilder sqlBuilder = new StringBuilder(originalSql);
        sqlBuilder.append(" LIMIT ").append(count).append(" offset ").append(offset);
        return sqlBuilder.toString();
    }

    /** Maps one metadata row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbColumn entity = new DbColumn();
            entity.setColName(rs.getString("COLNAME"));
            entity.setDataType(rs.getString("DATATYPE"));
            entity.setDataLength(rs.getString("DATALENGTH"));
            entity.setDataPrecision(rs.getString("DATAPRECISION"));
            entity.setDataScale(rs.getString("DATASCALE"));
            // Redundant "? true : false" ternaries removed.
            entity.setColKey("1".equals(rs.getString("COLKEY")));
            entity.setNullable("YES".equals(rs.getString("NULLABLE")));
            entity.setColPosition(rs.getInt("COLPOSITION"));
            entity.setDataDefault(rs.getString("DATADEFAULT"));
            entity.setColComment(rs.getString("COLCOMMENT"));
            return entity;
        };
    }

    /** Maps one table row onto a DbTable. */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbTable entity = new DbTable();
            entity.setTableName(rs.getString("TABLENAME"));
            entity.setTableComment(rs.getString("TABLECOMMENT"));
            return entity;
        };
    }
}

View File

@ -0,0 +1,106 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.util.StringUtils;
import java.sql.ResultSet;
/**
* SQLServer 2005 数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * SQL Server 2008-and-below dialect: metadata from sys.* catalog views and
 * ROW_NUMBER()-based pagination (no OFFSET/FETCH before 2012).
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class SQLServer2008Dialect extends AbstractDbDialect {

    /**
     * Column metadata with comments (MS_Description), defaults and PK membership.
     * NOTE(review): tableName is concatenated into the SQL — trusted identifiers only.
     */
    @Override
    public String columns(String dbName, String tableName) {
        return "select columns.name AS colName, columns.column_id AS COLPOSITION, columns.max_length AS DATALENGTH, columns.precision AS DATAPRECISION, columns.scale AS DATASCALE, " +
                "columns.is_nullable AS NULLABLE, types.name AS DATATYPE, CAST(ep.value AS NVARCHAR(128)) AS COLCOMMENT, e.text AS DATADEFAULT, " +
                "(select top 1 ind.is_primary_key from sys.index_columns ic left join sys.indexes ind on ic.object_id = ind.object_id and ic.index_id = ind.index_id and ind.name like 'PK_%' where ic.object_id=columns.object_id and ic.column_id=columns.column_id) AS COLKEY " +
                "from sys.columns columns LEFT JOIN sys.types types ON columns.system_type_id = types.system_type_id " +
                "LEFT JOIN syscomments e ON columns.default_object_id= e.id " +
                "LEFT JOIN sys.extended_properties ep ON ep.major_id = columns.object_id AND ep.minor_id = columns.column_id AND ep.name = 'MS_Description' " +
                "where columns.object_id = object_id('" + tableName + "') order by columns.column_id ";
    }

    /** Lists tables with their MS_Description comments. */
    @Override
    public String tables(String dbName) {
        return "select tables.name AS TABLENAME, CAST(ep.value AS NVARCHAR(128)) AS TABLECOMMENT " +
                "from sys.tables tables LEFT JOIN sys.extended_properties ep ON ep.major_id = tables.object_id AND ep.minor_id = 0";
    }

    /** Returns the trailing "order by ..." clause of the SQL, or "" if absent. */
    private static String getOrderByPart(String sql) {
        String loweredString = sql.toLowerCase();
        int orderByIndex = loweredString.indexOf("order by");
        if (orderByIndex != -1) {
            return sql.substring(orderByIndex);
        } else {
            return "";
        }
    }

    /**
     * Wraps the query in a ROW_NUMBER() CTE for pagination.
     *
     * <p>BUGFIX: the previous version detected the "select" keyword on a
     * <em>trimmed</em> copy of the SQL but sliced the <em>untrimmed</em> one,
     * so a query with leading whitespace had the wrong prefix cut off. We now
     * trim once up front and operate on that string throughout.
     */
    @Override
    public String buildPaginationSql(String originalSql, long offset, long count) {
        String orderby = getOrderByPart(originalSql);
        String distinctStr = "";
        String sqlPartString = originalSql.trim();
        String loweredString = sqlPartString.toLowerCase();
        if (loweredString.startsWith("select")) {
            int index = 6;
            if (loweredString.startsWith("select distinct")) {
                distinctStr = "DISTINCT ";
                index = 15;
            }
            // Strip the SELECT [DISTINCT] keyword: it is re-emitted by the CTE below.
            sqlPartString = sqlPartString.substring(index);
        }
        // ROW_NUMBER() requires an ORDER BY; use a constant one when the query has none.
        if (StringUtils.isEmpty(orderby)) {
            orderby = "ORDER BY CURRENT_TIMESTAMP";
        }
        StringBuilder sql = new StringBuilder();
        sql.append("WITH selectTemp AS (SELECT ").append(distinctStr).append("TOP 100 PERCENT ")
                .append(" ROW_NUMBER() OVER (").append(orderby).append(") as __row_number__, ").append(sqlPartString)
                .append(") SELECT * FROM selectTemp WHERE __row_number__ BETWEEN ")
                // BETWEEN is inclusive on both ends (unlike MySQL LIMIT), hence offset + 1.
                .append(offset + 1)
                .append(" AND ")
                .append(offset + count).append(" ORDER BY __row_number__");
        return sql.toString();
    }

    /** Maps one metadata row onto a DbColumn. */
    @Override
    public RowMapper<DbColumn> columnMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbColumn entity = new DbColumn();
            entity.setColName(rs.getString("COLNAME"));
            entity.setDataType(rs.getString("DATATYPE"));
            entity.setDataLength(rs.getString("DATALENGTH"));
            entity.setDataPrecision(rs.getString("DATAPRECISION"));
            entity.setDataScale(rs.getString("DATASCALE"));
            // Redundant "? true : false" ternaries removed.
            entity.setColKey("1".equals(rs.getString("COLKEY")));
            entity.setNullable("1".equals(rs.getString("NULLABLE")));
            entity.setColPosition(rs.getInt("COLPOSITION"));
            entity.setDataDefault(rs.getString("DATADEFAULT"));
            entity.setColComment(rs.getString("COLCOMMENT"));
            return entity;
        };
    }

    /** Maps one table row onto a DbTable. */
    @Override
    public RowMapper<DbTable> tableMapper() {
        return (ResultSet rs, int rowNum) -> {
            DbTable entity = new DbTable();
            entity.setTableName(rs.getString("TABLENAME"));
            entity.setTableComment(rs.getString("TABLECOMMENT"));
            return entity;
        };
    }
}

View File

@ -0,0 +1,17 @@
package com.czsj.common.database.dialect;
/**
* SQLServer 数据库方言
*
* @author yuwei
* @since 2020-03-14
*/
/**
 * SQL Server 2012+ dialect: inherits metadata handling from the 2008 dialect
 * and replaces pagination with the native OFFSET ... FETCH syntax.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class SQLServerDialect extends SQLServer2008Dialect {

    @Override
    public String buildPaginationSql(String originalSql, long offset, long count) {
        // Plain concatenation yields the exact same string as the previous
        // StringBuilder version.
        return originalSql + " OFFSET " + offset + " ROWS FETCH NEXT " + count + " ROWS ONLY ";
    }
}

View File

@ -0,0 +1,45 @@
package com.czsj.common.database.dialect;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
/**
 * Fallback dialect for unrecognized database types: every operation fails
 * with an "unsupported database type" error.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public class UnknownDialect extends AbstractDbDialect {

    /**
     * Builds the failure for every method. UnsupportedOperationException is
     * the idiomatic type for "operation not supported" and is still a
     * RuntimeException, so existing catch blocks keep working.
     */
    private static UnsupportedOperationException unsupported() {
        return new UnsupportedOperationException("不支持的数据库类型");
    }

    @Override
    public String columns(String dbName, String tableName) {
        throw unsupported();
    }

    @Override
    public String tables(String dbName) {
        throw unsupported();
    }

    @Override
    public String buildPaginationSql(String sql, long offset, long count) {
        throw unsupported();
    }

    @Override
    public String count(String sql) {
        throw unsupported();
    }

    @Override
    public RowMapper<DbColumn> columnMapper() {
        throw unsupported();
    }

    @Override
    public RowMapper<DbTable> tableMapper() {
        throw unsupported();
    }
}

View File

@ -0,0 +1,128 @@
package com.czsj.common.database.query;
import com.czsj.common.database.service.DbDialect;
import com.czsj.common.database.service.DbQuery;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import com.czsj.core.database.core.PageResult;
import com.zaxxer.hikari.HikariDataSource;
import lombok.Setter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
@Setter
public abstract class AbstractDbQueryFactory implements DbQuery {

    // Collaborators are injected via Lombok-generated setters (@Setter).
    protected DataSource dataSource;
    protected JdbcTemplate jdbcTemplate;
    protected DbDialect dbDialect;

    /**
     * Borrow a connection from the underlying data source.
     *
     * @throws RuntimeException wrapping the SQLException (cause preserved so
     *         the driver error is not lost — the original dropped it)
     */
    @Override
    public Connection getConnection() {
        try {
            return dataSource.getConnection();
        } catch (SQLException e) {
            throw new RuntimeException("获取数据库连接出错", e);
        }
    }

    /**
     * Check connectivity by opening a probe connection and asking the driver
     * to validate it. try-with-resources guarantees the probe is closed on
     * every path (the original hand-rolled finally could mask the primary
     * error with a close failure).
     */
    @Override
    public boolean valid() {
        try (Connection conn = dataSource.getConnection()) {
            return conn.isValid(0);
        } catch (SQLException e) {
            throw new RuntimeException("检测连通性出错", e);
        }
    }

    /**
     * Close the underlying connection pool. Only HikariCP pools are
     * supported; any other DataSource type is rejected.
     */
    @Override
    public void close() {
        if (dataSource instanceof HikariDataSource) {
            ((HikariDataSource) dataSource).close();
        } else {
            throw new RuntimeException("不合法数据源类型");
        }
    }

    /** Column metadata of one table, via the dialect's metadata SQL. */
    @Override
    public List<DbColumn> getTableColumns(String dbName, String tableName) {
        String sql = dbDialect.columns(dbName, tableName);
        return jdbcTemplate.query(sql, dbDialect.columnMapper());
    }

    /** Table metadata of one database, via the dialect's metadata SQL. */
    @Override
    public List<DbTable> getTables(String dbName) {
        String sql = dbDialect.tables(dbName);
        return jdbcTemplate.query(sql, dbDialect.tableMapper());
    }

    /** Row count of sql, wrapped by the dialect's COUNT query. */
    @Override
    public int count(String sql) {
        return jdbcTemplate.queryForObject(dbDialect.count(sql), Integer.class);
    }

    /** Row count with positional parameters. */
    @Override
    public int count(String sql, Object[] args) {
        return jdbcTemplate.queryForObject(dbDialect.count(sql), args, Integer.class);
    }

    /** Row count with named parameters. */
    @Override
    public int count(String sql, Map<String, Object> params) {
        NamedParameterJdbcTemplate namedJdbcTemplate = new NamedParameterJdbcTemplate(jdbcTemplate);
        return namedJdbcTemplate.queryForObject(dbDialect.count(sql), params, Integer.class);
    }

    @Override
    public List<Map<String, Object>> queryList(String sql) {
        return jdbcTemplate.queryForList(sql);
    }

    @Override
    public List<Map<String, Object>> queryList(String sql, Object[] args) {
        return jdbcTemplate.queryForList(sql, args);
    }

    /**
     * One page of results; the total comes from a separate COUNT query and
     * the page SQL from the dialect's pagination wrapper.
     */
    @Override
    public PageResult<Map<String, Object>> queryByPage(String sql, long offset, long size) {
        int total = count(sql);
        String pageSql = dbDialect.buildPaginationSql(sql, offset, size);
        List<Map<String, Object>> records = jdbcTemplate.queryForList(pageSql);
        return new PageResult<>(total, records);
    }

    /** One page of results with positional parameters. */
    @Override
    public PageResult<Map<String, Object>> queryByPage(String sql, Object[] args, long offset, long size) {
        int total = count(sql, args);
        String pageSql = dbDialect.buildPaginationSql(sql, offset, size);
        List<Map<String, Object>> records = jdbcTemplate.queryForList(pageSql, args);
        return new PageResult<>(total, records);
    }

    /** One page of results with named parameters. */
    @Override
    public PageResult<Map<String, Object>> queryByPage(String sql, Map<String, Object> params, long offset, long size) {
        int total = count(sql, params);
        String pageSql = dbDialect.buildPaginationSql(sql, offset, size);
        NamedParameterJdbcTemplate namedJdbcTemplate = new NamedParameterJdbcTemplate(jdbcTemplate);
        List<Map<String, Object>> records = namedJdbcTemplate.queryForList(pageSql, params);
        return new PageResult<>(total, records);
    }
}

View File

@ -0,0 +1,105 @@
package com.czsj.common.database.query;
import com.czsj.common.database.cache.DefaultSqlCache;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import com.czsj.core.database.core.PageResult;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
public class CacheDbQueryFactoryBean extends AbstractDbQueryFactory {
/**
* 默认缓存5分钟
*/
private static long DEFAULT_EXPIRE = 5 * 60 * 1000;
private static DefaultSqlCache sqlCache = new DefaultSqlCache(100, DEFAULT_EXPIRE);
private <T> T putCacheValue(String key, T value, long ttl) {
sqlCache.put(key, value, ttl);
return value;
}
@Override
public List<DbColumn> getTableColumns(String dbName, String tableName) {
Object[] args = new Object[]{dbName, tableName};
Optional.ofNullable(sqlCache.get(sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":getTableColumns", args)));
return super.getTableColumns(dbName, tableName);
}
@Override
public List<DbTable> getTables(String dbName) {
Object[] args = new Object[]{dbName};
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":getTables", args);
return (List<DbTable>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.getTables(dbName), DEFAULT_EXPIRE));
}
@Override
public int count(String sql) {
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, null);
return (int) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.count(sql), DEFAULT_EXPIRE));
}
@Override
public int count(String sql, Object[] args) {
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, args);
return (int) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.count(sql, args), DEFAULT_EXPIRE));
}
@Override
public int count(String sql, Map<String, Object> params) {
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, params.values().toArray());
return (int) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.count(sql, params), DEFAULT_EXPIRE));
}
@Override
public List<Map<String, Object>> queryList(String sql) {
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, null);
return (List<Map<String, Object>>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.queryList(sql), DEFAULT_EXPIRE));
}
@Override
public List<Map<String, Object>> queryList(String sql, Object[] args) {
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, args);
return (List<Map<String, Object>>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.queryList(sql, args), DEFAULT_EXPIRE));
}
@Override
public PageResult<Map<String, Object>> queryByPage(String sql, long offset, long size) {
Object[] args = new Object[]{offset, size};
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, args);
return (PageResult<Map<String, Object>>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.queryByPage(sql, offset, size), DEFAULT_EXPIRE));
}
@Override
public PageResult<Map<String, Object>> queryByPage(String sql, Object[] args, long offset, long size) {
Object[] objects = Arrays.copyOf(args, args.length + 2);
objects[args.length] = offset;
objects[args.length + 1] = size;
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, objects);
return (PageResult<Map<String, Object>>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.queryByPage(sql, args, offset, size), DEFAULT_EXPIRE));
}
@Override
public PageResult<Map<String, Object>> queryByPage(String sql, Map<String, Object> params, long offset, long size) {
Object[] args = params.values().toArray();
Object[] objects = Arrays.copyOf(args, args.length + 2);
objects[args.length] = offset;
objects[args.length + 1] = size;
String cacheKey = sqlCache.buildSqlCacheKey(super.dataSource.toString() + ":" + sql, objects);
return (PageResult<Map<String, Object>>) Optional.ofNullable(sqlCache.get(cacheKey))
.orElse(putCacheValue(cacheKey, super.queryByPage(sql, params, offset, size), DEFAULT_EXPIRE));
}
}

View File

@ -0,0 +1,5 @@
package com.czsj.common.database.query;
/**
 * Plain DbQuery implementation with no result caching; inherits all
 * behavior from AbstractDbQueryFactory. Use CacheDbQueryFactoryBean
 * for the caching variant.
 */
public class DefaultDbQueryFactoryBean extends AbstractDbQueryFactory {
}

View File

@ -0,0 +1,15 @@
package com.czsj.common.database.service;
import com.czsj.common.database.constants.DbQueryProperty;
/**
 * Factory for DbQuery instances built from connection properties.
 */
@FunctionalInterface
public interface DataSourceFactory {

    /**
     * Create a DbQuery backed by a data source built from the given
     * connection properties.
     *
     * @param property connection properties describing the target database
     * @return a DbQuery bound to the newly created data source
     */
    DbQuery createDbQuery(DbQueryProperty property);
}

View File

@ -0,0 +1,55 @@
package com.czsj.common.database.service;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import org.springframework.jdbc.core.RowMapper;
/**
 * Database dialect: supplies the vendor-specific metadata SQL and the
 * pagination/count SQL wrappers used by DbQuery implementations.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public interface DbDialect {

    /**
     * Row mapper turning one metadata result row into a DbTable.
     */
    RowMapper<DbTable> tableMapper();

    /**
     * Row mapper turning one metadata result row into a DbColumn.
     */
    RowMapper<DbColumn> columnMapper();

    /**
     * SQL that lists all columns of the given table.
     *
     * @param dbName database (schema) name
     * @param tableName table to inspect
     * @return the column-metadata query SQL
     */
    String columns(String dbName, String tableName);

    /**
     * SQL that lists all tables in the given database.
     *
     * @param dbName database (schema) name
     * @return the table-metadata query SQL
     */
    String tables(String dbName);

    /**
     * Wrap the given SQL with vendor-specific pagination.
     *
     * @param sql original query
     * @param offset rows to skip
     * @param count rows to return
     * @return the paginated SQL
     */
    String buildPaginationSql(String sql, long offset, long count);

    /**
     * Wrap the given SQL in a COUNT query.
     *
     * @param sql original query
     * @return the counting SQL
     */
    String count(String sql);
}

View File

@ -0,0 +1,123 @@
package com.czsj.common.database.service;
import com.czsj.core.database.core.DbColumn;
import com.czsj.core.database.core.DbTable;
import com.czsj.core.database.core.PageResult;
import java.sql.Connection;
import java.util.List;
import java.util.Map;
/**
 * Table/data query facade over a single data source.
 *
 * @author yuwei
 * @since 2020-03-14
 */
public interface DbQuery {

    /**
     * Get a connection from the underlying data source.
     */
    Connection getConnection();

    /**
     * Check that the data source is reachable.
     */
    boolean valid();

    /**
     * Close the underlying data source.
     */
    void close();

    /**
     * All columns of the given table.
     *
     * @param dbName database (schema) name
     * @param tableName table to inspect
     * @return column metadata
     */
    List<DbColumn> getTableColumns(String dbName, String tableName);

    /**
     * All tables in the given database.
     *
     * @param dbName database (schema) name
     * @return table metadata
     */
    List<DbTable> getTables(String dbName);

    /**
     * Total row count of the query.
     *
     * @param sql query to count
     * @return row count
     */
    int count(String sql);

    /**
     * Total row count, with positional query parameters.
     *
     * @param sql query to count
     * @param args positional parameter values
     * @return row count
     */
    int count(String sql, Object[] args);

    /**
     * Total row count, with named parameters (NamedParameterJdbcTemplate).
     *
     * @param sql query to count
     * @param params named parameter values
     * @return row count
     */
    int count(String sql, Map<String, Object> params);

    /**
     * Query a result list.
     *
     * @param sql query to run
     * @return one map per row, keyed by column label
     */
    List<Map<String, Object>> queryList(String sql);

    /**
     * Query a result list with positional parameters.
     *
     * @param sql query to run
     * @param args positional parameter values
     * @return one map per row, keyed by column label
     */
    List<Map<String, Object>> queryList(String sql, Object[] args);

    /**
     * Query one page of results.
     *
     * @param sql query to run
     * @param offset rows to skip
     * @param size rows to return
     * @return page of rows plus total count
     */
    PageResult<Map<String, Object>> queryByPage(String sql, long offset, long size);

    /**
     * Query one page of results with positional parameters.
     *
     * @param sql query to run
     * @param args positional parameter values
     * @param offset rows to skip
     * @param size rows to return
     * @return page of rows plus total count
     */
    PageResult<Map<String, Object>> queryByPage(String sql, Object[] args, long offset, long size);

    /**
     * Query one page of results with named parameters (NamedParameterJdbcTemplate).
     *
     * @param sql query to run
     * @param params named parameter values
     * @param offset rows to skip
     * @param size rows to return
     * @return page of rows plus total count
     */
    PageResult<Map<String, Object>> queryByPage(String sql, Map<String, Object> params, long offset, long size);
}

View File

@ -0,0 +1,46 @@
package com.czsj.common.database.utils;
import java.security.MessageDigest;
import java.util.Arrays;
/**
 * MD5 hashing helper producing 32-character lowercase hex digests.
 */
public final class MD5Util {

    /** Lookup table for lowercase hex output. */
    private static final char[] HEX_CHARS = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};

    private MD5Util() {
        // utility class — not instantiable
    }

    // NOTE(review): ad-hoc scratch harness left over from development;
    // consider deleting it in a follow-up.
    public static void main(String[] args) throws InterruptedException {
        Object[] arr = new Object[]{"dbName"};
        Object[] objects = Arrays.copyOf(arr, arr.length + 2);
        System.out.println(objects.length);
        int length = arr.length;
        objects[length] = 1;
        objects[length + 1] = 2;
        System.out.println(Arrays.toString(objects));
    }

    /**
     * MD5-hash the UTF-8 bytes of the given string.
     * The explicit charset fixes platform-dependent digests for non-ASCII
     * input (the original used the default platform charset).
     *
     * @param value text to hash
     * @return 32 lowercase hex characters
     */
    public static String encrypt(String value) {
        return encrypt(value.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * MD5-hash the given bytes.
     *
     * @param value bytes to hash
     * @return 32 lowercase hex characters
     * @throws RuntimeException if the MD5 provider is unavailable (never on a
     *         conforming JRE); the original cause is preserved
     */
    public static String encrypt(byte[] value) {
        try {
            byte[] digest = MessageDigest.getInstance("MD5").digest(value);
            char[] chars = new char[32];
            for (int i = 0; i < chars.length; i += 2) {
                byte b = digest[i / 2];
                chars[i] = HEX_CHARS[(b >>> 4) & 0xf];
                chars[i + 1] = HEX_CHARS[b & 0xf];
            }
            return new String(chars);
        } catch (Exception e) {
            throw new RuntimeException("md5 encrypt error", e);
        }
    }
}

View File

@ -0,0 +1,96 @@
package com.czsj.common.database.utils;
import com.aspose.words.IMailMergeDataSource;
import com.aspose.words.ref.Ref;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Aspose mail-merge data source backed by a list of maps; each map is one
 * record keyed by merge-field name.
 */
public class MapMailMergeDataSource implements IMailMergeDataSource {

    /** Rows to merge. */
    private List<Map<String, Object>> dataList;

    /** Cursor into dataList; -1 means "before the first row". */
    private int index;

    /** Matches the «TableStart:tableName»/«TableEnd:tableName» region in the template. */
    private String tableName = null;

    /**
     * @param dataList  records to merge
     * @param tableName region name matching the template
     */
    public MapMailMergeDataSource(List<Map<String, Object>> dataList, String tableName) {
        this.dataList = dataList;
        this.tableName = tableName;
        this.index = -1;
    }

    /**
     * @param data      a single record to merge
     * @param tableName region name matching the template
     */
    public MapMailMergeDataSource(Map<String, Object> data, String tableName) {
        if (this.dataList == null) {
            this.dataList = new ArrayList<Map<String, Object>>();
            this.dataList.add(data);
        }
        this.tableName = tableName;
        this.index = -1;
    }

    /** Number of records in the result set. */
    private int getCount() {
        return this.dataList.size();
    }

    @Override
    public IMailMergeDataSource getChildDataSource(String arg0) throws Exception {
        return null;
    }

    @Override
    public String getTableName() throws Exception {
        return this.tableName;
    }

    /**
     * Supply the value of the field named key for the current row.
     * Returning false tells Aspose not to bind the field.
     */
    @Override
    public boolean getValue(String key, Ref<Object> args) throws Exception {
        boolean cursorValid = index >= 0 && index < getCount();
        if (!cursorValid || args == null) {
            return false;
        }
        args.set(dataList.get(index).get(key));
        return true;
    }

    /** Advance the cursor; false once the data is exhausted. */
    @Override
    public boolean moveNext() throws Exception {
        index++;
        return index < getCount();
    }
}

View File

@ -0,0 +1,47 @@
package com.czsj.common.database.utils;
import com.aspose.words.Document;
import com.aspose.words.MailMerge;
import java.util.List;
import java.util.Map;
/**
 * Helpers for filling Aspose Word templates via mail merge.
 */
public class MergeDataSource {

    /**
     * Fill plain (non-repeating) merge fields in a Word template.
     * Special fields (e.g. image insertion) could be handled here with a
     * DocumentBuilder before executing the merge.
     *
     * @param name      merge-field names
     * @param value     merge-field values, parallel to name
     * @param modelPath template file path
     * @return the merged document
     * @throws Exception on template load or merge failure
     */
    public Document load(String[] name, Object[] value, String modelPath) throws Exception {
        Document document = new Document(modelPath);
        document.getMailMerge().execute(name, value);
        return document;
    }

    /**
     * Fill plain fields plus a repeating table region.
     *
     * @param name      merge-field names
     * @param value     merge-field values, parallel to name
     * @param modelPath template file path
     * @param dataList  rows for the repeating region
     * @param tableName region name matching the template
     * @return the merged document
     * @throws Exception on template load or merge failure
     */
    public Document load(String[] name, Object[] value, String modelPath, List<Map<String, Object>> dataList, String tableName) throws Exception {
        Document document = new Document(modelPath);
        MailMerge mailMerge = document.getMailMerge();
        mailMerge.execute(name, value);
        mailMerge.executeWithRegions(new MapMailMergeDataSource(dataList, tableName));
        return document;
    }
}

View File

@ -0,0 +1,557 @@
package com.czsj.common.database.utils;//package com.czsj.common.database.utils;
//
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.data.redis.core.RedisTemplate;
//import org.springframework.stereotype.Component;
//
//import java.util.Arrays;
//import java.util.List;
//import java.util.Map;
//import java.util.Set;
//import java.util.concurrent.TimeUnit;
//
///**
// * 定义常用的 Redis操作
// */
//@Component
//public class RedisService {
//
// @Autowired
// private RedisTemplate<String, Object> redisTemplate;
//
// /**
// * 指定缓存失效时间
// *
// * @param key
// * @param time 时间()
// * @return Boolean
// */
// public Boolean expire(String key, Long time) {
// try {
// if (time > 0) {
// redisTemplate.expire(key, time, TimeUnit.SECONDS);
// }
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 根据key获取过期时间
// *
// * @param key 不能为 null
// * @return 时间() 返回 0代表为永久有效
// */
// public Long getExpire(String key) {
// return redisTemplate.getExpire(key, TimeUnit.SECONDS);
// }
//
// /**
// * 判断 key是否存在
// *
// * @param key
// * @return true 存在 false不存在
// */
// public Boolean hasKey(String key) {
// try {
// return redisTemplate.hasKey(key);
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 删除缓存
// *
// * @param key 可以传一个值 或多个
// */
// public void del(String... key) {
// if (key != null && key.length > 0) {
// if (key.length == 1) {
// redisTemplate.delete(key[0]);
// } else {
// redisTemplate.delete(Arrays.asList(key));
// }
// }
// }
//
// /**
// * 普通缓存获取
// *
// * @param key
// * @return
// */
// public Object get(String key) {
// return key == null ? null : redisTemplate.opsForValue().get(key);
// }
//
// /**
// * 普通缓存放入
// *
// * @param key
// * @param value
// * @return true成功 false失败
// */
// public Boolean set(String key, Object value) {
// try {
// redisTemplate.opsForValue().set(key, value);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 普通缓存放入并设置时间
// *
// * @param key
// * @param value
// * @param time 时间() time要大于0 如果time小于等于0 将设置无限期
// * @return true成功 false 失败
// */
// public Boolean set(String key, Object value, Long time) {
// try {
// if (time > 0) {
// redisTemplate.opsForValue().set(key, value, time, TimeUnit.SECONDS);
// } else {
// set(key, value);
// }
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 递增
// *
// * @param key
// * @param delta 要增加几(大于0)
// * @return Long
// */
// public Long incr(String key, Long delta) {
// if (delta < 0) {
// throw new RuntimeException("递增因子必须大于0");
// }
// return redisTemplate.opsForValue().increment(key, delta);
// }
//
// /**
// * 递减
// *
// * @param key
// * @param delta 要减少几(小于0)
// * @return Long
// */
// public Long decr(String key, Long delta) {
// if (delta < 0) {
// throw new RuntimeException("递减因子必须大于0");
// }
// return redisTemplate.opsForValue().increment(key, -delta);
// }
//
// /**
// * HashGet
// *
// * @param key 不能为 null
// * @param item 不能为 null
// * @return
// */
// public Object hget(String key, String item) {
// return redisTemplate.opsForHash().get(key, item);
// }
//
// /**
// * 获取 hashKey对应的所有键值
// *
// * @param key
// * @return 对应的多个键值
// */
// public Map<Object, Object> hmget(String key) {
// return redisTemplate.opsForHash().entries(key);
// }
//
// /**
// * HashSet
// *
// * @param key
// * @param map 对应多个键值
// * @return true 成功 false 失败
// */
// public Boolean hmset(String key, Map<String, Object> map) {
// try {
// redisTemplate.opsForHash().putAll(key, map);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * HashSet 并设置时间
// *
// * @param key
// * @param map 对应多个键值
// * @param time 时间()
// * @return true成功 false失败
// */
// public Boolean hmset(String key, Map<String, Object> map, Long time) {
// try {
// redisTemplate.opsForHash().putAll(key, map);
// if (time > 0) {
// expire(key, time);
// }
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 向一张hash表中放入数据,如果不存在将创建
// *
// * @param key
// * @param item
// * @param value
// * @return true 成功 false失败
// */
// public Boolean hset(String key, String item, Object value) {
// try {
// redisTemplate.opsForHash().put(key, item, value);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 向一张hash表中放入数据,如果不存在将创建
// *
// * @param key
// * @param item
// * @param value
// * @param time 时间() 注意:如果已存在的hash表有时间,这里将会替换原有的时间
// * @return true 成功 false失败
// */
// public Boolean hset(String key, String item, Object value, Long time) {
// try {
// redisTemplate.opsForHash().put(key, item, value);
// if (time > 0) {
// expire(key, time);
// }
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 删除hash表中的值
// *
// * @param key 不能为 null
// * @param item 可以使多个不能为 null
// */
// public void hdel(String key, Object... item) {
// redisTemplate.opsForHash().delete(key, item);
// }
//
// /**
// * 判断hash表中是否有该项的值
// *
// * @param key 不能为 null
// * @param item 不能为 null
// * @return true 存在 false不存在
// */
// public Boolean hHasKey(String key, String item) {
// return redisTemplate.opsForHash().hasKey(key, item);
// }
//
// /**
// * hash递增 如果不存在,就会创建一个 并把新增后的值返回
// *
// * @param key
// * @param item
// * @param by 要增加几(大于0)
// * @return Double
// */
// public Double hincr(String key, String item, Double by) {
// return redisTemplate.opsForHash().increment(key, item, by);
// }
//
// /**
// * hash递减
// *
// * @param key
// * @param item
// * @param by 要减少记(小于0)
// * @return Double
// */
// public Double hdecr(String key, String item, Double by) {
// return redisTemplate.opsForHash().increment(key, item, -by);
// }
//
// /**
// * 根据 key获取 Set中的所有值
// *
// * @param key
// * @return Set
// */
// public Set<Object> sGet(String key) {
// try {
// return redisTemplate.opsForSet().members(key);
// } catch (Exception e) {
// e.printStackTrace();
// return null;
// }
// }
//
// /**
// * 根据value从一个set中查询,是否存在
// *
// * @param key
// * @param value
// * @return true 存在 false不存在
// */
// public Boolean sHasKey(String key, Object value) {
// try {
// return redisTemplate.opsForSet().isMember(key, value);
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 将数据放入set缓存
// *
// * @param key
// * @param values 可以是多个
// * @return 成功个数
// */
// public Long sSet(String key, Object... values) {
// try {
// return redisTemplate.opsForSet().add(key, values);
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//
// /**
// * 将set数据放入缓存
// *
// * @param key
// * @param time 时间()
// * @param values 可以是多个
// * @return 成功个数
// */
// public Long sSetAndTime(String key, Long time, Object... values) {
// try {
// Long count = redisTemplate.opsForSet().add(key, values);
// if (time > 0)
// expire(key, time);
// return count;
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//
// /**
// * 获取set缓存的长度
// *
// * @param key
// * @return Long
// */
// public Long sGetSetSize(String key) {
// try {
// return redisTemplate.opsForSet().size(key);
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//
// /**
// * 移除值为value的
// *
// * @param key
// * @param values 可以是多个
// * @return 移除的个数
// */
// public Long setRemove(String key, Object... values) {
// try {
// return redisTemplate.opsForSet().remove(key, values);
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//
// /**
// * 获取list缓存的内容
// *
// * @param key
// * @param start 开始
// * @param end 结束 0 -1代表所有值
// * @return List
// */
// public List<Object> lGet(String key, int start, int end) {
// try {
// return redisTemplate.opsForList().range(key, start, end);
// } catch (Exception e) {
// e.printStackTrace();
// return null;
// }
// }
//
// /**
// * 获取list缓存的长度
// *
// * @param key
// * @return Long
// */
// public Long lGetListSize(String key) {
// try {
// return redisTemplate.opsForList().size(key);
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//
// /**
// * 通过索引 获取list中的值
// *
// * @param key
// * @param index 索引 index>=0时 0 表头1 第二个元素依次类推
// * index<0时-1表尾-2倒数第二个元素依次类推
// * @return Object
// */
// public Object lGetIndex(String key, Long index) {
// try {
// return redisTemplate.opsForList().index(key, index);
// } catch (Exception e) {
// e.printStackTrace();
// return null;
// }
// }
//
// /**
// * 将list放入缓存
// *
// * @param key
// * @param value
// * @return Boolean
// */
// public Boolean lSet(String key, Object value) {
// try {
// redisTemplate.opsForList().rightPush(key, value);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 将list放入缓存
// *
// * @param key
// * @param value
// * @param time 时间()
// * @return Boolean
// */
// public Boolean lSet(String key, Object value, Long time) {
// try {
// redisTemplate.opsForList().rightPush(key, value);
// if (time > 0)
// expire(key, time);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 将list放入缓存
// *
// * @param key
// * @param value
// * @return Boolean
// */
// public Boolean lSet(String key, List<Object> value) {
// try {
// redisTemplate.opsForList().rightPushAll(key, value);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 将list放入缓存
// *
// * @param key
// * @param value
// * @param time 时间()
// * @return Boolean
// */
// public Boolean lSet(String key, List<Object> value, Long time) {
// try {
// redisTemplate.opsForList().rightPushAll(key, value);
// if (time > 0)
// expire(key, time);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 根据索引修改list中的某条数据
// *
// * @param key
// * @param index 索引
// * @param value
// * @return Boolean
// */
// public Boolean lUpdateIndex(String key, Long index, Object value) {
// try {
// redisTemplate.opsForList().set(key, index, value);
// return true;
// } catch (Exception e) {
// e.printStackTrace();
// return false;
// }
// }
//
// /**
// * 移除N个值为value
// *
// * @param key
// * @param count 移除多少个
// * @param value
// * @return 移除的个数
// */
// public Long lRemove(String key, Long count, Object value) {
// try {
// return redisTemplate.opsForList().remove(key, count, value);
// } catch (Exception e) {
// e.printStackTrace();
// return 0L;
// }
// }
//}

View File

@ -0,0 +1,109 @@
package com.czsj.common.database.utils;
import com.czsj.core.database.core.DataRole;
import com.czsj.core.database.core.DataUser;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Static helpers for reading the current user from the Spring Security
 * context. All accessors return "" (or false/null) when no user is
 * authenticated.
 */
public class SecurityUtil {

    private SecurityUtil() {
        // static utility class — not instantiable
    }

    /**
     * Current authenticated user, or null when there is no authentication in
     * the security context or the principal is not a DataUser.
     *
     * @return user or null
     */
    public static DataUser getDataUser() {
        Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        if (authentication != null) {
            Object principal = authentication.getPrincipal();
            if (principal instanceof DataUser) {
                return (DataUser) principal;
            }
        }
        return null;
    }

    /**
     * Current user's id, or "" when no user is logged in.
     */
    public static String getUserId() {
        DataUser user = getDataUser();
        return user != null ? user.getId() : "";
    }

    /**
     * Current user's department id, or "" when no user is logged in.
     */
    public static String getUserDeptId() {
        DataUser user = getDataUser();
        return user != null ? user.getDept() : "";
    }

    /**
     * Current user's login name, or "" when no user is logged in.
     */
    public static String getUserName() {
        DataUser user = getDataUser();
        return user != null ? user.getUsername() : "";
    }

    /**
     * Current user's display name, or "" when no user is logged in.
     */
    public static String getNickname() {
        DataUser user = getDataUser();
        return user != null ? user.getNickname() : "";
    }

    /**
     * Ids of the current user's roles.
     *
     * @return role ids, or null when no user is logged in.
     *         NOTE(review): callers must null-check; consider returning an
     *         empty list instead in a follow-up.
     */
    public static List<String> getUserRoleIds() {
        DataUser user = getDataUser();
        if (user != null) {
            return user.getRoles().stream().map(DataRole::getId).collect(Collectors.toList());
        }
        return null;
    }

    /**
     * Whether the current user is an administrator; false when not logged in.
     */
    public static boolean isAdmin() {
        DataUser user = getDataUser();
        return user != null && user.isAdmin();
    }
}

View File

@ -0,0 +1,281 @@
package com.czsj.common.database.utils;
import com.aspose.words.*;
import java.awt.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
public class WordUtil {
private WordUtil() {}
private static volatile WordUtil instance;
/**
 * Lazily create the shared singleton using double-checked locking.
 * The instance field is declared volatile, which makes this pattern safe.
 *
 * @return the shared WordUtil instance
 */
public static WordUtil getInstance() {
    if(instance == null) {
        synchronized (WordUtil.class) {
            if(instance == null) {
                instance = new WordUtil();
            }
        }
    }
    return instance;
}
/*
 * Registers an Aspose license at class-load time so generated documents do
 * not carry the evaluation watermark.
 */
static {
    String license =
            "<License>\n" +
            "  <Data>\n" +
            "    <Products>\n" +
            "      <Product>Aspose.Cells for Java</Product>\n" +
            "      <Product>Aspose.Words for Java</Product>\n" +
            "      <Product>Aspose.Slides for Java</Product>\n" +
            "    </Products>\n" +
            "    <EditionType>Enterprise</EditionType>\n" +
            "    <SubscriptionExpiry>20991231</SubscriptionExpiry>\n" +
            "    <LicenseExpiry>20991231</LicenseExpiry>\n" +
            "    <SerialNumber>8bfe198c-7f0c-4ef8-8ff0-acc3237bf0d7</SerialNumber>\n" +
            "  </Data>\n" +
            "  <Signature>datax</Signature>\n" +
            "</License>";
    try {
        new License().setLicense(new ByteArrayInputStream(license.getBytes("UTF-8")));
    } catch (Exception e) {
        // intentionally ignored: if licensing fails the class still loads;
        // output documents would then carry the evaluation watermark
    }
}
/**
 * Load a document from a template file path.
 *
 * @param fileName template file path, e.g. F:\模板.docx
 * @return the loaded document
 * @throws Exception if the file cannot be read or parsed
 */
public Document getDocument(String fileName) throws Exception {
    return new Document(fileName);
}
/**
 * Load a document from a template input stream.
 *
 * @param inputStream stream of the template file
 * @return the loaded document
 * @throws Exception if the stream cannot be read or parsed
 */
public Document getDocument(InputStream inputStream) throws Exception {
    return new Document(inputStream);
}
/**
 * Fill plain merge fields in a template and return the generated DOCX as a
 * stream suitable for a controller download response.
 *
 * @param name      merge-field names
 * @param value     merge-field values, parallel to name
 * @param modelPath template file path, e.g. F:\模板.docx
 * @return stream of the generated DOCX
 * @throws Exception on template load or merge failure
 */
public ByteArrayInputStream fillWordData(String[] name, Object[] value, String modelPath) throws Exception {
    Document document = new MergeDataSource().load(name, value, modelPath);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    document.save(buffer, SaveOptions.createSaveOptions(SaveFormat.DOCX));
    return new ByteArrayInputStream(buffer.toByteArray());
}
/**
 * Fill plain merge fields in a template and save the result directly to a
 * destination path.
 *
 * @param name      merge-field names
 * @param value     merge-field values, parallel to name
 * @param modelPath template file path, e.g. F:\模板.docx
 * @param destPath  output file path, e.g. F:\测试.docx
 * @throws Exception on template load, merge, or save failure
 */
public void fillWordData(String[] name, Object[] value, String modelPath, String destPath) throws Exception {
    Document document = new MergeDataSource().load(name, value, modelPath);
    document.save(destPath, SaveOptions.createSaveOptions(SaveFormat.DOCX));
}
/**
 * Fill plain fields plus a repeating table region and return the generated
 * DOCX as a stream.
 *
 * @param name      merge-field names
 * @param value     merge-field values, parallel to name
 * @param modelPath template file path, e.g. F:\模板.docx
 * @param dataList  rows for the repeating region
 * @param tableName region name matching the template
 * @return stream of the generated DOCX
 * @throws Exception on template load or merge failure
 */
public ByteArrayInputStream fillWordListData(String[] name, Object[] value, String modelPath, List<Map<String, Object>> dataList, String tableName) throws Exception {
    Document document = new MergeDataSource().load(name, value, modelPath, dataList, tableName);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    document.save(buffer, SaveOptions.createSaveOptions(SaveFormat.DOCX));
    return new ByteArrayInputStream(buffer.toByteArray());
}
/**
 * Fill plain fields plus a repeating table region and save the result
 * directly to a destination path.
 *
 * @param name      merge-field names
 * @param value     merge-field values, parallel to name
 * @param modelPath template file path, e.g. F:\模板.docx
 * @param destPath  output file path, e.g. F:\测试.docx
 * @param dataList  rows for the repeating region
 * @param tableName region name matching the template
 * @throws Exception on template load, merge, or save failure
 */
public void fillWordListData(String[] name, Object[] value, String modelPath, String destPath, List<Map<String, Object>> dataList, String tableName) throws Exception {
    Document document = new MergeDataSource().load(name, value, modelPath, dataList, tableName);
    document.save(destPath, SaveOptions.createSaveOptions(SaveFormat.DOCX));
}
/**
 * Converts a Word document to PDF.
 * Aspose.Words supports conversions between DOC, DOCX, OOXML, RTF, HTML,
 * OpenDocument, PDF, EPUB, XPS and SWF.
 *
 * @param srcPath  source file path, e.g. F:\test\input.docx
 * @param destPath target file path, e.g. F:\test\output.pdf
 * @throws Exception if loading or saving fails
 */
public void word2pdf(String srcPath, String destPath) throws Exception {
    // The original timed the conversion and printed the duration to stdout;
    // that debug output was removed from this library method.
    Document doc = new Document(srcPath);
    doc.save(destPath, SaveOptions.createSaveOptions(SaveFormat.PDF));
}
/**
 * Creates an empty DOCX document at the given path.
 *
 * @param destPath output file path, e.g. F:\test\empty.docx
 * @throws Exception if the document cannot be saved
 */
public void createWord(String destPath) throws Exception {
    new Document().save(destPath, SaveOptions.createSaveOptions(SaveFormat.DOCX));
}
/**
 * Inserts a diagonal gray text watermark into every section header of the
 * document (primary, first-page and even-page headers).
 *
 * @param doc           Word document to stamp
 * @param watermarkText watermark text
 * @throws Exception if header manipulation fails
 */
public void insertWatermarkText(Document doc, String watermarkText) throws Exception {
// Build the watermark shape: plain text, Arial, 500x100, rotated -40 degrees.
Shape watermark = new Shape(doc, ShapeType.TEXT_PLAIN_TEXT);
watermark.setName("WaterMark");
watermark.getTextPath().setText(watermarkText);
watermark.getTextPath().setFontFamily("Arial");
watermark.setWidth(500);
watermark.setHeight(100);
watermark.setRotation(-40);
watermark.getFill().setColor(Color.GRAY);
watermark.setStrokeColor(Color.GRAY);
// Center the shape on the page and let body text flow over it.
watermark.setRelativeHorizontalPosition(RelativeHorizontalPosition.PAGE);
watermark.setRelativeVerticalPosition(RelativeVerticalPosition.PAGE);
watermark.setWrapType(WrapType.NONE);
watermark.setVerticalAlignment(VerticalAlignment.CENTER);
watermark.setHorizontalAlignment(HorizontalAlignment.CENTER);
// Wrap the shape in a paragraph and clone it into every header type of
// every section, so the watermark shows on all pages.
Paragraph watermarkPara = new Paragraph(doc);
watermarkPara.appendChild(watermark);
for (Section sect : doc.getSections()) {
insertWatermarkIntoHeader(watermarkPara, sect, HeaderFooterType.HEADER_PRIMARY);
insertWatermarkIntoHeader(watermarkPara, sect, HeaderFooterType.HEADER_FIRST);
insertWatermarkIntoHeader(watermarkPara, sect, HeaderFooterType.HEADER_EVEN);
}
}
/**
 * Clones the watermark paragraph into the header of the given type,
 * creating the header first when the section does not yet have one.
 *
 * @param watermarkPara paragraph containing the watermark shape
 * @param sect          section to stamp
 * @param headerType    one of the {@code HeaderFooterType} constants
 * @throws Exception if the header cannot be created or appended to
 */
private void insertWatermarkIntoHeader(Paragraph watermarkPara, Section sect, int headerType) throws Exception {
    HeaderFooter target = sect.getHeadersFooters().getByHeaderFooterType(headerType);
    if (target == null) {
        // Section has no header of this type yet — create one.
        target = new HeaderFooter(sect.getDocument(), headerType);
        sect.getHeadersFooters().add(target);
    }
    target.appendChild(watermarkPara.deepClone(true));
}
/**
 * Manual smoke-test entry point; intentionally empty.
 * The previous ~70 lines of commented-out usage examples (mail merge with
 * DataTable/DataSet, word2pdf, fillWordListData) were removed as dead code;
 * see the Javadoc of the public methods above for usage.
 */
public static void main(String[] args) throws Exception {
}
}

View File

@ -0,0 +1,552 @@
package com.czsj.common.datasource;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.ClickHouseSourceDTO;
import com.dtstack.dtcenter.loader.dto.source.OracleSourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;
import com.czsj.common.domian.CkTable;
import com.czsj.common.domian.Column;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
// Utility class wrapping dtstack-loader and plain JDBC access to ClickHouse.
public class ClickhouseSource {
private static Logger logger = LoggerFactory.getLogger(ClickhouseSource.class);
private static final String SQL = "SELECT * FROM ";// base query prefix used when preparing metadata statements
private static PreparedStatement pst = null;// NOTE(review): shared mutable static statement — not thread-safe; confirm single-threaded use
/**
 * Opens a JDBC connection to ClickHouse after verifying connectivity.
 *
 * @param url      JDBC URL
 * @param username user name
 * @param password password
 * @return a live {@link Connection}
 * @throws RuntimeException when the connectivity test fails
 */
public static Connection getconn(String url, String username, String password) {
    ClickHouseSourceDTO dto = ClickHouseSourceDTO.builder()
            .url(url)
            .username(username)
            .password(password)
            .build();
    // Obtain the loader client and fail fast when the test connection fails.
    IClient client = ClientCache.getClient(DataSourceType.Clickhouse.getVal());
    if (Boolean.FALSE.equals(client.testCon(dto))) {
        throw new RuntimeException("connection exception");
    }
    return client.getCon(dto);
}
/**
 * Verifies connectivity and returns the loader client and source descriptor
 * under the keys {@code "client"} and {@code "source"}.
 *
 * @param url      JDBC URL
 * @param username user name
 * @param password password
 * @return map with keys "client" (IClient) and "source" (ClickHouseSourceDTO)
 * @throws RuntimeException when the connectivity test fails
 */
public static Map<String,Object> getconns(String url, String username, String password) {
    ClickHouseSourceDTO dto = ClickHouseSourceDTO.builder()
            .url(url)
            .username(username)
            .password(password)
            .build();
    IClient client = ClientCache.getClient(DataSourceType.Clickhouse.getVal());
    if (Boolean.FALSE.equals(client.testCon(dto))) {
        throw new RuntimeException("connection exception");
    }
    Map<String,Object> result = new HashMap<>();
    result.put("client", client);
    result.put("source", dto);
    return result;
}
/**
 * Closes the given database connection if it is non-null.
 *
 * @param con connection to close; may be null
 * @throws Exception if closing fails
 */
public static void CloseCon(Connection con) throws Exception {
    if (con == null) {
        return;
    }
    con.close();
    logger.info("已断开与数据库的连接!");
}
/**
 * Executes a DDL/update statement (e.g. CREATE TABLE).
 *
 * @param conn open connection
 * @param sql  statement to execute
 * @return true on success, false when a SQLException occurred
 */
public static boolean cttable(Connection conn, String sql) {
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        return true;
    } catch (SQLException e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Runs a parameterized query through the dtstack loader client.
 *
 * @param client     loader client
 * @param source     ClickHouse source descriptor
 * @param sql        SQL text, optionally with placeholders
 * @param parameters positional parameters; may be null or empty
 * @return result rows as column-name to value maps
 */
public static List<Map<String, Object>> customQuery(IClient client, ClickHouseSourceDTO source, String sql,List<String> parameters) {
    // Bulk-copy the parameters instead of the original element-by-element loop;
    // an empty list is passed when no parameters were supplied (as before).
    List<Object> preFields = parameters == null ? new ArrayList<>() : new ArrayList<>(parameters);
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).preFields(preFields).build();
    return client.executeQuery(source, queryDTO);
}
/**
 * Runs an ad-hoc query (no parameters) through the dtstack loader client.
 *
 * @param client loader client
 * @param source Oracle source descriptor
 * @param sql    SQL text to execute
 * @return result rows as column-name to value maps
 */
public static List<Map<String, Object>> customQuery(IClient client, OracleSourceDTO source, String sql) {
    return client.executeQuery(source, SqlQueryDTO.builder().sql(sql).build());
}
/**
 * Renames a column in a ClickHouse table.
 * NOTE(review): identifiers are concatenated into the SQL; callers must not
 * pass untrusted values for database/table/column names.
 *
 * @param conn        open connection
 * @param colonystate "0" = single node, "1" = cluster
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    database name
 * @param table       table name
 * @param name        current column name
 * @param toname      new column name
 * @return true on success, false on SQL error or unknown colonystate
 */
public static boolean updateCkFieldName(Connection conn, String colonystate, String colonyname, String database, String table, String name, String toname) {
    String sql;
    if ("0".equals(colonystate)) {
        sql = "ALTER TABLE " + database + "." + table + " RENAME COLUMN " + name + " TO " + toname + ";";
    } else if ("1".equals(colonystate)) {
        // Bug fix: the original concatenated "on cluster" without surrounding
        // spaces, producing invalid SQL like "db.tableon clusterfoo".
        sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " RENAME COLUMN " + name + " TO " + toname + ";";
    } else {
        return false;
    }
    System.out.println("执行语句:" + sql);
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Changes the type of a column in a ClickHouse table.
 * NOTE(review): identifiers are concatenated into the SQL; callers must not
 * pass untrusted values for database/table/column names.
 *
 * @param conn        open connection
 * @param colonystate "0" = single node, "1" = cluster
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    database name
 * @param table       table name
 * @param name        column name
 * @param type        new column type
 * @return true on success, false on SQL error or unknown colonystate
 */
public static boolean updateCkFieldType(Connection conn, String colonystate, String colonyname, String database, String table, String name, String type) {
    String sql;
    if ("0".equals(colonystate)) {
        sql = "ALTER TABLE " + database + "." + table + " modify COLUMN " + name + " " + type + ";";
    } else if ("1".equals(colonystate)) {
        // Bug fix: the original concatenated "on cluster" without surrounding
        // spaces, producing invalid SQL like "db.tableon clusterfoo".
        sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " modify COLUMN " + name + " " + type + ";";
    } else {
        return false;
    }
    System.out.println("执行语句:" + sql);
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Sets the comment of a column in a ClickHouse table.
 * NOTE(review): identifiers and the comment are concatenated into the SQL;
 * callers must not pass untrusted values.
 *
 * @param conn        open connection
 * @param colonystate "0" = single node, "1" = cluster
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    database name
 * @param table       table name
 * @param name        column name
 * @param comment     comment text
 * @return true on success, false on SQL error or unknown colonystate
 */
public static boolean updateCkFieldComment(Connection conn, String colonystate, String colonyname, String database, String table, String name, String comment) {
    String sql;
    if ("0".equals(colonystate)) {
        sql = "ALTER TABLE " + database + "." + table + " COMMENT COLUMN " + name + " '" + comment + "'" + " ;";
    } else if ("1".equals(colonystate)) {
        // Bug fix: the original concatenated "on cluster" without surrounding
        // spaces, producing invalid SQL like "db.tableon clusterfoo".
        sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " COMMENT COLUMN " + name + " '" + comment + "'" + " ;";
    } else {
        return false;
    }
    System.out.println("执行语句:" + sql);
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Adds a column (with a comment) to a ClickHouse table.
 * NOTE(review): identifiers and the comment are concatenated into the SQL;
 * callers must not pass untrusted values.
 *
 * @param conn        open connection
 * @param colonystate "0" = single node, "1" = cluster
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    database name
 * @param table       table name
 * @param name        new column name
 * @param type        new column type
 * @param comment     column comment
 * @return true on success, false on SQL error or unknown colonystate
 */
public static boolean insertCkField(Connection conn, String colonystate, String colonyname, String database, String table, String name, String type, String comment) {
    String sql;
    if ("0".equals(colonystate)) {
        sql = "ALTER TABLE " + database + "." + table + " ADD COLUMN " + name + " " + type + " comment '" + comment + "'" + " ;";
    } else if ("1".equals(colonystate)) {
        // Bug fix: the original concatenated "on cluster" without surrounding
        // spaces, producing invalid SQL like "db.tableon clusterfoo".
        sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " ADD COLUMN " + name + " " + type + " comment '" + comment + "'" + " ;";
    } else {
        return false;
    }
    System.out.println("执行语句:" + sql);
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Drops a column from a ClickHouse table.
 * NOTE(review): identifiers are concatenated into the SQL; callers must not
 * pass untrusted values.
 *
 * @param conn        open connection
 * @param colonystate "0" = single node, "1" = cluster
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    database name
 * @param table       table name
 * @param name        column name to drop
 * @return true on success, false on SQL error or unknown colonystate
 */
public static boolean deleteCkField(Connection conn, String colonystate, String colonyname, String database, String table, String name) {
    String sql;
    if ("0".equals(colonystate)) {
        sql = "ALTER TABLE " + database + "." + table + " DROP COLUMN " + name + " ;";
    } else if ("1".equals(colonystate)) {
        // Bug fix: the original concatenated "on cluster" without surrounding
        // spaces, producing invalid SQL like "db.tableon clusterfoo".
        sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " DROP COLUMN " + name + " ;";
    } else {
        return false;
    }
    System.out.println("执行语句:" + sql);
    // try-with-resources: the original leaked the Statement.
    try (Statement state = conn.createStatement()) {
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Reads name/type/comment for every column of a table from system.columns.
 * NOTE: closes the supplied connection before returning (behavior preserved
 * from the original implementation).
 *
 * @param conn      open connection (closed by this method)
 * @param tableName table name
 * @param database  database name
 * @return column metadata list; empty when the query fails
 */
public static List<Column> getCkColumnComment(Connection conn, String tableName, String database) {
    List<Column> columnComments = new ArrayList<>();
    // Parameterized query: the original concatenated the identifiers into the
    // SQL string (injection-prone) and prepared an unused "SELECT * FROM"
    // statement that was never closed.
    String sql = "select table,name,type,comment from `system`.columns where table = ? and database = ?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        ps.setString(1, tableName);
        ps.setString(2, database);
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                Column column = new Column();
                column.setName(rs.getString("name"));
                column.setType(rs.getString("type"));
                column.setComment(rs.getString("comment"));
                columnComments.add(column);
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Lists all tables of a database from system.tables, with their CREATE
 * statements. NOTE: closes the supplied connection before returning
 * (behavior preserved from the original implementation).
 *
 * @param conn     open connection (closed by this method)
 * @param baseName database name
 * @return table metadata list; empty when the query fails
 */
public static List<CkTable> getCkColumnComments(Connection conn, String baseName) {
    List<CkTable> columnComments = new ArrayList<>();
    // Parameterized query: the original concatenated the database name into
    // the SQL string (injection-prone) and prepared an unused "SELECT * FROM"
    // statement that was never closed.
    String sql = "select database,name,create_table_query from `system`.tables where database = ?";
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        ps.setString(1, baseName);
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                CkTable ckTable = new CkTable();
                ckTable.setDatabase(rs.getString("database"));
                ckTable.setTableName(rs.getString("name"));
                ckTable.setCreateTableQuery(rs.getString("create_table_query"));
                columnComments.add(ckTable);
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Executes a paginated query and returns a map with the column metadata
 * ("tableData"), the rows keyed by column name ("tables") and the total row
 * count of the unpaginated query ("total"). Also echoes the result to stdout.
 * NOTE(review): pagination maps pageNum 1 to offset 0 but pageNum 2 to
 * offset 2*pageSize, which looks like it skips a page — confirm callers'
 * expectations. The shared static {@code pst} field makes this method
 * non-thread-safe. The connection is closed by the nested
 * executeQuerySqlNum call before returning.
 *
 * @param conn     open connection (closed before this method returns)
 * @param sql      query to run (without LIMIT)
 * @param pageNum  page number; 1 and 0 both mean the first page
 * @param pageSize rows per page
 * @return map with keys "tables", "tableData" and "total"
 * @throws Exception on SQL errors
 */
public static Map<String, Object> executeQuerySql(Connection conn, String sql, int pageNum, int pageSize) throws Exception {
if (pageNum == 1) {
pageNum = 0;
}
int start = pageNum * pageSize;
StringBuilder sql1 = new StringBuilder();// assemble the paginated SQL
sql1.append(sql + "\n");
sql1.append("limit " + start + "," + pageSize + "\n");
System.out.println(sql1.toString());
pst = conn.prepareStatement(sql1.toString());
ResultSet result = pst.executeQuery();// query result
ResultSetMetaData rsmd = result.getMetaData();
JSONArray tableTitle = new JSONArray();// table header
for (int i = 1; i <= rsmd.getColumnCount(); i++) {
JSONObject tableTitle_Th = new JSONObject();// header cell
tableTitle_Th.put("columnname", rsmd.getColumnName(i));// column name
tableTitle_Th.put("tablename", rsmd.getTableName(i));// table name
tableTitle_Th.put("columnclassname", rsmd.getColumnClassName(i));// Java type
tableTitle_Th.put("columntypename", rsmd.getColumnTypeName(i) + "(" + rsmd.getColumnDisplaySize(i) + ")");// DB type with display size
tableTitle.add(tableTitle_Th);// collect header cell
}
JSONObject table = new JSONObject();// all queried data
JSONArray tableBody = new JSONArray();// table rows
while (result.next()) {
JSONArray tableRow = new JSONArray();// one row of cells
for (int i = 1; i <= rsmd.getColumnCount(); i++) {
String classname = rsmd.getColumnClassName(i);// JDBC-reported Java type drives the accessor choice
switch (classname) {
case "java.math.BigDecimal": {
tableRow.add(result.getBigDecimal(i));
break;
}
case "java.lang.Boolean": {
tableRow.add(result.getBoolean(i));
break;
}
case "java.lang.Byte": {
tableRow.add(result.getByte(i));
break;
}
case "java.util.Date": {
Date date = result.getDate(i);
String time = "";
if (date != null) {
time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date);
}
tableRow.add(time);
break;
}
case "java.sql.Date": {
Date date = result.getDate(i);
String time = "";
if (date != null) {
time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date);
}
tableRow.add(time);
break;
}
case "java.lang.Double": {
tableRow.add(result.getDouble(i));
break;
}
case "java.lang.Float": {
tableRow.add(result.getFloat(i));
break;
}
case "java.lang.Integer": {
tableRow.add(result.getInt(i));
break;
}
case "java.lang.Long": {
tableRow.add(result.getLong(i));
break;
}
case "java.lang.String": {
tableRow.add(result.getString(i));
break;
}
case "java.sql.Timestamp": {
// Timestamps round-trip through dateToStamp/stampToDate, which
// truncates to whole seconds; nulls fall back to a sentinel date.
// NOTE(review): if the string is non-null but unparseable,
// dateToStamp returns "" and Double.valueOf throws — confirm inputs.
String str = "9999-12-12 00:00:00";
if (!"".equals(result.getString(i)) && result.getString(i) != null) {
str = result.getString(i);
}
tableRow.add(stampToDate(Double.valueOf(dateToStamp(str))));
break;
}
case "java.math.BigInteger": {
tableRow.add(result.getBigDecimal(i));
break;
}
default:
tableRow.add(result.getString(i));
}
}
tableBody.add(tableRow);
}
table.put("tableTitle", tableTitle);
table.put("tableBody", tableBody);
Map<String, Object> remap = new HashMap<>();
JSONArray tableTitle1 = table.getJSONArray("tableTitle");
JSONArray tableBody1 = table.getJSONArray("tableBody");
List<Map<Object, Object>> list = new ArrayList<>();
List<Map<String, Object>> Datalist = new ArrayList<>();
Object[] str = new String[tableTitle1.size()];
// Re-shape the header into "dataItem" entries and remember the column
// names so each row can be keyed by column name below.
for (int j = 0; j < tableTitle1.size(); j++) {
JSONObject tableTitle_Th = (JSONObject) tableTitle1.get(j);
Map<String, Object> tableData = new HashMap<>();
tableData.put("dataItem", tableTitle_Th.get("columnname"));
Datalist.add(tableData);
str[j] = tableTitle_Th.get("columnname");
System.out.print(tableTitle_Th.get("columnname") + "\t");
}
System.out.println("\n------------------------------------------------------------------------------------------------------------------------");
// Convert each positional row into a column-name -> value map.
for (Object o : tableBody1) {
Map<Object, Object> tables = new HashMap<>();
JSONArray row = (JSONArray) o;
for (int j = 0; j < row.size(); j++) {
tables.put(str[j], row.get(j));
System.out.print(row.get(j) + "\t");
}
list.add(tables);
System.out.println();
}
Long x = executeQuerySqlNum(conn, sql);
remap.put("tables", list);
remap.put("tableData", Datalist);
remap.put("total", x);
return remap;
}
/**
 * Counts the rows returned by the given query by wrapping it in
 * {@code select count(*)}. NOTE: closes the supplied connection before
 * returning (behavior preserved from the original implementation).
 *
 * @param conn open connection (closed by this method)
 * @param sql  query whose result rows are counted
 * @return the row count, 0 when the count query returns no row
 * @throws Exception on SQL errors
 */
public static Long executeQuerySqlNum(Connection conn, String sql) throws Exception {
    String countSql = "select count(*) statistics from (" + sql + ")";
    Long total = 0L;
    // try-with-resources: the original leaked the statement and result set and
    // stored the statement in the shared static pst field (not thread-safe).
    try (PreparedStatement ps = conn.prepareStatement(countSql);
            ResultSet result = ps.executeQuery()) {
        while (result.next()) {
            total = result.getLong("statistics");
        }
    } finally {
        conn.close();
    }
    return total;
}
/**
 * Formats a unix timestamp (seconds, possibly fractional) as
 * "yyyy-MM-dd HH:mm:ss" in the default time zone.
 *
 * @param time seconds since the epoch
 * @return formatted date-time string
 */
public static String stampToDate(Double time) {
    java.util.Date instant = new java.util.Date((long) (time * 1000L));
    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(instant);
}
/**
 * Converts a "yyyy-MM-dd HH:mm:ss" date-time string to a unix timestamp
 * (seconds) string. An empty input yields the current time; an unparseable
 * input yields "".
 *
 * @param time date-time string; may be empty
 * @return epoch seconds as a string, or "" when parsing fails
 */
public static String dateToStamp(String time) {
    if ("".equals(time)) {
        // Empty input: fall back to the current time, as before.
        return String.valueOf(System.currentTimeMillis() / 1000);
    }
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    try {
        return String.valueOf(sdf.parse(time).getTime() / 1000);
    } catch (Exception e) {
        // Bug fix: the original printed "参数为空!" ("parameter is empty") in
        // this branch, but reaching here means the input failed to parse.
        System.out.println("时间格式解析失败:" + time);
        return "";
    }
}
}

View File

@ -0,0 +1,84 @@
package com.czsj.common.datasource;
import com.dtstack.dtcenter.loader.cache.pool.config.PoolConfig;
import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.HiveSourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.Map;
/**
 * Hive data-source helpers built on the dtstack loader client.
 * (The unused private fields SQL and pst from the original were removed;
 * the class name's spelling is kept for caller compatibility.)
 */
public class HiveSourse {
    private static final Logger logger = LoggerFactory.getLogger(HiveSourse.class);

    /**
     * Builds a Hive source descriptor from the connection settings.
     * Shared by getconn/getconns, which previously duplicated this chain.
     */
    private static HiveSourceDTO buildSource(String url, String username, String password, String schema, String defaultFS, String config) {
        return HiveSourceDTO.builder()
                .url(url)
                .username(username)
                .password(password)
                .schema(schema)
                .defaultFS(defaultFS)
                .config(config)
                .poolConfig(PoolConfig.builder().build())
                .build();
    }

    /**
     * Opens a JDBC connection to Hive after verifying connectivity.
     *
     * @throws RuntimeException when the connectivity test fails
     */
    public static Connection getconn(String url, String username, String password, String schema, String defaultFS, String config) {
        HiveSourceDTO sourceDTO = buildSource(url, username, password, schema, defaultFS, config);
        IClient client = ClientCache.getClient(DataSourceType.HIVE.getVal());
        if (Boolean.FALSE.equals(client.testCon(sourceDTO))) {
            throw new RuntimeException("connection exception");
        }
        return client.getCon(sourceDTO);
    }

    /**
     * Verifies connectivity and returns the loader client and source
     * descriptor under the keys "client" and "source".
     *
     * @throws RuntimeException when the connectivity test fails
     */
    public static Map<String, Object> getconns(String url, String username, String password, String schema, String defaultFS, String config) {
        HiveSourceDTO sourceDTO = buildSource(url, username, password, schema, defaultFS, config);
        IClient client = ClientCache.getClient(DataSourceType.HIVE.getVal());
        if (Boolean.FALSE.equals(client.testCon(sourceDTO))) {
            throw new RuntimeException("connection exception");
        }
        Map<String, Object> map = new HashMap<>();
        map.put("client", client);
        map.put("source", sourceDTO);
        return map;
    }

    /**
     * Closes the given connection if it is non-null.
     */
    public static void CloseCon(Connection con) throws Exception {
        if (con != null) {
            con.close();
            logger.info("已断开与数据库的连接!");
        }
    }

    /**
     * Executes a statement that returns no result set.
     *
     * @return true on success
     * @throws RuntimeException when the execution reports failure
     */
    public static Boolean executeSqlWithoutResultSet(IClient client, HiveSourceDTO source, String sql) {
        SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).build();
        Boolean aBoolean = client.executeSqlWithoutResultSet(source, queryDTO);
        if (!aBoolean) {
            logger.error("自定义sql执行" + sql + " 失败\n");
            throw new RuntimeException("执行失败");
        }
        return aBoolean;
    }
}

View File

@ -0,0 +1,551 @@
package com.czsj.common.datasource;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.dtstack.dtcenter.loader.cache.pool.config.PoolConfig;
import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.Mysql8SourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;
import com.czsj.common.domian.Column;
import com.czsj.common.domian.MySqlTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class MySqlSource {
private static Logger logger = LoggerFactory.getLogger(MySqlSource.class);
private static final String SQL = "SELECT * FROM ";// 数据库操作
private static PreparedStatement pst = null;// 事务对象
public static Connection getconn(String url, String username, String password) {
Mysql8SourceDTO source = Mysql8SourceDTO.builder()
.url(url)
.username(username)
.password(password)
.poolConfig(PoolConfig.builder().build())
.build();
//获取连接
IClient client = ClientCache.getClient(DataSourceType.MySQL8.getVal());
Boolean isConnected = client.testCon(source);
if (Boolean.FALSE.equals(isConnected)) {
throw new RuntimeException("connection exception");
}
return client.getCon(source);
}
public static Map<String,Object> getconns(String url, String username, String password) {
Map<String,Object> map = new HashMap<>();
Mysql8SourceDTO source = Mysql8SourceDTO.builder()
.url(url)
.username(username)
.password(password)
.poolConfig(PoolConfig.builder().build())
.build();
//获取连接
IClient client = ClientCache.getClient(DataSourceType.MySQL8.getVal());
Boolean isConnected = client.testCon(source);
if (Boolean.FALSE.equals(isConnected)) {
throw new RuntimeException("connection exception");
}
map.put("client",client);
map.put("source",source);
return map;
}
//关闭数据库连接
public static void CloseCon(Connection con) throws Exception {
if (con != null) {
con.close();
logger.info("已断开与数据库的连接!");
}
}
public static boolean cttable(Connection conn, String sql) {
try {
Statement state = conn.createStatement();
state.executeUpdate(sql);
System.out.println("操作成功!");
return true;
} catch (SQLException e) {
// TODO Auto-generated catch bloMySql
System.out.println("操作失败!");
e.printStackTrace();
return false;
}
}
/**
* 自定义查询
*/
public static List<Map<String, Object>> customQuery(IClient client, Mysql8SourceDTO source, String sql,List<String> parameters) {
// 预编译查询
List<Object> preFields = new ArrayList<>();
if(parameters!=null&&parameters.size()>0){
for(String parameter : parameters){
preFields.add(parameter);
}
}
SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).preFields(preFields).build();
List<Map<String, Object>> result = client.executeQuery(source, queryDTO);
return result;
}
/**
* 修改MySql数据库的字段名
*
* @param conn 数据库连接对象
* @param colonystate 是否是集群
* @param colonyname 集群名 没有填null
* @param database 库名
* @param table 表名
* @param name 当前字段名
* @param toname 修改字段名
* @return
*/
public static boolean updateMySqlFieldName(Connection conn, String colonystate, String colonyname, String database, String table, String name, String toname) {
try {
Statement state = conn.createStatement();
if ("0".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + " CHANGE " + name + " TO " + toname + ";");
state.executeUpdate("ALTER TABLE " + database + "." + table + " CHANGE " + name + " TO " + toname + ";");
} else if ("1".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " CHANGE " + name + " TO " + toname);
state.executeUpdate("ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " CHANGE " + name + " TO " + toname + ";");
} else {
return false;
}
System.out.println("操作成功!");
return true;
} catch (SQLException e) {
// TODO Auto-generated catch bloMySql
System.out.println("操作失败!");
e.printStackTrace();
return false;
}
}
/**
* 修改MySql数据库的字段类型
*
* @param conn 数据库连接对象
* @param colonystate 是否是集群
* @param colonyname 集群名 没有填null
* @param database 库名
* @param table 表名
* @param name 当前字段名
* @param type 字段类型
* @return
*/
public static boolean updateMySqlFieldType(Connection conn, String colonystate, String colonyname, String database, String table, String name, String type) {
try {
Statement state = conn.createStatement();
if ("0".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + " modify " + name + " " + type + ";");
state.executeUpdate("ALTER TABLE " + database + "." + table + " modify " + name + " " + type + ";");
} else if ("1".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " modify " + name + " " + type + ";");
state.executeUpdate("ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " modify " + name + " " + type + ";");
} else {
return false;
}
System.out.println("操作成功!");
return true;
} catch (SQLException e) {
// TODO Auto-generated catch bloMySql
System.out.println("操作失败!");
e.printStackTrace();
return false;
}
}
/**
* 修改MySql数据库的注释
*
* @param conn 数据库连接对象
* @param colonystate 是否是集群
* @param colonyname 集群名 没有填null
* @param database 库名
* @param table 表名
* @param name 字段名
* @param comment 注释
* @return
*/
public static boolean updateMySqlFieldComment(Connection conn, String colonystate, String colonyname, String database, String table, String name,String type, String comment) {
try {
Statement state = conn.createStatement();
if ("0".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + " modify column " + name + " " + type + "comment '" + comment + "'" + " ;");
state.executeUpdate("ALTER TABLE " + database + "." + table + " modify column " + name + " " + type +" comment '" + comment + "'" + " ;");
} else if ("1".equals(colonystate)) {
System.out.println("执行语句:" + "ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " modify column " + name + " " + type + "comment '" + comment + "'" + " ;");
state.executeUpdate("ALTER TABLE " + database + "." + table + "on cluster" + colonyname + " modify column " + name + " " + type + "comment '" + comment + "'" + " ;");
} else {
return false;
}
System.out.println("操作成功!");
return true;
} catch (SQLException e) {
// TODO Auto-generated catch bloMySql
System.out.println("操作失败!");
e.printStackTrace();
return false;
}
}
/**
 * Adds a column to a MySQL-dialect table via ALTER TABLE ... ADD.
 *
 * @param conn        open JDBC connection (not closed by this method)
 * @param colonystate "0" = single node, "1" = cluster (adds an ON CLUSTER clause)
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    schema name
 * @param table       table name
 * @param name        new column name
 * @param type        column type, e.g. "varchar(32)"
 * @param comment     column comment
 * @return true on success; false for an unknown colonystate or on SQLException
 */
public static boolean insertMySqlField(Connection conn, String colonystate, String colonyname, String database, String table, String name, String type, String comment) {
    // try-with-resources: the Statement was previously leaked on every call.
    try (Statement state = conn.createStatement()) {
        final String sql;
        if ("0".equals(colonystate)) {
            sql = "ALTER TABLE " + database + "." + table + " ADD " + name + " " + type + " comment '" + comment + "'" + " ;";
        } else if ("1".equals(colonystate)) {
            // FIX: spaces around "on cluster" are required — the old concatenation
            // produced "...tableon cluster<name> ..." which is invalid SQL.
            sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " ADD " + name + " " + type + " comment '" + comment + "'" + " ;";
        } else {
            return false;
        }
        System.out.println("执行语句:" + sql);
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Drops a column from a MySQL-dialect table via ALTER TABLE ... DROP COLUMN.
 *
 * @param conn        open JDBC connection (not closed by this method)
 * @param colonystate "0" = single node, "1" = cluster (adds an ON CLUSTER clause)
 * @param colonyname  cluster name; ignored unless colonystate is "1"
 * @param database    schema name
 * @param table       table name
 * @param name        column to drop
 * @return true on success; false for an unknown colonystate or on SQLException
 */
public static boolean deleteMySqlField(Connection conn, String colonystate, String colonyname, String database, String table, String name) {
    // try-with-resources: the Statement was previously leaked on every call.
    try (Statement state = conn.createStatement()) {
        final String sql;
        if ("0".equals(colonystate)) {
            sql = "ALTER TABLE " + database + "." + table + " DROP COLUMN " + name + " ;";
        } else if ("1".equals(colonystate)) {
            // FIX: spaces around "on cluster" are required — the old concatenation
            // produced "...tableon cluster<name> ..." which is invalid SQL.
            sql = "ALTER TABLE " + database + "." + table + " on cluster " + colonyname + " DROP COLUMN " + name + " ;";
        } else {
            return false;
        }
        System.out.println("执行语句:" + sql);
        state.executeUpdate(sql);
        System.out.println("操作成功!");
        return true;
    } catch (SQLException e) {
        System.out.println("操作失败!");
        e.printStackTrace();
        return false;
    }
}
/**
 * Reads column metadata (name, type, comment, numeric precision/scale,
 * character length) for one table from information_schema.columns, in
 * ordinal position order.
 *
 * @param conn      open JDBC connection; closed before returning (legacy behavior)
 * @param tableName table to describe
 * @param database  schema the table belongs to
 * @return one Column per table column; empty list on error
 */
public static List<Column> getMySqlColumnComment(Connection conn, String tableName, String database) {
    List<Column> columnComments = new ArrayList<>();// collected column descriptors
    // Bind schema/table as parameters: fixes the SQL-injection-prone string
    // concatenation AND the missing space before ORDER BY (the old query read
    // "...table_name = 'x'ORDER BY..."). Also executes the statement that was
    // actually prepared — the old code prepared "SELECT * FROM <table>" and
    // then called executeQuery(String) on a PreparedStatement, which the JDBC
    // spec forbids.
    String sql = "select COLUMN_NAME,DATA_TYPE,COLUMN_COMMENT ,NUMERIC_PRECISION ,NUMERIC_SCALE ,CHARACTER_MAXIMUM_LENGTH "
            + "from information_schema.columns where table_schema = ? and table_name = ? ORDER BY ORDINAL_POSITION";
    try (PreparedStatement pStemt = conn.prepareStatement(sql)) {
        pStemt.setString(1, database);
        pStemt.setString(2, tableName);
        try (ResultSet rs = pStemt.executeQuery()) {
            while (rs.next()) {
                Column column = new Column();
                column.setName(rs.getString("COLUMN_NAME"));
                column.setType(rs.getString("DATA_TYPE"));
                column.setComment(rs.getString("COLUMN_COMMENT"));
                column.setDataPrecision(rs.getString("NUMERIC_PRECISION"));
                column.setDataScale(rs.getString("NUMERIC_SCALE"));
                column.setCharLength(rs.getString("CHARACTER_MAXIMUM_LENGTH"));
                columnComments.add(column);
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Lists all tables of a schema with their comments from
 * information_schema.TABLES.
 *
 * @param conn     open JDBC connection; closed before returning (legacy behavior)
 * @param baseName schema (database) name
 * @return one MySqlTable per table; empty list on error
 */
public static List<MySqlTable> getMySqlColumnComments(Connection conn, String baseName) {
    List<MySqlTable> columnComments = new ArrayList<>();// collected table descriptors
    // Bind the schema name as a parameter (fixes SQL-injection-prone string
    // concatenation) and execute the statement that was actually prepared —
    // the old code prepared "SELECT * FROM <db>" and then called
    // executeQuery(String) on a PreparedStatement, which the JDBC spec forbids.
    String sql = "SELECT table_name , table_comment FROM information_schema.TABLES WHERE table_schema = ?";
    try (PreparedStatement stmt = conn.prepareStatement(sql)) {
        stmt.setString(1, baseName);
        try (ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                MySqlTable mySqlTable = new MySqlTable();
                mySqlTable.setDatabase(baseName);
                mySqlTable.setTableName(rs.getString("table_name"));
                mySqlTable.setComment(rs.getString("table_comment"));
                columnComments.add(mySqlTable);
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Runs a paged SELECT (MySQL "LIMIT offset,count" syntax) and returns a map
 * with keys "tables" (row maps keyed by column name), "tableData" (column
 * headers) and "total" (unpaged row count). Page 1 maps to offset 0; the
 * connection is closed by executeQuerySqlNum at the end (legacy behavior
 * callers rely on).
 *
 * @auther xinjingczsj
 * @time 2022年7月20日
 */
public static Map<String, Object> executeQuerySql(Connection conn, String sql, int pageNum, int pageSize) throws Exception {
    if (pageNum == 1) {
        pageNum = 0;
    }
    int start = pageNum * pageSize;
    // Assemble the paged statement.
    String pagedSql = sql + "\n" + "limit " + start + "," + pageSize + "\n";
    System.out.println(pagedSql);
    JSONArray tableTitle = new JSONArray();// table header cells
    JSONArray tableBody = new JSONArray();// table rows
    // Local statement in try-with-resources: the old shared static 'pst' made
    // concurrent callers clobber each other's statements, and was never closed.
    try (PreparedStatement stmt = conn.prepareStatement(pagedSql);
         ResultSet result = stmt.executeQuery()) {
        ResultSetMetaData rsmd = result.getMetaData();
        int columnCount = rsmd.getColumnCount();
        for (int i = 1; i <= columnCount; i++) {
            JSONObject th = new JSONObject();// one header cell
            th.put("columnname", rsmd.getColumnName(i));// column name
            th.put("tablename", rsmd.getTableName(i));// table name
            th.put("columnclassname", rsmd.getColumnClassName(i));// Java type
            th.put("columntypename", rsmd.getColumnTypeName(i) + "(" + rsmd.getColumnDisplaySize(i) + ")");// DB type
            tableTitle.add(th);
        }
        while (result.next()) {
            JSONArray tableRow = new JSONArray();
            for (int i = 1; i <= columnCount; i++) {
                tableRow.add(readCell(result, rsmd.getColumnClassName(i), i));
            }
            tableBody.add(tableRow);
        }
    }
    Map<String, Object> remap = new HashMap<>();
    List<Map<Object, Object>> list = new ArrayList<>();
    List<Map<String, Object>> Datalist = new ArrayList<>();
    Object[] str = new String[tableTitle.size()];
    for (int j = 0; j < tableTitle.size(); j++) {
        JSONObject th = (JSONObject) tableTitle.get(j);
        Map<String, Object> tableData = new HashMap<>();
        tableData.put("dataItem", th.get("columnname"));
        Datalist.add(tableData);
        str[j] = th.get("columnname");
        System.out.print(th.get("columnname") + "\t");
    }
    System.out.println("\n------------------------------------------------------------------------------------------------------------------------");
    for (Object o : tableBody) {
        Map<Object, Object> tables = new HashMap<>();
        JSONArray row = (JSONArray) o;
        for (int j = 0; j < row.size(); j++) {
            tables.put(str[j], row.get(j));
            System.out.print(row.get(j) + "\t");
        }
        list.add(tables);
        System.out.println();
    }
    Long total = executeQuerySqlNum(conn, sql);
    remap.put("tables", list);
    remap.put("tableData", Datalist);
    remap.put("total", total);
    return remap;
}

/**
 * Maps one result-set cell to a JSON-friendly value based on the JDBC column
 * class name; mirrors the legacy per-type switch (timestamps are rendered via
 * dateToStamp/stampToDate exactly as before).
 */
private static Object readCell(ResultSet result, String classname, int i) throws SQLException {
    switch (classname) {
        case "java.math.BigDecimal":
        case "java.math.BigInteger":
            return result.getBigDecimal(i);
        case "java.lang.Boolean":
            return result.getBoolean(i);
        case "java.lang.Byte":
            return result.getByte(i);
        case "java.util.Date":
        case "java.sql.Date": {
            java.sql.Date date = result.getDate(i);
            return date == null ? "" : new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date);
        }
        case "java.lang.Double":
            return result.getDouble(i);
        case "java.lang.Float":
            return result.getFloat(i);
        case "java.lang.Integer":
            return result.getInt(i);
        case "java.lang.Long":
            return result.getLong(i);
        case "java.lang.String":
            return result.getString(i);
        case "java.sql.Timestamp": {
            // Legacy placeholder used when the column is null/empty.
            String str = "9999-12-12 00:00:00";
            if (!"".equals(result.getString(i)) && result.getString(i) != null) {
                str = result.getString(i);
            }
            return stampToDate(Double.valueOf(dateToStamp(str)));
        }
        default:
            return result.getString(i);
    }
}
/**
 * Counts the rows of an arbitrary query by wrapping it in
 * "select count(*) ... from (<sql>) A".
 *
 * @param conn open JDBC connection; CLOSED before returning (legacy behavior
 *             that executeQuerySql relies on)
 * @param sql  query to count
 * @return total row count, 0 if the count query yields no row
 *
 * @auther xinjingczsj
 * @time 2022年7月20日
 */
public static Long executeQuerySqlNum(Connection conn, String sql) throws Exception {
    // "A" is a derived-table alias, mandatory in MySQL.
    String countSql = "select count(*) statistics from (" + sql + ") A";
    Long total = 0L;
    // Local statement in try-with-resources: the old shared static 'pst' was
    // not thread-safe and was never closed.
    try (PreparedStatement stmt = conn.prepareStatement(countSql);
         ResultSet result = stmt.executeQuery()) {
        while (result.next()) {
            total = result.getLong("statistics");
        }
    }
    conn.close();// legacy: callers expect the connection closed here
    return total;
}
/**
 * Formats a Unix timestamp given in seconds as "yyyy-MM-dd HH:mm:ss"
 * in the JVM default time zone.
 */
public static String stampToDate(Double time) {
    java.util.Date instant = new java.util.Date((long) (time * 1000L));
    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(instant);
}
/*
 * Converts a "yyyy-MM-dd HH:mm:ss" string to a Unix timestamp in seconds.
 * Empty input falls back to the current time; unparsable input yields "".
 */
public static String dateToStamp(String time) {
    if ("".equals(time)) {
        // Empty input: use "now", truncated to seconds.
        return String.valueOf(System.currentTimeMillis() / 1000);
    }
    try {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        return String.valueOf(sdf.parse(time).getTime() / 1000);
    } catch (Exception e) {
        System.out.println("参数为空!");
        return "";
    }
}
}

View File

@ -0,0 +1,482 @@
package com.czsj.common.datasource;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.dtstack.dtcenter.loader.cache.pool.config.PoolConfig;
import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.OracleSourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;
import com.czsj.common.domian.Column;
import com.czsj.common.domian.OrcTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class OracleSourse {
    // FIX: was LoggerFactory.getLogger(ClickhouseSource.class) — a copy-paste
    // error that attributed every log line of this class to ClickhouseSource.
    private static Logger logger = LoggerFactory.getLogger(OracleSourse.class);
    private static final String SQL = "SELECT * FROM ";// base query prefix
    private static PreparedStatement pst = null;// legacy shared statement (not thread-safe)
    /**
     * Opens an Oracle connection through the dtstack loader, verifying
     * connectivity with testCon first.
     *
     * @throws RuntimeException if the test connection fails
     */
    public static Connection getconn(String url, String username, String password, String schema) {
        OracleSourceDTO source = OracleSourceDTO.builder()
                .url(url)
                .username(username)
                .password(password)
                .schema(schema)
                .poolConfig(new PoolConfig())
                .build();
        // Obtain the Oracle loader client and fail fast if unreachable.
        IClient client = ClientCache.getClient(DataSourceType.Oracle.getVal());
        Boolean isConnected = client.testCon(source);
        if (Boolean.FALSE.equals(isConnected)) {
            throw new RuntimeException("connection exception");
        }
        return client.getCon(source);
    }
/**
 * Builds a verified Oracle source and returns both the loader client and the
 * source DTO, under the map keys "client" and "source".
 *
 * @throws RuntimeException if the test connection fails
 */
public static Map<String, Object> getconns(String url, String username, String password, String schema) {
    OracleSourceDTO dto = OracleSourceDTO.builder()
            .url(url)
            .username(username)
            .password(password)
            .schema(schema)
            .poolConfig(new PoolConfig())
            .build();
    IClient oracleClient = ClientCache.getClient(DataSourceType.Oracle.getVal());
    if (Boolean.FALSE.equals(oracleClient.testCon(dto))) {
        throw new RuntimeException("connection exception");
    }
    Map<String, Object> handles = new HashMap<>();
    handles.put("client", oracleClient);
    handles.put("source", dto);
    return handles;
}
// Closes the JDBC connection if one is open, logging the disconnect.
public static void CloseCon(Connection con) throws Exception {
    if (con == null) {
        return;
    }
    con.close();
    logger.info("已断开与数据库的连接!");
}
/**
 * Executes a statement that produces no result set (DDL/DML); logs and
 * throws RuntimeException when the loader reports failure.
 */
public static Boolean executeSqlWithoutResultSet(IClient client, OracleSourceDTO source, String sql) {
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).build();
    Boolean succeeded = client.executeSqlWithoutResultSet(source, queryDTO);
    if (!succeeded) {
        logger.error("自定义sql执行" + sql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return succeeded;
}
/**
 * Runs an arbitrary query with optional positional bind parameters and
 * returns the rows as column-name → value maps.
 */
public static List<Map<String, Object>> customQuery(IClient client, OracleSourceDTO source, String sql, List<String> parameters) {
    // Copy bind parameters (if any) into the loader's pre-field list.
    List<Object> preFields = new ArrayList<>();
    if (parameters != null) {
        preFields.addAll(parameters);
    }
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).preFields(preFields).build();
    return client.executeQuery(source, queryDTO);
}
/**
 * Sets the comment of an Oracle column via "COMMENT ON COLUMN table.name IS '...'".
 *
 * @param client  loader client
 * @param source  Oracle source descriptor
 * @param table   table name
 * @param name    column name
 * @param comment new comment text
 * @return true on success
 * @throws RuntimeException if the loader reports failure
 */
public static boolean updateFieldComment(IClient client, OracleSourceDTO source, String table, String name, String comment) {
    String sql = "comment on column " + table + "." + name + " is " + " '" + comment + "'" + " ;";
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).build();
    Boolean aBoolean = client.executeSqlWithoutResultSet(source, queryDTO);
    if (!aBoolean) {
        // FIX: the old message said "字段类型" (column type); this method changes the comment.
        logger.error("修改Orc数据库的字段注释" + sql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return true;
}
/**
 * Renames an Oracle column via "ALTER TABLE db.table RENAME COLUMN old TO new".
 *
 * @param client   loader client
 * @param source   Oracle source descriptor
 * @param database schema name
 * @param table    table name
 * @param name     current column name
 * @param toname   new column name
 * @return true on success
 * @throws RuntimeException if the loader reports failure
 */
public static boolean updateFieldName(IClient client, OracleSourceDTO source, String database, String table, String name, String toname) {
    String renameSql = "ALTER TABLE " + database + "." + table + " RENAME COLUMN " + name + " TO " + toname + ";";
    Boolean succeeded = client.executeSqlWithoutResultSet(source, SqlQueryDTO.builder().sql(renameSql).build());
    if (!succeeded) {
        logger.error("修改Orc数据库的字段名" + renameSql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return true;
}
/**
 * Changes an Oracle column's type via "ALTER TABLE db.table MODIFY (name type)".
 *
 * @param client   loader client
 * @param source   Oracle source descriptor
 * @param database schema name
 * @param table    table name
 * @param name     column name
 * @param type     new column type
 * @return true on success
 * @throws RuntimeException if the loader reports failure
 */
public static boolean updateFieldType(IClient client, OracleSourceDTO source, String database, String table, String name, String type) {
    String sql = "ALTER TABLE " + database + "." + table + " modify ( " + name + " " + type + ");";
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).build();
    Boolean aBoolean = client.executeSqlWithoutResultSet(source, queryDTO);
    if (!aBoolean) {
        // FIX: the old message said "CK数据库" (ClickHouse) — copy-paste from ClickhouseSource.
        logger.error("修改Orc数据库的字段类型" + sql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return true;
}
/**
 * Adds a column to an Oracle table via "ALTER TABLE db.table ADD name type".
 *
 * @param client   loader client
 * @param source   Oracle source descriptor
 * @param database schema name
 * @param table    table name
 * @param name     new column name
 * @param type     column type
 * @return true on success
 * @throws RuntimeException if the loader reports failure
 */
public static boolean insertField(IClient client, OracleSourceDTO source, String database, String table, String name, String type) {
    String addSql = "ALTER TABLE " + database + "." + table + " ADD " + name + " " + type + ";";
    Boolean succeeded = client.executeSqlWithoutResultSet(source, SqlQueryDTO.builder().sql(addSql).build());
    if (!succeeded) {
        logger.error("添加Orc数据库的字段" + addSql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return true;
}
/**
 * Drops a column from an Oracle table via "ALTER TABLE db.table DROP COLUMN name".
 *
 * @param client   loader client
 * @param source   Oracle source descriptor
 * @param database schema name
 * @param table    table name
 * @param name     column to drop
 * @return true on success
 * @throws RuntimeException if the loader reports failure
 */
public static boolean deleteField(IClient client, OracleSourceDTO source, String database, String table, String name) {
    String sql = "ALTER TABLE " + database + "." + table + " DROP COLUMN " + name + " ;";
    SqlQueryDTO queryDTO = SqlQueryDTO.builder().sql(sql).build();
    Boolean aBoolean = client.executeSqlWithoutResultSet(source, queryDTO);
    if (!aBoolean) {
        // FIX: the old message said "添加" (add); this method deletes a column.
        logger.error("删除Orc数据库的字段" + sql + " 失败\n");
        throw new RuntimeException("执行失败");
    }
    return true;
}
/**
 * Reads column metadata (name, comment, type, precision, scale, char length)
 * for one table from USER_COL_COMMENTS joined to USER_TAB_COLUMNS, in
 * COLUMN_ID order.
 *
 * @param conn      open JDBC connection; closed before returning (legacy behavior)
 * @param tableName table to describe
 * @return one Column per table column; empty list on error
 */
public static List<Column> getColumnComment(Connection conn, String tableName) {
    List<Column> columnComments = new ArrayList<>();// collected column descriptors
    // Bind the table name instead of concatenating it (SQL injection) and
    // execute the statement that was actually prepared — the old code prepared
    // "SELECT * FROM <table>" and then called executeQuery(String) on a
    // PreparedStatement, which the JDBC spec forbids.
    String sql = "select a.COLUMN_NAME, a.COMMENTS,b.DATA_TYPE,b.DATA_PRECISION, b.DATA_SCALE,b.CHAR_LENGTH "
            + "from user_col_comments a left join user_tab_columns b on b.COLUMN_NAME=a.column_name and b.TABLE_NAME = a.table_name "
            + "where a.TABLE_NAME = ? ORDER BY COLUMN_ID";
    try (PreparedStatement stmt = conn.prepareStatement(sql)) {
        stmt.setString(1, tableName);
        try (ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                Column column = new Column();
                column.setName(rs.getString("COLUMN_NAME"));
                column.setType(rs.getString("DATA_TYPE"));
                column.setComment(rs.getString("COMMENTS"));
                column.setDataPrecision(rs.getString("DATA_PRECISION"));
                column.setDataScale(rs.getString("DATA_SCALE"));
                column.setCharLength(rs.getString("CHAR_LENGTH"));
                columnComments.add(column);
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Lists all tables visible to the current Oracle user with their comments
 * from USER_TAB_COMMENTS.
 *
 * @param conn     open JDBC connection; closed before returning (legacy behavior)
 * @param baseName schema name, recorded on each result (the query itself is
 *                 scoped to the CURRENT user's tables)
 * @return one OrcTable per table; empty list on error
 */
public static List<OrcTable> getColumnComments(Connection conn, String baseName) {
    List<OrcTable> columnComments = new ArrayList<>();// collected table descriptors
    // Execute the statement that is actually prepared — the old code prepared
    // "SELECT * FROM <baseName>" and then called executeQuery(String) on a
    // PreparedStatement, which the JDBC spec forbids.
    String sql = "select table_name,comments from user_tab_comments ";
    try (PreparedStatement stmt = conn.prepareStatement(sql);
         ResultSet rs = stmt.executeQuery()) {
        while (rs.next()) {
            OrcTable orcTable = new OrcTable();
            orcTable.setDatabase(baseName);
            orcTable.setTableName(rs.getString("table_name"));
            orcTable.setComments(rs.getString("comments"));
            orcTable.setCreateTableQuery("--");// DDL not fetched here (legacy placeholder)
            columnComments.add(orcTable);
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            CloseCon(conn);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return columnComments;
}
/**
 * Runs a paged SELECT against Oracle and returns a map with keys "tables"
 * (row maps keyed by column name), "tableData" (column headers) and "total"
 * (unpaged row count). Page 1 maps to offset 0; the connection is closed by
 * executeQuerySqlNum at the end (legacy behavior callers rely on).
 *
 * @auther xinjingczsj
 * @time 2022年7月20日
 */
public static Map<String, Object> executeQuerySql(Connection conn, String sql, int pageNum, int pageSize) throws Exception {
    if (pageNum == 1) {
        pageNum = 0;
    }
    int start = pageNum * pageSize;
    // FIX: Oracle has no MySQL-style "LIMIT offset,count" — the old statement
    // failed with ORA-00933. Use the Oracle 12c+ row-limiting clause instead.
    String pagedSql = sql + "\n" + "OFFSET " + start + " ROWS FETCH NEXT " + pageSize + " ROWS ONLY" + "\n";
    System.out.println(pagedSql);
    JSONArray tableTitle = new JSONArray();// table header cells
    JSONArray tableBody = new JSONArray();// table rows
    // Local statement in try-with-resources: the old shared static 'pst' made
    // concurrent callers clobber each other's statements, and was never closed.
    try (PreparedStatement stmt = conn.prepareStatement(pagedSql);
         ResultSet result = stmt.executeQuery()) {
        ResultSetMetaData rsmd = result.getMetaData();
        int columnCount = rsmd.getColumnCount();
        for (int i = 1; i <= columnCount; i++) {
            JSONObject th = new JSONObject();// one header cell
            th.put("columnname", rsmd.getColumnName(i));// column name
            th.put("tablename", rsmd.getTableName(i));// table name
            th.put("columnclassname", rsmd.getColumnClassName(i));// Java type
            th.put("columntypename", rsmd.getColumnTypeName(i) + "(" + rsmd.getColumnDisplaySize(i) + ")");// DB type
            tableTitle.add(th);
        }
        while (result.next()) {
            JSONArray tableRow = new JSONArray();
            for (int i = 1; i <= columnCount; i++) {
                tableRow.add(readCell(result, rsmd.getColumnClassName(i), i));
            }
            tableBody.add(tableRow);
        }
    }
    Map<String, Object> remap = new HashMap<>();
    List<Map<Object, Object>> list = new ArrayList<>();
    List<Map<String, Object>> Datalist = new ArrayList<>();
    Object[] str = new String[tableTitle.size()];
    for (int j = 0; j < tableTitle.size(); j++) {
        JSONObject th = (JSONObject) tableTitle.get(j);
        Map<String, Object> tableData = new HashMap<>();
        tableData.put("dataItem", th.get("columnname"));
        Datalist.add(tableData);
        str[j] = th.get("columnname");
        System.out.print(th.get("columnname") + "\t");
    }
    logger.info("\n------------------------------------------------------------------------------------------------------------------------");
    for (Object o : tableBody) {
        Map<Object, Object> tables = new HashMap<>();
        JSONArray row = (JSONArray) o;
        for (int j = 0; j < row.size(); j++) {
            tables.put(str[j], row.get(j));
            System.out.print(row.get(j) + "\t");
        }
        list.add(tables);
        System.out.println();
    }
    Long total = executeQuerySqlNum(conn, sql);
    remap.put("tables", list);
    remap.put("tableData", Datalist);
    remap.put("total", total);
    return remap;
}

/**
 * Maps one result-set cell to a JSON-friendly value based on the JDBC column
 * class name; mirrors the legacy per-type switch (timestamps are rendered via
 * dateToStamp/stampToDate exactly as before).
 */
private static Object readCell(ResultSet result, String classname, int i) throws SQLException {
    switch (classname) {
        case "java.math.BigDecimal":
        case "java.math.BigInteger":
            return result.getBigDecimal(i);
        case "java.lang.Boolean":
            return result.getBoolean(i);
        case "java.lang.Byte":
            return result.getByte(i);
        case "java.util.Date":
        case "java.sql.Date": {
            java.sql.Date date = result.getDate(i);
            return date == null ? "" : new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date);
        }
        case "java.lang.Double":
            return result.getDouble(i);
        case "java.lang.Float":
            return result.getFloat(i);
        case "java.lang.Integer":
            return result.getInt(i);
        case "java.lang.Long":
            return result.getLong(i);
        case "java.lang.String":
            return result.getString(i);
        case "java.sql.Timestamp": {
            // Legacy placeholder used when the column is null/empty.
            String str = "9999-12-12 00:00:00";
            if (!"".equals(result.getString(i)) && result.getString(i) != null) {
                str = result.getString(i);
            }
            return stampToDate(Double.valueOf(dateToStamp(str)));
        }
        default:
            return result.getString(i);
    }
}
/**
 * Counts the rows of an arbitrary query by wrapping it in
 * "select count(*) ... from (<sql>)" (Oracle allows an unaliased derived table).
 *
 * @param conn open JDBC connection; CLOSED before returning (legacy behavior
 *             that executeQuerySql relies on)
 * @param sql  query to count
 * @return total row count, 0 if the count query yields no row
 *
 * @auther xinjingczsj
 * @time 2022年7月20日
 */
public static Long executeQuerySqlNum(Connection conn, String sql) throws Exception {
    String countSql = "select count(*) statistics from (" + sql + ")";
    Long total = 0L;
    // Local statement in try-with-resources: the old shared static 'pst' was
    // not thread-safe and was never closed.
    try (PreparedStatement stmt = conn.prepareStatement(countSql);
         ResultSet result = stmt.executeQuery()) {
        while (result.next()) {
            total = result.getLong("statistics");
        }
    }
    conn.close();// legacy: callers expect the connection closed here
    return total;
}
/**
 * Formats a Unix timestamp given in seconds as "yyyy-MM-dd HH:mm:ss"
 * in the JVM default time zone.
 */
public static String stampToDate(Double time) {
    java.util.Date instant = new java.util.Date((long) (time * 1000L));
    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(instant);
}
/*
 * Converts a "yyyy-MM-dd HH:mm:ss" string to a Unix timestamp in seconds.
 * Empty input falls back to the current time; unparsable input yields "".
 */
public static String dateToStamp(String time) {
    if ("".equals(time)) {
        // Empty input: use "now", truncated to seconds.
        return String.valueOf(System.currentTimeMillis() / 1000);
    }
    try {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        return String.valueOf(sdf.parse(time).getTime() / 1000);
    } catch (Exception e) {
        System.out.println("参数为空!");
        return "";
    }
}
}

View File

@ -0,0 +1,54 @@
package com.czsj.common.domian;
import java.util.List;
/**
 * Plain data holder describing one ClickHouse table: the owning database, the
 * table name, its CREATE TABLE DDL and its column list.
 */
public class CkTable {
    // Database (schema) the table belongs to.
    private String database;
    // Table name.
    private String tableName;
    // Full "CREATE TABLE ..." statement text.
    private String createTableQuery;
    // Columns of the table. Field name is non-idiomatic UpperCamel but kept as-is.
    private List<Column> ColumnList;
    public String getTableName() {
        return tableName;
    }
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
    public List<Column> getColumnList() {
        return ColumnList;
    }
    public void setColumnList(List<Column> columnList) {
        ColumnList = columnList;
    }
    public String getDatabase() {
        return database;
    }
    public void setDatabase(String database) {
        this.database = database;
    }
    public String getCreateTableQuery() {
        return createTableQuery;
    }
    public void setCreateTableQuery(String createTableQuery) {
        this.createTableQuery = createTableQuery;
    }
    // NOTE: database and createTableQuery are intentionally not printed here.
    @Override
    public String toString() {
        return "CkTable{" +
                "tableName='" + tableName + '\'' +
                ", ColumnList=" + ColumnList +
                '}';
    }
}

View File

@ -0,0 +1,58 @@
package com.czsj.common.domian;
/**
 * Plain data holder describing one database column: name, type, comment,
 * numeric precision/scale and character length. All values are kept as
 * strings exactly as read from metadata queries.
 */
public class Column {
    private String name;
    private String type;
    private String comment;
    private String dataPrecision;
    // FIX: renamed from "DataScale" (UpperCamel field name violated Java naming
    // conventions, and the setter assigned it without "this."). The field is
    // private and the accessor signatures are unchanged, so callers are unaffected.
    private String dataScale;
    private String charLength;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public String getDataPrecision() {
        return dataPrecision;
    }

    public void setDataPrecision(String dataPrecision) {
        this.dataPrecision = dataPrecision;
    }

    public String getDataScale() {
        return dataScale;
    }

    public void setDataScale(String dataScale) {
        this.dataScale = dataScale;
    }

    public String getCharLength() {
        return charLength;
    }

    public void setCharLength(String charLength) {
        this.charLength = charLength;
    }
}

View File

@ -0,0 +1,64 @@
package com.czsj.common.domian;
import java.util.List;
/**
 * Plain data holder describing one MySQL table: the owning database, the
 * table name, its CREATE TABLE DDL, its table comment and its column list.
 */
public class MySqlTable {
    // Database (schema) the table belongs to.
    private String database;
    // Table name.
    private String tableName;
    // Full "CREATE TABLE ..." statement text.
    private String createTableQuery;
    // Table comment from information_schema.
    private String comment;
    // Columns of the table. Field name is non-idiomatic UpperCamel but kept as-is.
    private List<Column> ColumnList;

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public String getTableName() {
        return tableName;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public List<Column> getColumnList() {
        return ColumnList;
    }

    public void setColumnList(List<Column> columnList) {
        ColumnList = columnList;
    }

    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getCreateTableQuery() {
        return createTableQuery;
    }

    public void setCreateTableQuery(String createTableQuery) {
        this.createTableQuery = createTableQuery;
    }

    @Override
    public String toString() {
        // FIX: previously printed "CkTable{...}" (copy-paste from CkTable) and
        // omitted database/comment, which made debug output misleading.
        return "MySqlTable{" +
                "database='" + database + '\'' +
                ", tableName='" + tableName + '\'' +
                ", comment='" + comment + '\'' +
                ", ColumnList=" + ColumnList +
                '}';
    }
}

View File

@ -0,0 +1,66 @@
package com.czsj.common.domian;
import java.util.List;
/**
 * Plain data holder describing one Oracle table: the owning database, the
 * table name, its comment, its CREATE TABLE DDL and its column list.
 */
public class OrcTable {
    // Database (schema) the table belongs to.
    private String database;
    // Table name.
    private String tableName;
    // Table comment from user_tab_comments.
    private String comments;
    // Full "CREATE TABLE ..." statement text ("--" when not fetched).
    private String createTableQuery;
    // Columns of the table. Field name is non-idiomatic UpperCamel but kept as-is.
    private List<Column> ColumnList;
    public String getCreateTableQuery() {
        return createTableQuery;
    }
    public void setCreateTableQuery(String createTableQuery) {
        this.createTableQuery = createTableQuery;
    }
    public String getDatabase() {
        return database;
    }
    public void setDatabase(String database) {
        this.database = database;
    }
    public String getTableName() {
        return tableName;
    }
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
    public String getComments() {
        return comments;
    }
    public void setComments(String comments) {
        this.comments = comments;
    }
    public List<Column> getColumnList() {
        return ColumnList;
    }
    public void setColumnList(List<Column> columnList) {
        ColumnList = columnList;
    }
    @Override
    public String toString() {
        return "OrcTable{" +
                "database='" + database + '\'' +
                ", tableName='" + tableName + '\'' +
                ", comments='" + comments + '\'' +
                ", createTableQuery='" + createTableQuery + '\'' +
                ", ColumnList=" + ColumnList +
                '}';
    }
}

View File

@ -0,0 +1,37 @@
package com.czsj.common.handler;
import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.reflection.MetaObject;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import java.util.Date;
import static com.czsj.common.utils.SecurityUtils.getUsername;
/**
 * Common MyBatis-Plus automatic field-fill handler: populates
 * createTime/createBy on insert and updateTime/updateBy on update with the
 * current time and the currently authenticated user name.
 *
 * @author huzekang
 */
@Component
@Slf4j
public class MybatisMetaObjectHandler implements MetaObjectHandler {
    @Override
    public void insertFill(MetaObject metaObject) {
        // Stamp creation metadata; getUsername() is statically imported from SecurityUtils.
        setFieldValByName("createTime", new Date(), metaObject);
        setFieldValByName("createBy", getUsername(), metaObject);
    }
    @Override
    public void updateFill(MetaObject metaObject) {
        // Stamp last-modification metadata.
        setFieldValByName("updateTime", new Date(), metaObject);
        setFieldValByName("updateBy", getUsername(), metaObject);
    }
    // NOTE(review): appears unused — both fill methods call getUsername() from
    // SecurityUtils instead; confirm no reflective use before removing.
    private String getCurrentUser() {
        return SecurityContextHolder.getContext().getAuthentication().getPrincipal().toString();
    }
}

View File

@ -56,7 +56,6 @@
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.test.skip>true</maven.test.skip>
<commons-lang3.version>3.3.2</commons-lang3.version>
<slf4j-api.version>1.7.28</slf4j-api.version>
<logback-classic.version>1.2.2</logback-classic.version>
<commons-io.version>2.4</commons-io.version>