diff --git a/czsj-admin/src/main/assembly/package.xml b/czsj-admin/src/main/assembly/package.xml new file mode 100644 index 0000000..b9018cc --- /dev/null +++ b/czsj-admin/src/main/assembly/package.xml @@ -0,0 +1,49 @@ + + dist + + dir + + false + + + ${basedir}/src/main/bin + unix + service + + *.service + + + + ${basedir}/src/main/bin + unix + + 755 + + *.sh + + + + ${basedir}/src/main/bin + windows + + + *.bat + + + + target/classes + config + + **/*.yml + **/*.xml + + + + + + lib + false + + + + \ No newline at end of file diff --git a/czsj-admin/src/main/java/com/czsj/assembly/deploy.xml b/czsj-admin/src/main/java/com/czsj/assembly/deploy.xml new file mode 100644 index 0000000..94b370d --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/assembly/deploy.xml @@ -0,0 +1,47 @@ + + + dist + + tar.gz + + false + + + true + admin/lib + runtime + + + + + unix + ./src/main/bin + admin/bin + + **/* + + 0755 + + + ./src/main/logs + admin/logs + + + ./src/main/resources + + *.properties + logback.xml + application.yml + mapper/**/** + i18n/** + static/** + + admin/conf + + + src/main/lib + admin/lib + + + diff --git a/czsj-admin/src/main/java/com/czsj/bin/admin.bat b/czsj-admin/src/main/java/com/czsj/bin/admin.bat new file mode 100644 index 0000000..a1d6505 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/bin/admin.bat @@ -0,0 +1,9 @@ +@echo off +set home=%~dp0 + +set conf_dir=%home%..\conf +set lib_dir=%home%..\lib\* +set log_dir=%home%..\logs + +java -Dspring.profiles.active=standalone -Dlogging.file=%log_dir%\dbApi.log -classpath %conf_dir%;%lib_dir% com.larkmidtable.admin.AdminApplication +pause \ No newline at end of file diff --git a/czsj-admin/src/main/java/com/czsj/bin/admin.sh b/czsj-admin/src/main/java/com/czsj/bin/admin.sh new file mode 100644 index 0000000..066b472 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/bin/admin.sh @@ -0,0 +1,266 @@ +#!/bin/bash +# + +FRIEND_NAME=ADMIN +MAIN_CLASS=com.larkmidtable.admin.AdminApplication +if [ ! ${ENV_FILE} ]; then + ENV_FILE="env.properties" +fi +SLEEP_TIMEREVAL_S=2 + +abs_path(){ + SOURCE="${BASH_SOURCE[0]}" + while [ -h "${SOURCE}" ]; do + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + SOURCE="$(readlink "${SOURCE}")" + [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" + done + echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )" +} + +function LOG(){ + currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"` + echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG} +} + +verify_java_env(){ + if [ "x${JAVA_HOME}" != "x" ]; then + ${JAVA_HOME}/bin/java -version >/dev/null 2>&1 + else + java -version >/dev/null 2>&1 + fi + if [ $? -ne 0 ]; then + cat 1>&2 </dev/null` + if [ "x"${pid_in_file} != "x" ]; then + p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'` + fi + fi + else + p=`${JPS} -l | grep "$2" | awk '{print $1}'` + fi + if [ -n "$p" ]; then + # echo "$1 ($2) is still running with pid $p" + return 0 + else + # echo "$1 ($2) does not appear in the java process table" + return 1 + fi +} + +wait_for_startup(){ + local now_s=`date '+%s'` + local stop_s=$((${now_s} + $1)) + while [ ${now_s} -le ${stop_s} ];do + status_class ${FRIEND_NAME} ${MAIN_CLASS} + if [ $? -eq 0 ]; then + return 0 + fi + sleep ${SLEEP_TIMEREVAL_S} + now_s=`date '+%s'` + done + exit 1 +} + +wait_for_stop(){ + local now_s=`date '+%s'` + local stop_s=$((${now_s} + $1)) + while [ ${now_s} -le ${stop_s} ];do + status_class ${FRIEND_NAME} ${MAIN_CLASS} + if [ $? 
-eq 1 ]; then
+            return 0
+        fi
+        sleep ${SLEEP_TIMEREVAL_S}
+        now_s=`date '+%s'`
+    done
+    return 1
+}
+
+start_m(){
+    status_class ${FRIEND_NAME} ${MAIN_CLASS}
+    if [ $? -eq 0 ]; then
+        LOG INFO "${FRIEND_NAME} is already running"
+        exit 0
+    fi
+    LOG INFO ${EXE_JAVA}
+    nohup ${EXE_JAVA} >${SHELL_LOG} 2>&1 &
+    LOG INFO "Waiting for ${FRIEND_NAME} to start ..."
+    wait_for_startup 20
+    if [ $? -eq 0 ]; then
+        LOG INFO "${FRIEND_NAME} start success"
+        return 0
+    else
+        LOG ERROR "${FRIEND_NAME} did not start within 20s" >&2
+        return 1
+    fi
+}
+
+stop_m(){
+    local p=""
+    if [ "x"${PID_FILE_PATH} != "x" ]; then
+        if [ -f ${PID_FILE_PATH} ]; then
+            local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
+            if [ "x"${pid_in_file} != "x" ]; then
+                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+            fi
+        fi
+    else
+        p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
+    fi
+    if [ -z "${p}" ]; then
+        LOG INFO "${FRIEND_NAME} is not running, not found in the java process table"
+        return 0
+    fi
+    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
+    kill -9 ${p}
+    LOG INFO "Stop successful..."
+}
+
+shutdown_m(){
+    local p=""
+    if [ "x"${PID_FILE_PATH} != "x" ]; then
+        if [ -f ${PID_FILE_PATH} ]; then
+            local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
+            if [ "x"${pid_in_file} != "x" ]; then
+                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+            fi
+        fi
+    else
+        p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
+    fi
+    if [ -z "${p}" ]; then
+        LOG INFO "${FRIEND_NAME} is not running, not found in the java process table"
+        return 0
+    fi
+    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
+    case "`uname`" in
+        CYGWIN*) taskkill /F /PID "${p}" ;;
+        *) kill -9 "${p}" ;;
+    esac
+}
+
+restart_m(){
+    stop_m
+    if [ $? -eq 0 ]; then
+        start_m
+        exit $?
+    else
+        LOG ERROR "${FRIEND_NAME} restart failed" >&2
+        exit 1
+    fi
+}
+if [ ! $1 ]; then
+    usage
+    exit 1;
+fi
+case $1 in
+    start) start_m;;
+    stop) stop_m;;
+    shutdown) shutdown_m;;
+    restart) restart_m;;
+    *)
+        usage
+        exit 1
+        ;;
+esac
+exit $?
diff --git a/czsj-admin/src/main/java/com/czsj/bin/configure.sh b/czsj-admin/src/main/java/com/czsj/bin/configure.sh
new file mode 100644
index 0000000..28038a6
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/bin/configure.sh
@@ -0,0 +1,202 @@
+#!/bin/bash
+
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+SHELL_LOG="${DIR}/console.out"
+SERVER_NAME="admin"
+USER=`whoami`
+SAFE_MODE=true
+SUDO_USER=false
+ENV_FILE_PATH="${DIR}/env.properties"
+
+usage(){
+    printf "Configure usage:\n"
+    printf "\t%-10s %-10s %-2s \n" --server "server-name" "Name of the admin server"
+    printf "\t%-10s %-10s %-2s \n" --unsafe "unsafe mode" "Clean directories that already exist"
+    printf "\t%-10s %-10s %-2s \n" --safe "safe mode" "Do not modify directories that already exist (default)"
+    printf "\t%-10s %-10s %-2s \n" "-h|--help" "usage" "Show this help"
+}
+
+LOG(){
+    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+interact_echo(){
+    while [ 1 ]; do
+        read -p "$1 (Y/N)" yn
+        if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+            return 0
+        elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+            return 1
+        else
+            echo "Unknown choice: [$yn], please choose again."
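+            # Keep prompting until a valid Y/N answer is entered.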
+        fi
+    done
+}
+
+is_sudo_user(){
+    sudo -v >/dev/null 2>&1
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+check_exist(){
+    if test -e "$1"; then
+        LOG INFO "Directory or file: [$1] already exists"
+        if [ $2 == true ]; then
+            LOG INFO "Configure program will exit..."
+            exit 0
+        fi
+    fi
+}
+
+copy_replace(){
+    file_name=$1
+    if test -e "${CONF_PATH}/${file_name}";then
+        if [ ${SAFE_MODE} == true ]; then
+            check_exist "${CONF_PATH}/${file_name}" true
+        fi
+        LOG INFO "Delete file or directory: [${CONF_PATH}/${file_name}]"
+        rm -rf ${CONF_PATH}/${file_name}
+    fi
+    if test -e "${DIR}/../conf/${file_name}";then
+        LOG INFO "Copy from ${DIR}/../conf/${file_name}"
+        cp -R ${DIR}/../conf/${file_name} ${CONF_PATH}/
+    fi
+}
+
+mkdir_p(){
+    if [ ${SAFE_MODE} == true ]; then
+        check_exist $1 false
+    fi
+    if [ ! -d $1 ]; then
+        LOG INFO "Creating directory: ["$1"]."
+        #mkdir -p $1
+        if [ ${SUDO_USER} == true ]; then
+            sudo mkdir -p $1 && sudo chown -R ${USER} $1
+        else
+            mkdir -p $1
+        fi
+    fi
+}
+
+while [ 1 ]; do
+    case ${!OPTIND} in
+        --server)
+            SERVER_NAME=$2
+            shift 2
+            ;;
+        --unsafe)
+            SAFE_MODE=false
+            shift 1
+            ;;
+        --safe)
+            SAFE_MODE=true
+            shift 1
+            ;;
+        --help|-h)
+            usage
+            exit 0
+            ;;
+        *)
+            break
+            ;;
+    esac
+done
+
+is_sudo_user
+if [ $? == 0 ]; then
+    SUDO_USER=true
+fi
+
+BIN=`abs_path`
+SERVER_NAME_SIMPLE=${SERVER_NAME/flinkx-/}
+
+LOG_PATH=${BIN}/../logs
+if [ "x${BASE_LOG_DIR}" != "x" ]; then
+    LOG_PATH=${BASE_LOG_DIR}/${SERVER_NAME_SIMPLE}
+    sed -ri "s![#]?(WEB_LOG_PATH=)\S*!\1${LOG_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+CONF_PATH=${BIN}/../conf
+if [ "x${BASE_CONF_DIR}" != "x" ]; then
+    CONF_PATH=${BASE_CONF_DIR}/${SERVER_NAME_SIMPLE}
+    sed -ri "s![#]?(WEB_CONF_PATH=)\S*!\1${CONF_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+DATA_PATH=${BIN}/../data
+if [ "x${BASE_DATA_DIR}" != "x" ]; then
+    DATA_PATH=${BASE_DATA_DIR}/${SERVER_NAME_SIMPLE}
+    sed -ri "s![#]?(DATA_PATH=)\S*!\1${DATA_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+echo "Start to make directories"
+# Start to make directories
+LOG INFO "\033[1m Start to build directories\033[0m"
+mkdir_p ${LOG_PATH}
+mkdir_p ${CONF_PATH}
+mkdir_p ${DATA_PATH}
+if [ "x${BASE_CONF_DIR}" != "x" ]; then
+    LOG INFO "\033[1m Start to copy configuration files/directories\033[0m"
+    # Copy the configuration files
+    copy_replace bootstrap.properties
+    copy_replace application.yml
+    copy_replace logback.xml
+    copy_replace i18n
+    copy_replace mybatis-mapper
+    copy_replace static
+fi
+echo "Finished making directories"
+
+
+BOOTSTRAP_PROP_FILE="${CONF_PATH}/bootstrap.properties"
+# Start to initialize the database
+echo "Start to initialize the database"
+if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
+    `mysql --version >/dev/null 2>&1`
+    if [ $? == 0 ]; then
+        LOG INFO "\033[1m Found the mysql command, so begin to initialize the database\033[0m"
+        interact_echo "Do you want to initialize the database with sql: [${SQL_SOURCE_PATH}]?"
+        if [ $? == 0 ]; then
+            read -p "Please input the db host (default: 127.0.0.1): " HOST
+            if [ "x${HOST}" == "x" ]; then
+                HOST="127.0.0.1"
+            fi
+            while [ 1 ]; do
+                read -p "Please input the db port (default: 3306): " PORT
+                if [ "x${PORT}" == "x" ]; then
+                    PORT=3306
+                    break
+                elif [ ${PORT} -gt 0 ] 2>/dev/null; then
+                    break
+                else
+                    echo "${PORT} is not a number, please input again"
+                fi
+            done
+            read -p "Please input the db username (default: root): " USERNAME
+            if [ "x${USERNAME}" == "x" ]; then
+                USERNAME="root"
+            fi
+            read -p "Please input the db password (default: empty): " PASSWORD
+            read -p "Please input the db name (default: flinkxweb): " DATABASE
+            if [ "x${DATABASE}" == "x" ]; then
+                DATABASE="flinkxweb"
+            fi
+            mysql -h ${HOST} -P ${PORT} -u ${USERNAME} -p${PASSWORD} --default-character-set=utf8 -e \
+            "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
+            sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE}
+            sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE}
+            sed -ri "s![#]?(DB_USERNAME=)\S*!\1${USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
+            sed -ri "s![#]?(DB_PASSWORD=)\S*!\1${PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
+            sed -ri "s![#]?(DB_DATABASE=)\S*!\1${DATABASE}!g" ${BOOTSTRAP_PROP_FILE}
+        fi
+    fi
+fi
diff --git a/czsj-admin/src/main/java/com/czsj/bin/datax.py b/czsj-admin/src/main/java/com/czsj/bin/datax.py
new file mode 100644
index 0000000..d09c968
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/bin/datax.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+
+import sys
+import os
+import signal
+import subprocess
+import time
+import re
+import socket
+import json
+from optparse import OptionParser
+from optparse import OptionGroup
+from string import Template
+import codecs
+import platform
+
+def printCopyright():
+    print '''
+LarkMidTable (%s), From LarkMidTable !
+LarkMidTable All Rights Reserved.
+
+'''
+    sys.stdout.flush()
+
+if __name__ == "__main__":
+    printCopyright()
+    abs_file=sys.path[0]
+    json_file=sys.argv[1]
+    log_name=sys.argv[2]
+    startCommand = "java -server -Xms1g -Xmx1g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=E:\datax/log -Dloglevel=info -Dfile.encoding=UTF-8 -Dlogback.statusListenerClass=ch.qos.logback.core.status.NopStatusListener -Djava.security.egd=file:///dev/urandom -Ddatax.home=E:\datax -Dlogback.configurationFile=E:\datax/conf/logback.xml -classpath E:\datax/lib/* -Dlog.file.name=8e8e5f7d4cd0fd5_json com.alibaba.datax.core.Engine -mode standalone -jobid -1 -job %s > %s" %(json_file,log_name)
+    print startCommand
+    child_process = subprocess.Popen(startCommand, shell=True)
+    (stdout, stderr) = child_process.communicate()
+
+    sys.exit(child_process.returncode)
\ No newline at end of file
diff --git a/czsj-admin/src/main/java/com/czsj/bin/env.properties b/czsj-admin/src/main/java/com/czsj/bin/env.properties
new file mode 100644
index 0000000..688c864
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/bin/env.properties
@@ -0,0 +1,21 @@
+# environment variables
+
+#JAVA_HOME=""
+
+WEB_LOG_PATH=${BIN}/../logs
+WEB_CONF_PATH=${BIN}/../conf
+
+DATA_PATH=${BIN}/../data
+SERVER_PORT=8080
+
+#PID_FILE_PATH=${BIN}/flinkxadmin.pid
+
+
+# mail account
+MAIL_USERNAME=""
+MAIL_PASSWORD=""
+
+
+#debug
+#REMOTE_DEBUG_SWITCH=true
+#REMOTE_DEBUG_PORT=7003
\ No newline at end of file
diff --git a/czsj-admin/src/main/java/com/czsj/bin/flinkx-local.sh b/czsj-admin/src/main/java/com/czsj/bin/flinkx-local.sh
new file mode 100644
index 0000000..94e30b1
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/bin/flinkx-local.sh
@@ -0,0 +1 @@
+sh ./bin/flinkx -mode local -jobType sync -job ./job/stream.json -flinkxDistDir ./flinkx-dist -flinkConfDir ./flinkconf
diff --git a/czsj-admin/src/main/java/com/czsj/bin/flinkx-python3x.py b/czsj-admin/src/main/java/com/czsj/bin/flinkx-python3x.py
new file mode 100644
index 0000000..6562229
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/bin/flinkx-python3x.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+
+import sys
+import os
+import signal
+import subprocess
+import time
+import re
+import socket
+import json
+from optparse import OptionParser
+from optparse import OptionGroup
+from string import Template
+import codecs
+import platform
+
+def printCopyright():
+    print ('''
+LarkMidTable (%s), From LarkMidTable !
+LarkMidTable All Rights Reserved.
+ +''') + sys.stdout.flush() + +if __name__ == "__main__": + printCopyright() + abs_file=sys.path[0] + json_file=sys.argv[1] + log_name=sys.argv[2] + startCommand = "java -cp %s/lib/* com.dtstack.flinkx.client.Launcher -mode local -jobType sync -job %s -flinkxDistDir %s/flinkx-dist -flinkConfDir %s/flinkconf > %s/%s" %(abs_file,json_file,abs_file,abs_file,abs_file,log_name) + print(startCommand) + child_process = subprocess.Popen(startCommand, shell=True) + (stdout, stderr) = child_process.communicate() + + sys.exit(child_process.returncode) diff --git a/czsj-admin/src/main/java/com/czsj/bin/flinkx.py b/czsj-admin/src/main/java/com/czsj/bin/flinkx.py new file mode 100644 index 0000000..8ce8f45 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/bin/flinkx.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- + +import sys +import os +import signal +import subprocess +import time +import re +import socket +import json +from optparse import OptionParser +from optparse import OptionGroup +from string import Template +import codecs +import platform + +def printCopyright(): + print ''' +LarkMidTable (%s), From LarkMidTable ! +LarkMidTable All Rights Reserved. + +''' + sys.stdout.flush() + +if __name__ == "__main__": + printCopyright() + abs_file=sys.path[0] + json_file=sys.argv[1] + log_name=sys.argv[2] + startCommand = "java -cp %s/lib/* com.dtstack.flinkx.client.Launcher -mode local -jobType sync -job %s -flinkxDistDir %s/flinkx-dist -flinkConfDir %s/flinkconf > %s" %(abs_file,json_file,abs_file,abs_file,log_name) + print startCommand + child_process = subprocess.Popen(startCommand, shell=True) + (stdout, stderr) = child_process.communicate() + + sys.exit(child_process.returncode) diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseController.java new file mode 100644 index 0000000..d945ba4 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseController.java @@ -0,0 +1,148 @@ +package com.czsj.web.controller.bigdata; + +import com.baomidou.mybatisplus.extension.api.ApiController; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import com.ruoshui.common.constant.HttpStatus; +import com.ruoshui.common.core.domain.AjaxResult; +import com.ruoshui.common.core.domain.model.LoginUser; +import com.ruoshui.common.core.page.PageDomain; +import com.ruoshui.common.core.page.TableDataInfo; +import com.ruoshui.common.core.page.TableSupport; +import com.ruoshui.common.utils.DateUtils; +import com.ruoshui.common.utils.PageUtils; +import com.ruoshui.common.utils.SecurityUtils; +import com.ruoshui.common.utils.StringUtils; +import com.ruoshui.common.utils.sql.SqlUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.WebDataBinder; +import org.springframework.web.bind.annotation.InitBinder; + +import java.beans.PropertyEditorSupport; +import java.util.Date; +import java.util.List; + +/** + * web层通用数据处理 + * + * @author ruoshui + */ +public class BaseController extends ApiController +{ + protected final Logger logger = LoggerFactory.getLogger(this.getClass()); + + /** + * 将前台传递过来的日期格式的字符串,自动转化为Date类型 + */ + @InitBinder + public void initBinder(WebDataBinder binder) + { + // Date 类型转换 + binder.registerCustomEditor(Date.class, new PropertyEditorSupport() + { + @Override + public void setAsText(String text) + { + setValue(DateUtils.parseDate(text)); + } + }); + } + + /** + * 设置请求分页数据 + */ + 
protected void startPage() + { + PageUtils.startPage(); + } + + /** + * 设置请求排序数据 + */ + protected void startOrderBy() + { + PageDomain pageDomain = TableSupport.buildPageRequest(); + if (StringUtils.isNotEmpty(pageDomain.getOrderBy())) + { + String orderBy = SqlUtil.escapeOrderBySql(pageDomain.getOrderBy()); + PageHelper.orderBy(orderBy); + } + } + + /** + * 清理分页的线程变量 + */ + protected void clearPage() + { + PageUtils.clearPage(); + } + + /** + * 响应请求分页数据 + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + protected TableDataInfo getDataTable(List list) + { + TableDataInfo rspData = new TableDataInfo(); + rspData.setCode(HttpStatus.SUCCESS); + rspData.setMsg("查询成功"); + rspData.setRows(list); + rspData.setTotal(new PageInfo(list).getTotal()); + return rspData; + } + + + + /** + * 响应返回结果 + * + * @param rows 影响行数 + * @return 操作结果 + */ + protected AjaxResult toAjax(int rows) + { + return rows > 0 ? AjaxResult.success() : AjaxResult.error(); + } + + + /** + * 页面跳转 + */ + public String redirect(String url) + { + return StringUtils.format("redirect:{}", url); + } + + /** + * 获取用户缓存信息 + */ + public LoginUser getLoginUser() + { + return SecurityUtils.getLoginUser(); + } + + /** + * 获取登录用户id + */ + public Long getUserId() + { + return getLoginUser().getUserId(); + } + + /** + * 获取登录部门id + */ + public Long getDeptId() + { + return getLoginUser().getDeptId(); + } + + /** + * 获取登录用户名 + */ + public String getUsername() + { + return getLoginUser().getUsername(); + } +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseForm.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseForm.java new file mode 100644 index 0000000..13e7f39 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseForm.java @@ -0,0 +1,253 @@ +package com.czsj.web.controller.bigdata; + +import cn.hutool.core.util.BooleanUtil; +import cn.hutool.core.util.NumberUtil; +import cn.hutool.core.util.ObjectUtil; +import cn.hutool.core.util.StrUtil; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.ruoshui.bigdata.util.PageUtils; +import com.ruoshui.bigdata.util.ServletUtils; +import lombok.extern.slf4j.Slf4j; + +import javax.servlet.http.HttpServletRequest; +import java.net.URLDecoder; +import java.util.Enumeration; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * 基础参数辅助类 + * + * @author zhouhongfa@gz-yibo.com + * @version 1.0 + * @since 2019/5/15 + */ +@Slf4j +public class BaseForm { + /** + * 查询参数对象 + */ + protected Map values = new LinkedHashMap<>(); + + /** + * 当前页码 + */ + private Long current = 1L; + + /** + * 页大小 + */ + private Long size = 10L; + + /** + * 构造方法 + */ + public BaseForm() { + try { + HttpServletRequest request = ServletUtils.getRequest(); + Enumeration params = request.getParameterNames(); + while (params.hasMoreElements()) { + String name = params.nextElement(); + String value = StrUtil.trim(request.getParameter(name)); + this.set(name, URLDecoder.decode(value, "UTF-8")); + } + this.parsePagingQueryParams(); + } catch (Exception e) { + e.printStackTrace(); + log.error("BaseControlForm initialize parameters setting error:" + e); + } + } + + /** + * 获取页码 + * + * @return + */ + public Long getPageNo() { + String pageNum = StrUtil.toString(this.get("current")); + if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) { + this.current = Long.parseLong(pageNum); + } + return this.current; + } + + /** + * 获取页大小 + * + * @return + */ + public 
Long getPageSize() { + String pageSize = StrUtil.toString(this.get("size")); + + if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) { + this.size = Long.parseLong(pageSize); + } + return this.size; + } + + /** + * 获得参数信息对象 + * + * @return + */ + public Map getParameters() { + return values; + } + + /** + * 根据key获取values中的值 + * + * @param name + * @return + */ + public Object get(String name) { + if (values == null) { + values = new LinkedHashMap<>(); + return null; + } + return this.values.get(name); + } + + /** + * 根据key获取values中String类型值 + * + * @param key + * @return String + */ + public String getString(String key) { + return StrUtil.toString(get(key)); + } + + /** + * 获取排序字段 + * + * @return + */ + public String getSort() { + return StrUtil.toString(this.values.get("sort")); + } + + /** + * 获取排序 + * + * @return + */ + public String getOrder() { + return StrUtil.toString(this.values.get("order")); + } + + /** + * 获取排序 + * + * @return + */ + public String getOrderby() { + return StrUtil.toString(this.values.get("orderby")); + } + + /** + * 解析出mybatis plus分页查询参数 + */ + public Page getPlusPagingQueryEntity() { + Page page = new Page(); + //如果无current,默认返回1000条数据 + page.setCurrent(this.getPageNo()); + page.setSize(this.getPageSize()); + if (ObjectUtil.isNotNull(this.get("ifCount"))) { + page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount"))); + } else { + //默认给true + page.setSearchCount(true); + } + return page; + } + + /** + * 解析分页排序参数(pageHelper) + */ + public void parsePagingQueryParams() { + // 排序字段解析 + String orderBy = StrUtil.toString(this.get("orderby")).trim(); + String sortName = StrUtil.toString(this.get("sort")).trim(); + String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase(); + + if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) { + if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) { + sortOrder = "asc"; + } + this.set("orderby", sortName + " " + sortOrder); + } + } + + + /** + * 设置参数 + * + * @param name 参数名称 + * @param value 参数值 + */ + public void set(String name, Object value) { + if (ObjectUtil.isNotNull(value)) { + this.values.put(name, value); + } + } + + /** + * 移除参数 + * + * @param name + */ + public void remove(String name) { + this.values.remove(name); + } + + /** + * 清除所有参数 + */ + public void clear() { + if (values != null) { + values.clear(); + } + } + + + /** + * 自定义查询组装 + * + * @param map + * @return + */ + protected QueryWrapper pageQueryWrapperCustom(Map map, QueryWrapper queryWrapper) { + // mybatis plus 分页相关的参数 + Map pageParams = PageUtils.filterPageParams(map); + //过滤空值,分页查询相关的参数 + Map colQueryMap = PageUtils.filterColumnQueryParams(map); + //排序 操作 + pageParams.forEach((k, v) -> { + switch (k) { + case "ascs": + queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + case "descs": + queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + } + }); + + //遍历进行字段查询条件组装 + colQueryMap.forEach((k, v) -> { + switch (k) { + case "pluginName": + case "datasourceName": + queryWrapper.like(StrUtil.toUnderlineCase(k), v); + break; + default: + queryWrapper.eq(StrUtil.toUnderlineCase(k), v); + } + }); + + return queryWrapper; + } + +} \ No newline at end of file diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseResourceController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseResourceController.java new file mode 100644 index 0000000..262056b --- /dev/null +++ 
b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/BaseResourceController.java @@ -0,0 +1,94 @@ +package com.czsj.web.controller.bigdata; + +import com.ruoshui.bigdata.entity.BaseResource; +import com.ruoshui.bigdata.mapper.BaseResourceMapper; +import com.ruoshui.bigdata.util.AESUtil; +import com.ruoshui.core.biz.model.ReturnT; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@RestController +@RequestMapping("/api/base/resource") +@Api(tags = "基础建设-资源管理") +public class BaseResourceController { + + private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + @Autowired + private BaseResourceMapper baseResourceMapper; + + @ApiOperation("获取所有数据") + @GetMapping("/list") + @PreAuthorize("@ss.hasPermi('datax:resource:list')") + public ReturnT> selectList( + @RequestParam(value = "current", required = false, defaultValue = "1") int current, + @RequestParam(value = "size", required = false, defaultValue = "10") int size, + @RequestParam(value = "name", required = false) String name) { + // page list + List list = baseResourceMapper.findList((current - 1) * size,size,name); + Map maps = new HashMap<>(); + maps.put("recordsTotal", list.size()); // 过滤后的总记录数 + maps.put("data", list); // 分页列表 + return new ReturnT<>(maps); + } + + @ApiOperation("新增数据") + @PostMapping("/add") + @PreAuthorize("@ss.hasPermi('datax:resource:add')") + public ReturnT insert(HttpServletRequest request, @RequestBody BaseResource entity) { + entity.setUpdate_time(sdf.format(new Date())); + entity.setServerPassword(AESUtil.encrypt(entity.getServerPassword())); + this.baseResourceMapper.save(entity); + return ReturnT.SUCCESS; + } + + @ApiOperation("修改数据") + @PostMapping(value = "/update") + @PreAuthorize("@ss.hasPermi('datax:resource:edit')") + public ReturnT update(@RequestBody BaseResource entity) { + entity.setUpdate_time(sdf.format(new Date())); + //查询元数据 + BaseResource byId = baseResourceMapper.getById(entity.getId()); + if(entity.getServerPassword().equals(byId.getServerPassword())){ + entity.setServerPassword(AESUtil.encrypt(AESUtil.decrypt(entity.getServerPassword()))); + }else{ + entity.setServerPassword(AESUtil.encrypt(entity.getServerPassword())); + } + baseResourceMapper.update(entity); + return ReturnT.SUCCESS; + } + + @RequestMapping(value = "/remove", method = RequestMethod.POST) + @ApiOperation("删除数据") + @PreAuthorize("@ss.hasPermi('datax:resource:remove')") + public ReturnT delete(int id) { + int result = baseResourceMapper.delete(id); + return result != 1 ? 
ReturnT.FAIL : ReturnT.SUCCESS; + } + + @RequestMapping(value = "/getResource", method = RequestMethod.POST) + @ApiOperation("查询资源列表") + @PreAuthorize("@ss.hasPermi('datax:resource:query')") + public ReturnT getResource() { + List result = baseResourceMapper.getResource(); + return new ReturnT(result); + } + + @RequestMapping(value = "/getFileResource", method = RequestMethod.POST) + @ApiOperation("查询资源列表") + @PreAuthorize("@ss.hasPermi('datax:resource:query')") + public ReturnT getFileResource() { + List result = baseResourceMapper.getFileResource(); + return new ReturnT(result); + } + +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/DataxJsonController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/DataxJsonController.java new file mode 100644 index 0000000..d994a38 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/DataxJsonController.java @@ -0,0 +1,48 @@ +package com.czsj.web.controller.bigdata; + +import com.baomidou.mybatisplus.extension.api.R; +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.dto.DataXJsonBuildDto; +import com.ruoshui.bigdata.service.DataxJsonService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * Created by jingwk on 2022/05/05 + */ + +@RestController +@RequestMapping("api/dataxJson") +@Api(tags = "组装datax json的控制器") +public class DataxJsonController extends BaseController { + + @Autowired + private DataxJsonService dataxJsonService; + + + @PostMapping("/buildJson") + @ApiOperation("JSON构建") + public R buildJobJson(@RequestBody DataXJsonBuildDto dto) { + String key = "system_please_choose"; + if (dto.getReaderDatasourceId() == null) { + return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerDataSource")); + } + if (dto.getWriterDatasourceId() == null) { + return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerDataSource")); + } + if (CollectionUtils.isEmpty(dto.getReaderColumns())) { + return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerColumns")); + } + if (CollectionUtils.isEmpty(dto.getWriterColumns())) { + return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerColumns")); + } + return success(dataxJsonService.buildJobJson(dto)); + } + +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobApiController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobApiController.java new file mode 100644 index 0000000..8b0dd5b --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobApiController.java @@ -0,0 +1,145 @@ +package com.czsj.web.controller.bigdata; + + +import com.ruoshui.bigdata.core.conf.JobAdminConfig; +import com.ruoshui.bigdata.core.util.JacksonUtil; +import com.ruoshui.core.biz.AdminBiz; +import com.ruoshui.core.biz.model.HandleCallbackParam; +import com.ruoshui.core.biz.model.HandleProcessCallbackParam; +import com.ruoshui.core.biz.model.RegistryParam; +import com.ruoshui.core.biz.model.ReturnT; +import com.ruoshui.core.util.JobRemotingUtil; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; +import java.util.List; + +/** + * Created by xuxueli on 17/5/10. + */ +@RestController +@RequestMapping("/api") +public class JobApiController { + + @Autowired + private AdminBiz adminBiz; + + /** + * callback + * + * @param data + * @return + */ + @RequestMapping("/callback") + public ReturnT callback(HttpServletRequest request, @RequestBody(required = false) String data) { + // valid + if (JobAdminConfig.getAdminConfig().getAccessToken()!=null + && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0 + && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong."); + } + + // param + List callbackParamList = null; + try { + callbackParamList = JacksonUtil.readValue(data, List.class, HandleCallbackParam.class); + } catch (Exception e) { } + if (callbackParamList==null || callbackParamList.size()==0) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid."); + } + + // invoke + return adminBiz.callback(callbackParamList); + } + + /** + * callback + * + * @param data + * @return + */ + @RequestMapping("/processCallback") + public ReturnT processCallback(HttpServletRequest request, @RequestBody(required = false) String data) { + // valid + if (JobAdminConfig.getAdminConfig().getAccessToken()!=null + && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0 + && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong."); + } + + // param + List callbackParamList = null; + try { + callbackParamList = JacksonUtil.readValue(data, List.class, HandleProcessCallbackParam.class); + } catch (Exception e) { } + if (callbackParamList==null || callbackParamList.size()==0) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid."); + } + + // invoke + return adminBiz.processCallback(callbackParamList); + } + + + + /** + * registry + * + * @param data + * @return + */ + @RequestMapping("/registry") + public ReturnT registry(HttpServletRequest request, @RequestBody(required = false) String data) { + // valid + if (JobAdminConfig.getAdminConfig().getAccessToken()!=null + && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0 + && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) { + return new ReturnT(ReturnT.FAIL_CODE, "The access token is wrong."); + } + + // param + RegistryParam registryParam = null; + try { + registryParam = JacksonUtil.readValue(data, RegistryParam.class); + } catch (Exception e) {} + if (registryParam == null) { + return new ReturnT(ReturnT.FAIL_CODE, "The request data invalid."); + } + + // invoke + return adminBiz.registry(registryParam); + } + + /** + * registry remove + * + * @param data + * @return + */ + @RequestMapping("/registryRemove") + public ReturnT registryRemove(HttpServletRequest request, @RequestBody(required = false) String data) { + // valid + if (JobAdminConfig.getAdminConfig().getAccessToken()!=null + && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0 + && 
!JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong."); + } + + // param + RegistryParam registryParam = null; + try { + registryParam = JacksonUtil.readValue(data, RegistryParam.class); + } catch (Exception e) {} + if (registryParam == null) { + return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid."); + } + + // invoke + return adminBiz.registryRemove(registryParam); + } + + +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobCodeController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobCodeController.java new file mode 100644 index 0000000..23aba96 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobCodeController.java @@ -0,0 +1,76 @@ +package com.czsj.web.controller.bigdata; + +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.entity.JobInfo; +import com.ruoshui.bigdata.entity.JobLogGlue; +import com.ruoshui.bigdata.mapper.JobInfoMapper; +import com.ruoshui.bigdata.mapper.JobLogGlueMapper; +import com.ruoshui.core.biz.model.ReturnT; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.ui.Model; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import java.util.Date; + +import static com.ruoshui.core.biz.model.ReturnT.FAIL_CODE; + + +/** + * Created by jingwk on 2019/11/17 + */ +@RestController +@RequestMapping("/jobcode") +@Api(tags = "任务状态接口") +public class JobCodeController { + + @Autowired + private JobInfoMapper jobInfoMapper; + @Autowired + private JobLogGlueMapper jobLogGlueMapper; + + + @RequestMapping(value = "/save", method = RequestMethod.POST) + @ApiOperation("保存任务状态") + public ReturnT save(Model model, int id, String glueSource, String glueRemark) { + // valid + if (glueRemark == null) { + return new ReturnT<>(FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark"))); + } + if (glueRemark.length() < 4 || glueRemark.length() > 100) { + return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_remark_limit")); + } + JobInfo existsJobInfo = jobInfoMapper.loadById(id); + if (existsJobInfo == null) { + return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_jobid_invalid")); + } + + // update new code + existsJobInfo.setGlueSource(glueSource); + existsJobInfo.setGlueRemark(glueRemark); + existsJobInfo.setGlueUpdatetime(new Date()); + + existsJobInfo.setUpdateTime(new Date()); + jobInfoMapper.update(existsJobInfo); + + // log old code + JobLogGlue jobLogGlue = new JobLogGlue(); + jobLogGlue.setJobId(existsJobInfo.getId()); + jobLogGlue.setGlueType(existsJobInfo.getGlueType()); + jobLogGlue.setGlueSource(glueSource); + jobLogGlue.setGlueRemark(glueRemark); + + jobLogGlue.setAddTime(new Date()); + jobLogGlue.setUpdateTime(new Date()); + jobLogGlueMapper.save(jobLogGlue); + + // remove code backup more than 30 + jobLogGlueMapper.removeOld(existsJobInfo.getId(), 30); + + return ReturnT.SUCCESS; + } + +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobDatasourceController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobDatasourceController.java new file mode 100644 index 
0000000..55a3c3a --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobDatasourceController.java @@ -0,0 +1,140 @@ +package com.czsj.web.controller.bigdata; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.api.R; +import com.ruoshui.bigdata.core.util.LocalCacheUtil; +import com.ruoshui.bigdata.entity.JobDatasource; +import com.ruoshui.bigdata.service.JobDatasourceService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import java.io.IOException; +import java.io.Serializable; +import java.util.List; + +/** + * jdbc数据源配置控制器层 + * + * @author zhouhongfa@gz-yibo.com + * @version v1.0 + * @since 2019-07-30 + */ +@RestController +@RequestMapping("/api/jobJdbcDatasource") +@Api(tags = "jdbc数据源配置接口") +public class JobDatasourceController extends BaseController { + /** + * 服务对象 + */ + @Autowired + private JobDatasourceService jobJdbcDatasourceService; + + /** + * 分页查询所有数据 + * + * @return 所有数据 + */ + @GetMapping + @ApiOperation("分页查询所有数据") + @ApiImplicitParams( + {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true), + @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true), + @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"), + @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"), + @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔") + }) + @PreAuthorize("@ss.hasPermi('datax:Datasource:list')") + public R> selectAll() { + BaseForm form = new BaseForm(); + QueryWrapper query = (QueryWrapper) form.pageQueryWrapperCustom(form.getParameters(), new QueryWrapper()); + return success(jobJdbcDatasourceService.page(form.getPlusPagingQueryEntity(), query)); + } + + /** + * 获取所有数据源 + * @return + */ + @ApiOperation("获取所有数据源") + @GetMapping("/all") + @PreAuthorize("@ss.hasPermi('datax:Datasource:query')") + public R> selectAllDatasource() { + return success(this.jobJdbcDatasourceService.selectAllDatasource()); + } + + /** + * 通过主键查询单条数据 + * + * @param id 主键 + * @return 单条数据 + */ + @ApiOperation("通过主键查询单条数据") + @GetMapping("{id}") + @PreAuthorize("@ss.hasPermi('datax:Datasource:query')") + public R selectOne(@PathVariable Serializable id) { + return success(this.jobJdbcDatasourceService.getById(id)); + } + + /** + * 新增数据 + * + * @param entity 实体对象 + * @return 新增结果 + */ + @ApiOperation("新增数据") + @PostMapping + @PreAuthorize("@ss.hasPermi('datax:Datasource:add')") + public R insert(@RequestBody JobDatasource entity) { + return success(this.jobJdbcDatasourceService.save(entity)); + } + + /** + * 修改数据 + * + * @param entity 实体对象 + * @return 修改结果 + */ + @PutMapping + @ApiOperation("修改数据") + @PreAuthorize("@ss.hasPermi('datax:Datasource:edit')") + public R update(@RequestBody JobDatasource entity) { + LocalCacheUtil.remove(entity.getDatasourceName()); + JobDatasource d = jobJdbcDatasourceService.getById(entity.getId()); + if (null != d.getJdbcUsername() && 
entity.getJdbcUsername().equals(d.getJdbcUsername())) { + entity.setJdbcUsername(null); + } + if (null != entity.getJdbcPassword() && entity.getJdbcPassword().equals(d.getJdbcPassword())) { + entity.setJdbcPassword(null); + } + return success(this.jobJdbcDatasourceService.updateById(entity)); + } + + /** + * 删除数据 + * + * @param idList 主键结合 + * @return 删除结果 + */ + @DeleteMapping + @ApiOperation("删除数据") + @PreAuthorize("@ss.hasPermi('datax:Datasource:remove')") + public R delete(@RequestParam("idList") List idList) { + return success(this.jobJdbcDatasourceService.removeByIds(idList)); + } + + /** + * 测试数据源 + * @param jobJdbcDatasource + * @return + */ + @PostMapping("/test") + @ApiOperation("测试数据") + public R dataSourceTest (@RequestBody JobDatasource jobJdbcDatasource) throws IOException { + return success(jobJdbcDatasourceService.dataSourceTest(jobJdbcDatasource)); + } +} \ No newline at end of file diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobGroupController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobGroupController.java new file mode 100644 index 0000000..37b54ee --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobGroupController.java @@ -0,0 +1,179 @@ +package com.czsj.web.controller.bigdata; + +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.entity.JobGroup; +import com.ruoshui.bigdata.entity.JobRegistry; +import com.ruoshui.bigdata.mapper.JobGroupMapper; +import com.ruoshui.bigdata.mapper.JobInfoMapper; +import com.ruoshui.bigdata.mapper.JobRegistryMapper; +import com.ruoshui.core.biz.model.ReturnT; +import com.ruoshui.core.enums.RegistryConfig; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import java.util.*; + +/** + * Created by jingwk on 2019/11/17 + */ +@RestController +@RequestMapping("/api/jobGroup") +@Api(tags = "执行器管理接口") +public class JobGroupController { + + @Autowired + public JobInfoMapper jobInfoMapper; + @Autowired + public JobGroupMapper jobGroupMapper; + @Autowired + private JobRegistryMapper jobRegistryMapper; + + @GetMapping("/list") + @ApiOperation("执行器列表") + @PreAuthorize("@ss.hasPermi('datax:executor:list')") + public ReturnT> getExecutorList() { + return new ReturnT<>(jobGroupMapper.findAll()); + } + + @PostMapping("/save") + @ApiOperation("新建执行器") + @PreAuthorize("@ss.hasPermi('datax:executor:add')") + public ReturnT save(@RequestBody JobGroup jobGroup) { + + // valid + if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) { + return new ReturnT(500, (I18nUtil.getString("system_please_input") + "AppName")); + } + if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_appName_length")); + } + if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) { + return new ReturnT(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title"))); + } + if (jobGroup.getAddressType() != 0) { + if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_addressType_limit")); + } + String[] addresses = jobGroup.getAddressList().split(","); + for (String item : 
addresses) { + if (item == null || item.trim().length() == 0) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_registryList_invalid")); + } + } + } + + int ret = jobGroupMapper.save(jobGroup); + return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL; + } + + @PostMapping("/update") + @ApiOperation("更新执行器") + @PreAuthorize("@ss.hasPermi('datax:executor:edit')") + public ReturnT update(@RequestBody JobGroup jobGroup) { + // valid + if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) { + return new ReturnT(500, (I18nUtil.getString("system_please_input") + "AppName")); + } + if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_appName_length")); + } + if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) { + return new ReturnT(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title"))); + } + if (jobGroup.getAddressType() == 0) { + // 0=自动注册 + List registryList = findRegistryByAppName(jobGroup.getAppName()); + String addressListStr = null; + if (registryList != null && !registryList.isEmpty()) { + Collections.sort(registryList); + addressListStr = ""; + for (String item : registryList) { + addressListStr += item + ","; + } + addressListStr = addressListStr.substring(0, addressListStr.length() - 1); + } + jobGroup.setAddressList(addressListStr); + } else { + // 1=手动录入 + if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_addressType_limit")); + } + String[] addresses = jobGroup.getAddressList().split(","); + for (String item : addresses) { + if (item == null || item.trim().length() == 0) { + return new ReturnT(500, I18nUtil.getString("jobgroup_field_registryList_invalid")); + } + } + } + + int ret = jobGroupMapper.update(jobGroup); + return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL; + } + + private List findRegistryByAppName(String appNameParam) { + HashMap> appAddressMap = new HashMap<>(); + List list = jobRegistryMapper.findAll(RegistryConfig.DEAD_TIMEOUT, new Date()); + if (list != null) { + for (JobRegistry item : list) { + if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) { + String appName = item.getRegistryKey(); + List registryList = appAddressMap.get(appName); + if (registryList == null) { + registryList = new ArrayList<>(); + } + + if (!registryList.contains(item.getRegistryValue())) { + registryList.add(item.getRegistryValue()); + } + appAddressMap.put(appName, registryList); + } + } + } + return appAddressMap.get(appNameParam); + } + + @PostMapping("/remove") + @ApiOperation("移除执行器") + @PreAuthorize("@ss.hasPermi('datax:executor:remove')") + public ReturnT remove(int id) { + + // valid + int count = jobInfoMapper.pageListCount(0, 10, id, -1, null, null, 0,null); + if (count > 0) { + return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_0")); + } + + List allList = jobGroupMapper.findAll(); + if (allList.size() == 1) { + return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_1")); + } + + int ret = jobGroupMapper.remove(id); + return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL; + } + + @RequestMapping(value = "/loadById", method = RequestMethod.POST) + @ApiOperation("根据id获取执行器") + @PreAuthorize("@ss.hasPermi('datax:executor:query')") + public ReturnT loadById(int id) { + JobGroup jobGroup = jobGroupMapper.load(id); + return jobGroup != null ? 
new ReturnT<>(jobGroup) : new ReturnT<>(ReturnT.FAIL_CODE, null); + } + + @GetMapping("/query") + @ApiOperation("查询执行器") + @PreAuthorize("@ss.hasPermi('datax:executor:query')") + public ReturnT> get(@ApiParam(value = "执行器AppName") + @RequestParam(value = "appName", required = false) String appName, + @ApiParam(value = "执行器名称") + @RequestParam(value = "title", required = false) String title, + @ApiParam(value = "执行器地址列表") + @RequestParam(value = "addressList", required = false) String addressList) { + return new ReturnT<>(jobGroupMapper.find(appName, title, addressList)); + } + +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobInfoController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobInfoController.java new file mode 100644 index 0000000..ce304e2 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobInfoController.java @@ -0,0 +1,138 @@ +package com.czsj.web.controller.bigdata; + + +import com.ruoshui.bigdata.core.cron.CronExpression; +import com.ruoshui.bigdata.core.thread.JobTriggerPoolHelper; +import com.ruoshui.bigdata.core.trigger.TriggerTypeEnum; +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.dto.DataXBatchJsonBuildDto; +import com.ruoshui.bigdata.dto.TriggerJobDto; +import com.ruoshui.bigdata.entity.JobInfo; +import com.ruoshui.bigdata.service.JobService; +import com.ruoshui.core.biz.model.ReturnT; +import com.ruoshui.core.util.DateUtil; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + +/** + * index controller + * + * @author xuxueli 2015-12-19 16:13:16 + */ +@Api(tags = "任务配置接口") +@RestController +@RequestMapping("/api/job") +public class JobInfoController extends BaseController{ + + @Autowired + private JobService jobService; + + + @GetMapping("/pageList") + @ApiOperation("任务列表") + @PreAuthorize("@ss.hasPermi('datax:job:list')") + public ReturnT> pageList(@RequestParam(required = false, defaultValue = "0") int current, + @RequestParam(required = false, defaultValue = "10") int size, + int jobGroup, int triggerStatus, String jobDesc, String glueType, Integer[] projectIds) { + + return new ReturnT<>(jobService.pageList((current-1)*size, size, jobGroup, triggerStatus, jobDesc, glueType, 0, projectIds)); + } + + @GetMapping("/list") + @ApiOperation("全部任务列表") + @PreAuthorize("@ss.hasPermi('datax:job:query')") + public ReturnT> list(){ + return new ReturnT<>(jobService.list()); + } + + @PostMapping("/add") + @ApiOperation("添加任务") + @PreAuthorize("@ss.hasPermi('datax:job:add')") + public ReturnT add(HttpServletRequest request, @RequestBody JobInfo jobInfo) { + jobInfo.setUserId(getUserId()); + return jobService.add(jobInfo); + } + + @PostMapping("/update") + @ApiOperation("更新任务") + @PreAuthorize("@ss.hasPermi('datax:job:edit')") + public ReturnT update(HttpServletRequest request,@RequestBody JobInfo jobInfo) { + jobInfo.setUserId(getUserId()); + return jobService.update(jobInfo); + } + + @PostMapping(value = "/remove/{id}") + @ApiOperation("移除任务") + @PreAuthorize("@ss.hasPermi('datax:job:remove')") + public ReturnT remove(@PathVariable(value = "id") int id) { + 
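+        // Remove the job definition with the given id; deletion is delegated to JobService.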
return jobService.remove(id); + } + + @RequestMapping(value = "/stop",method = RequestMethod.POST) + @ApiOperation("停止任务") + @PreAuthorize("@ss.hasPermi('datax:job:startorstop')") + public ReturnT pause(int id) { + return jobService.stop(id); + } + + @RequestMapping(value = "/start",method = RequestMethod.POST) + @ApiOperation("开启任务") + @PreAuthorize("@ss.hasPermi('datax:job:startorstop')") + public ReturnT start(int id) { + return jobService.start(id); + } + + @PostMapping(value = "/trigger") + @ApiOperation("触发任务") + @PreAuthorize("@ss.hasPermi('datax:job:trigger')") + public ReturnT triggerJob(@RequestBody TriggerJobDto dto) { + // force cover job param + String executorParam=dto.getExecutorParam(); + if (executorParam == null) { + executorParam = ""; + } + JobTriggerPoolHelper.trigger(dto.getJobId(), TriggerTypeEnum.MANUAL, -1, null, executorParam); + return ReturnT.SUCCESS; + } + + @GetMapping("/nextTriggerTime") + @ApiOperation("获取近5次触发时间") + public ReturnT> nextTriggerTime(String cron) { + List result = new ArrayList<>(); + try { + CronExpression cronExpression = new CronExpression(cron); + Date lastTime = new Date(); + for (int i = 0; i < 5; i++) { + lastTime = cronExpression.getNextValidTimeAfter(lastTime); + if (lastTime != null) { + result.add(DateUtil.formatDateTime(lastTime)); + } else { + break; + } + } + } catch (ParseException e) { + return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid")); + } + return new ReturnT<>(result); + } + + @PostMapping("/batchAdd") + @ApiOperation("批量创建任务") + public ReturnT batchAdd(@RequestBody DataXBatchJsonBuildDto dto) throws IOException { + if (dto.getTemplateId() ==0) { + return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_temp"))); + } + return jobService.batchAdd(dto); + } +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobLogController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobLogController.java new file mode 100644 index 0000000..842b499 --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobLogController.java @@ -0,0 +1,177 @@ +package com.czsj.web.controller.bigdata; + + +import com.ruoshui.bigdata.core.kill.KillJob; +import com.ruoshui.bigdata.core.scheduler.JobScheduler; +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.entity.JobInfo; +import com.ruoshui.bigdata.entity.JobLog; +import com.ruoshui.bigdata.mapper.JobInfoMapper; +import com.ruoshui.bigdata.mapper.JobLogMapper; +import com.ruoshui.core.biz.ExecutorBiz; +import com.ruoshui.core.biz.model.LogResult; +import com.ruoshui.core.biz.model.ReturnT; +import com.ruoshui.core.util.DateUtil; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Created by jingwk on 2019/11/17 + */ +@RestController +@RequestMapping("/api/log") +@Api(tags = "任务运行日志接口") +public class JobLogController { + private static Logger logger = LoggerFactory.getLogger(JobLogController.class); + + @Autowired + public JobInfoMapper jobInfoMapper; + @Autowired + public JobLogMapper jobLogMapper; + + @GetMapping("/pageList") + 
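+    // Paged query of job run logs; filterTime is expected as "start - end" and is split on " - " below.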
@ApiOperation("运行日志列表") + @PreAuthorize("@ss.hasPermi('datax:joblog:list')") + public ReturnT> pageList( + @RequestParam(required = false, defaultValue = "0") int current, + @RequestParam(required = false, defaultValue = "10") int size, + int jobGroup, int jobId, int logStatus, String filterTime) { + + // parse param + Date triggerTimeStart = null; + Date triggerTimeEnd = null; + if (filterTime != null && filterTime.trim().length() > 0) { + String[] temp = filterTime.split(" - "); + if (temp.length == 2) { + triggerTimeStart = DateUtil.parseDateTime(temp[0]); + triggerTimeEnd = DateUtil.parseDateTime(temp[1]); + } + } + + // page query + List data = jobLogMapper.pageList((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus); + int cnt = jobLogMapper.pageListCount((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus); + + // package result + Map maps = new HashMap<>(); + maps.put("recordsTotal", cnt); // 总记录数 + maps.put("recordsFiltered", cnt); // 过滤后的总记录数 + maps.put("data", data); // 分页列表 + return new ReturnT<>(maps); + } + + @RequestMapping(value = "/logDetailCat", method = RequestMethod.GET) + @ApiOperation("运行日志详情") + @PreAuthorize("@ss.hasPermi('datax:joblog:logDetailCat')") + public ReturnT logDetailCat(String executorAddress, long triggerTime, long logId, int fromLineNum) { + try { + ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(executorAddress); + ReturnT logResult = executorBiz.log(triggerTime, logId, fromLineNum); + + // is end + if (logResult.getContent() != null && fromLineNum > logResult.getContent().getToLineNum()) { + JobLog jobLog = jobLogMapper.load(logId); + if (jobLog.getHandleCode() > 0) { + logResult.getContent().setEnd(true); + } + } + + return logResult; + } catch (Exception e) { + logger.error(e.getMessage(), e); + return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage()); + } + } + + @RequestMapping(value = "/logKill", method = RequestMethod.POST) + @ApiOperation("kill任务") + @PreAuthorize("@ss.hasPermi('datax:joblog:killJob')") + public ReturnT logKill(int id) { + // base check + JobLog log = jobLogMapper.load(id); + JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId()); + if (jobInfo == null) { + return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid")); + } + if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) { + return new ReturnT<>(500, I18nUtil.getString("joblog_kill_log_limit")); + } + + // request of kill + ReturnT runResult; + try { + ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(log.getExecutorAddress()); + runResult = executorBiz.kill(jobInfo.getId()); + } catch (Exception e) { + logger.error(e.getMessage(), e); + runResult = new ReturnT<>(500, e.getMessage()); + } + + if (ReturnT.SUCCESS_CODE == runResult.getCode()) { + log.setHandleCode(ReturnT.FAIL_CODE); + log.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? 
+            log.setHandleTime(new Date());
+            jobLogMapper.updateHandleInfo(log);
+            return new ReturnT<>(runResult.getMsg());
+        } else {
+            return new ReturnT<>(500, runResult.getMsg());
+        }
+    }
+
+    @PostMapping("/clearLog")
+    @ApiOperation("清理日志")
+    @PreAuthorize("@ss.hasPermi('datax:joblog:clearLog')")
+    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {
+
+        Date clearBeforeTime = null;
+        int clearBeforeNum = 0;
+        if (type == 1) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -1);   // clear logs older than one month
+        } else if (type == 2) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -3);   // clear logs older than three months
+        } else if (type == 3) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -6);   // clear logs older than six months
+        } else if (type == 4) {
+            clearBeforeTime = DateUtil.addYears(new Date(), -1);    // clear logs older than one year
+        } else if (type == 5) {
+            clearBeforeNum = 1000;      // keep only the most recent 1,000 log records
+        } else if (type == 6) {
+            clearBeforeNum = 10000;     // keep only the most recent 10,000 log records
+        } else if (type == 7) {
+            clearBeforeNum = 30000;     // keep only the most recent 30,000 log records
+        } else if (type == 8) {
+            clearBeforeNum = 100000;    // keep only the most recent 100,000 log records
+        } else if (type == 9) {
+            clearBeforeNum = 0;         // clear all log records
+        } else {
+            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_invalid"));
+        }
+
+        List<Long> logIds;
+        do {
+            logIds = jobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
+            if (logIds != null && logIds.size() > 0) {
+                jobLogMapper.clearLog(logIds);
+            }
+        } while (logIds != null && logIds.size() > 0);
+
+        return ReturnT.SUCCESS;
+    }
+
+    @ApiOperation("停止该job作业")
+    @PostMapping("/killJob")
+    @PreAuthorize("@ss.hasPermi('datax:joblog:killJob')")
+    public ReturnT<String> killJob(@RequestBody JobLog log) {
+        return KillJob.trigger(log.getId(), log.getTriggerTime(), log.getExecutorAddress(), log.getProcessId());
+    }
+}
diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobProjectController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobProjectController.java
new file mode 100644
index 0000000..fd0c6e2
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobProjectController.java
@@ -0,0 +1,117 @@
+package com.czsj.web.controller.bigdata;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.ruoshui.bigdata.entity.JobProject;
+import com.ruoshui.bigdata.service.JobProjectService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.access.prepost.PreAuthorize;
+import org.springframework.web.bind.annotation.*;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Project management controller
+ *
+ * @author jingwk 2022-05-24 16:13:16
+ */
+@RestController
+@RequestMapping("/api/jobProject")
+@Api(tags = "项目管理模块")
+public class JobProjectController extends BaseController {
+
+    @Autowired
+    private JobProjectService jobProjectService;
+
+
+    /**
+     * Paged query over all projects.
+     *
+     * @return one page of projects
+     */
+    @GetMapping
+    @ApiOperation("分页查询所有数据")
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:list')")
+    public R<IPage<JobProject>> selectAll(@RequestParam(value = "searchVal", required = false) String searchVal,
+                                          @RequestParam("pageSize") Integer pageSize,
+                                          @RequestParam("pageNo") Integer pageNo) {
+
+        return success(jobProjectService.getProjectListPaging(pageSize, pageNo, searchVal));
+    }
+
+    /**
+     * Get all projects (only those flagged as active).
+     *
+     * @return list of projects
+     */
+    @ApiOperation("获取所有数据")
+    @GetMapping("/list")
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:query')")
+    public R<List<JobProject>> selectList() {
+        QueryWrapper<JobProject> query = new QueryWrapper<>();
+        query.eq("flag", true);
+        return success(jobProjectService.list(query));
+    }
+
+    /**
+     * Look up a single record by primary key.
+     *
+     * @param id primary key
+     * @return the matching record
+     */
+    @ApiOperation("通过主键查询单条数据")
+    @GetMapping("{id}")
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:query')")
+    public R<JobProject> selectOne(@PathVariable Serializable id) {
+        return success(this.jobProjectService.getById(id));
+    }
+
+    /**
+     * Create a new record.
+     *
+     * @param entity entity to persist
+     * @return whether the insert succeeded
+     */
+    @ApiOperation("新增数据")
+    @PostMapping
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:add')")
+    public R<Boolean> insert(HttpServletRequest request, @RequestBody JobProject entity) {
+        entity.setUserId(getUserId());
+        return success(this.jobProjectService.save(entity));
+    }
+
+
+    /**
+     * Update an existing record; only the name and description are editable.
+     *
+     * @param entity entity carrying the new values
+     * @return whether the update succeeded
+     */
+    @PutMapping
+    @ApiOperation("修改数据")
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:edit')")
+    public R<Boolean> update(@RequestBody JobProject entity) {
+        JobProject project = jobProjectService.getById(entity.getId());
+        project.setName(entity.getName());
+        project.setDescription(entity.getDescription());
+        return success(this.jobProjectService.updateById(project));
+    }
+
+    /**
+     * Delete records by primary key.
+     *
+     * @param idList primary keys to delete
+     * @return whether the delete succeeded
+     */
+    @DeleteMapping
+    @ApiOperation("删除数据")
+    @PreAuthorize("@ss.hasPermi('datax:jobProject:remove')")
+    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
+        return success(this.jobProjectService.removeByIds(idList));
+    }
+}
\ No newline at end of file
diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobRegistryController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobRegistryController.java
new file mode 100644
index 0000000..54b4c65
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobRegistryController.java
@@ -0,0 +1,90 @@
+package com.czsj.web.controller.bigdata;
+
+import cn.hutool.core.util.StrUtil;
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.ruoshui.bigdata.entity.JobRegistry;
+import com.ruoshui.bigdata.service.JobRegistryService;
+import com.ruoshui.bigdata.util.PageUtils;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.Map;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/api/jobRegistry")
+@Api(tags = "执行器资源监控")
+public class JobRegistryController extends BaseController {
+
+    @Autowired
+    private JobRegistryService jobRegistryService;
+
+    /**
+     * Paged query over all registered executors.
+     *
+     * @return one page of registry records
+     */
+    @GetMapping
+    @ApiOperation("分页查询所有数据")
+    @ApiImplicitParams(
+            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
+                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
"query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"), + @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"), + @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔") + }) + public R> selectAll() { + BaseForm baseForm = new BaseForm(); + return success(this.jobRegistryService.page(baseForm.getPlusPagingQueryEntity(), pageQueryWrapperCustom(baseForm.getParameters()))); + } + + /** + * 自定义查询组装 + * + * @param map + * @return + */ + protected QueryWrapper pageQueryWrapperCustom(Map map) { + // mybatis plus 分页相关的参数 + Map pageHelperParams = PageUtils.filterPageParams(map); + //过滤空值,分页查询相关的参数 + Map columnQueryMap = PageUtils.filterColumnQueryParams(map); + + QueryWrapper queryWrapper = new QueryWrapper<>(); + + //排序 操作 + pageHelperParams.forEach((k, v) -> { + switch (k) { + case "ascs": + queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + case "descs": + queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v))); + break; + } + }); + + //遍历进行字段查询条件组装 + columnQueryMap.forEach((k, v) -> { + switch (k) { + case "datasourceName": + queryWrapper.like(StrUtil.toUnderlineCase(k), v); + break; + default: + queryWrapper.eq(StrUtil.toUnderlineCase(k), v); + } + }); + + return queryWrapper; + } +} diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobTemplateController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobTemplateController.java new file mode 100644 index 0000000..28e808c --- /dev/null +++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/JobTemplateController.java @@ -0,0 +1,89 @@ +package com.czsj.web.controller.bigdata; + + +import com.ruoshui.bigdata.core.cron.CronExpression; +import com.ruoshui.bigdata.core.util.I18nUtil; +import com.ruoshui.bigdata.entity.JobTemplate; +import com.ruoshui.bigdata.service.JobTemplateService; +import com.ruoshui.core.biz.model.ReturnT; +import com.ruoshui.core.util.DateUtil; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + +/** + * template controller + * + * @author jingwk 2019-12-22 16:13:16 + */ +@Api(tags = "任务配置接口") +@RestController +@RequestMapping("/api/jobTemplate") +public class JobTemplateController extends BaseController{ + + @Autowired + private JobTemplateService jobTemplateService; + + @GetMapping("/pageList") + @ApiOperation("任务模板列表") + @PreAuthorize("@ss.hasPermi('datax:jobTemplate:list')") + public ReturnT> pageList(@RequestParam(required = false, defaultValue = "0") int current, + @RequestParam(required = false, defaultValue = "10") int size, + int jobGroup, String jobDesc, String executorHandler, int userId, Integer[] projectIds) { + + return new ReturnT<>(jobTemplateService.pageList((current-1)*size, size, jobGroup, jobDesc, executorHandler, userId, projectIds)); + } + + @PostMapping("/add") + @ApiOperation("添加任务模板") + @PreAuthorize("@ss.hasPermi('datax:jobTemplate:add')") + public ReturnT add(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) { + jobTemplate.setUserId(getUserId()); + return 
+    }
+
+    @PostMapping("/update")
+    @ApiOperation("更新任务")
+    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:edit')")
+    public ReturnT<String> update(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
+        jobTemplate.setUserId(getUserId());
+        return jobTemplateService.update(jobTemplate);
+    }
+
+    @PostMapping(value = "/remove/{id}")
+    @ApiOperation("移除任务模板")
+    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:remove')")
+    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
+        return jobTemplateService.remove(id);
+    }
+
+    @GetMapping("/nextTriggerTime")
+    @ApiOperation("获取近5次触发时间")
+    public ReturnT<List<String>> nextTriggerTime(String cron) {
+        List<String> result = new ArrayList<>();
+        try {
+            CronExpression cronExpression = new CronExpression(cron);
+            Date lastTime = new Date();
+            for (int i = 0; i < 5; i++) {
+                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
+                if (lastTime != null) {
+                    result.add(DateUtil.formatDateTime(lastTime));
+                } else {
+                    break;
+                }
+            }
+        } catch (ParseException e) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
+        }
+        return new ReturnT<>(result);
+    }
+}
diff --git a/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/MetadataController.java b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/MetadataController.java
new file mode 100644
index 0000000..3661964
--- /dev/null
+++ b/czsj-admin/src/main/java/com/czsj/web/controller/bigdata/MetadataController.java
@@ -0,0 +1,106 @@
+package com.czsj.web.controller.bigdata;
+
+import com.baomidou.mybatisplus.extension.api.R;
+import com.ruoshui.bigdata.service.DatasourceQueryService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.List;
+
+/**
+ * Controller for querying database, table and column metadata over JDBC.
+ *
+ * @author jingwk
+ * @ClassName MetadataController
+ * @Version 2.1.2
+ * @since 2022/05/31 20:48
+ */
+@RestController
+@RequestMapping("api/metadata")
+@Api(tags = "jdbc数据库查询控制器")
+public class MetadataController extends BaseController {
+
+    @Autowired
+    private DatasourceQueryService datasourceQueryService;
+
+    /**
+     * Get the MongoDB database names for a datasource id.
+     *
+     * @param datasourceId datasource id
+     * @return database names
+     */
+    @GetMapping("/getDBs")
+    @ApiOperation("根据数据源id获取mongo库名")
+    public R<List<String>> getDBs(Long datasourceId) throws IOException {
+        return success(datasourceQueryService.getDBs(datasourceId));
+    }
+
+
+    /**
+     * Get the collection names for a datasource id and database name.
+     *
+     * @param datasourceId datasource id
+     * @return collection names
+     */
+    @GetMapping("/collectionNames")
+    @ApiOperation("根据数据源id,dbname获取CollectionNames")
+    public R<List<String>> getCollectionNames(Long datasourceId, String dbName) throws IOException {
+        return success(datasourceQueryService.getCollectionNames(datasourceId, dbName));
+    }
+
+    /**
+     * Get the table schemas (e.g. PostgreSQL schemas) for a datasource id.
+     *
+     * @param datasourceId datasource id
+     * @return schema names
+     */
+    @GetMapping("/getDBSchema")
+    @ApiOperation("根据数据源id获取 db schema")
+    public R<List<String>> getTableSchema(Long datasourceId) {
+        return success(datasourceQueryService.getTableSchema(datasourceId));
+    }
+
+    /**
+     * Get the available table names for a datasource id.
+     *
+     * @param datasourceId datasource id
+     * @return table names
+     */
+    @GetMapping("/getTables")
+    @ApiOperation("根据数据源id获取可用表名")
+    public R<List<String>> getTableNames(Long datasourceId, String tableSchema) throws IOException {
+        return
success(datasourceQueryService.getTables(datasourceId,tableSchema)); + } + + /** + * 根据数据源id和表名获取所有字段 + * + * @param datasourceId 数据源id + * @param tableName 表名 + * @return + */ + @GetMapping("/getColumns") + @ApiOperation("根据数据源id和表名获取所有字段") + public R> getColumns(Long datasourceId, String tableName) throws IOException { + return success(datasourceQueryService.getColumns(datasourceId, tableName)); + } + + /** + * 根据数据源id和sql语句获取所有字段 + * + * @param datasourceId 数据源id + * @param querySql 表名 + * @return + */ + @GetMapping("/getColumnsByQuerySql") + @ApiOperation("根据数据源id和sql语句获取所有字段") + public R> getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException { + return success(datasourceQueryService.getColumnsByQuerySql(datasourceId, querySql)); + } +} diff --git a/czsj-admin/src/main/lib/HiveJDBC41.jar b/czsj-admin/src/main/lib/HiveJDBC41.jar new file mode 100644 index 0000000..098d8be Binary files /dev/null and b/czsj-admin/src/main/lib/HiveJDBC41.jar differ diff --git a/czsj-admin/src/main/lib/ojdbc6-11.2.0.3.jar b/czsj-admin/src/main/lib/ojdbc6-11.2.0.3.jar new file mode 100644 index 0000000..01da074 Binary files /dev/null and b/czsj-admin/src/main/lib/ojdbc6-11.2.0.3.jar differ diff --git a/czsj-admin/src/main/lib/sql-1.10.0.jar b/czsj-admin/src/main/lib/sql-1.10.0.jar new file mode 100644 index 0000000..a8e609f Binary files /dev/null and b/czsj-admin/src/main/lib/sql-1.10.0.jar differ diff --git a/czsj-admin/src/main/lib/sqljdbc4-4.0.jar b/czsj-admin/src/main/lib/sqljdbc4-4.0.jar new file mode 100644 index 0000000..d6b7f6d Binary files /dev/null and b/czsj-admin/src/main/lib/sqljdbc4-4.0.jar differ diff --git a/czsj-admin/src/main/resources/logging/logback-dev.xml b/czsj-admin/src/main/resources/logging/logback-dev.xml new file mode 100644 index 0000000..d90d2d0 --- /dev/null +++ b/czsj-admin/src/main/resources/logging/logback-dev.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - [%logger{0}:%L - %M] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/czsj-admin/src/main/resources/logging/logback-docker.xml b/czsj-admin/src/main/resources/logging/logback-docker.xml new file mode 100644 index 0000000..41ab524 --- /dev/null +++ b/czsj-admin/src/main/resources/logging/logback-docker.xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - [%logger{0} - %M] %msg%n + + + + + ${LOG_HOME}/info.log + + INFO + + + + ${LOG_HOME}/old/info.log.%d{yyyy-MM-dd}.%i + + + ${LOG_SIZE} + + + ${LOG_NUM} + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [TxId : %X{PtxId}] %-5level %logger{50} - [%logger{0} - %M] %msg%n + + + + + ${LOG_HOME}/error.log + + + ERROR + ACCEPT + DENY + + + + ${LOG_HOME}/old/error.log.%d{yyyy-MM-dd}.%i + + + ${LOG_SIZE} + + !--日志文件保留天数--> + ${LOG_NUM} + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger - [%logger{0} - %M] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/czsj-admin/src/main/resources/logging/logback-prod.xml b/czsj-admin/src/main/resources/logging/logback-prod.xml new file mode 100644 index 0000000..9e3b548 --- /dev/null +++ b/czsj-admin/src/main/resources/logging/logback-prod.xml @@ -0,0 +1,87 @@ + + + + + + + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}[%M:%L] %msg%n + + + + + ${LOG_HOME}/info.log + + + + ${LOG_HOME}/old/info.log.%d{yyyy-MM-dd}.%i + + + ${LOG_SIZE} + + + ${LOG_NUM} + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] 
%-5level [%logger{50} - %M:%L] %msg%n + + + + + ${LOG_HOME}/error.log + + + ERROR + ACCEPT + DENY + + + + ${LOG_HOME}/old/error.log.%d{yyyy-MM-dd}.%i + + + ${LOG_SIZE} + + + ${LOG_NUM} + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%logger - %M:%L] %msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/czsj-admin/src/main/resources/templates/api_1.0.0.docx b/czsj-admin/src/main/resources/templates/api_1.0.0.docx new file mode 100644 index 0000000..e681b97 Binary files /dev/null and b/czsj-admin/src/main/resources/templates/api_1.0.0.docx differ diff --git a/czsj-admin/src/main/resources/templates/metadata_1.0.0.doc b/czsj-admin/src/main/resources/templates/metadata_1.0.0.doc new file mode 100644 index 0000000..54cf1d4 --- /dev/null +++ b/czsj-admin/src/main/resources/templates/metadata_1.0.0.doc @@ -0,0 +1,3 @@ + + +数据库表结构文档数据库名: MERGEFIELD database \* MERGEFORMAT «database»文档版本:1.0.0文档描述:数据库 MERGEFIELD database \* MERGEFORMAT «database»详细设计文档 MERGEFIELD TableStart:TableList \* MERGEFORMAT «TableStart:TableList»名: MERGEFIELD tableName \* MERGEFORMAT «tableName»说明:«tableComment» 数据列:名称数据类型长度精度小数位允许空值主键默认值说明 MERGEFIELD TableStart:ColumnList \* MERGEFORMAT «TableStart:ColumnList» MERGEFIELD columnPosition \* MERGEFORMAT «columnPosition» MERGEFIELD columnName \* MERGEFORMAT «columnName» MERGEFIELD dataType \* MERGEFORMAT «dataType» MERGEFIELD dataLength \* MERGEFORMAT «dataLength» MERGEFIELD dataPrecision \* MERGEFORMAT «dataPrecision» MERGEFIELD dataScale \* MERGEFORMAT «dataScale» MERGEFIELD columnNullable \* MERGEFORMAT «columnNullable» MERGEFIELD columnKey \* MERGEFORMAT «columnKey» MERGEFIELD dataDefault \* MERGEFORMAT «dataDefault» MERGEFIELD columnComment \* MERGEFORMAT «columnComment» MERGEFIELD TableEnd:ColumnList \* MERGEFORMAT «TableEnd:ColumnList» MERGEFIELD TableEnd:TableList \* MERGEFORMAT «TableEnd:TableList»2052-11.1.0.9740数据库表结构文档screwAdministrator62020-07-30T03:20:00Z2020-07-30T07:41:00Z11811771010Microsoft Office Word082falsefalse1185falsefalse16.0000 \ No newline at end of file
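Note: JobInfoController and JobTemplateController both expose a /nextTriggerTime endpoint that previews the next five fire times of a cron expression. The standalone sketch below mirrors that calculation for reference only; it assumes Quartz's org.quartz.CronExpression is available (the bundled com.ruoshui.bigdata.core.cron.CronExpression used above exposes the same constructor and getNextValidTimeAfter method) and substitutes a plain SimpleDateFormat for DateUtil.formatDateTime.

    import org.quartz.CronExpression;

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.ArrayList;
    import java.util.Date;
    import java.util.List;

    public class NextTriggerTimePreview {

        // Compute the next five fire times of a cron expression, starting from "now",
        // formatted the same way the admin UI displays them.
        static List<String> nextFive(String cron) throws ParseException {
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            List<String> result = new ArrayList<>();
            CronExpression expression = new CronExpression(cron); // throws ParseException for an invalid cron
            Date last = new Date();
            for (int i = 0; i < 5; i++) {
                last = expression.getNextValidTimeAfter(last);
                if (last == null) {
                    break; // no further fire times
                }
                result.add(fmt.format(last));
            }
            return result;
        }

        public static void main(String[] args) throws ParseException {
            // e.g. every 10 minutes
            System.out.println(nextFive("0 0/10 * * * ?"));
        }
    }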