【feat】基于若水,实现admin模块的整合

This commit is contained in:
Kris 2024-12-31 10:38:53 +08:00
parent 40d81a8dfe
commit 08fb14d3b8
33 changed files with 2740 additions and 0 deletions

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Assembly descriptor: builds an exploded "dir" distribution containing
  systemd service files, launch scripts, configuration and runtime jars.
  Fix: added the XML declaration and the assembly-plugin namespace/schema,
  consistent with the sibling tar.gz descriptor in this module.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
    <id>dist</id>
    <formats>
        <format>dir</format>
    </formats>
    <includeBaseDirectory>false</includeBaseDirectory>
    <fileSets>
        <!-- systemd unit files, normalized to unix line endings -->
        <fileSet>
            <directory>${basedir}/src/main/bin</directory>
            <lineEnding>unix</lineEnding>
            <outputDirectory>service</outputDirectory>
            <includes>
                <include>*.service</include>
            </includes>
        </fileSet>
        <!-- shell launch scripts: unix line endings, marked executable -->
        <fileSet>
            <directory>${basedir}/src/main/bin</directory>
            <lineEnding>unix</lineEnding>
            <outputDirectory/>
            <fileMode>0755</fileMode>
            <includes>
                <include>*.sh</include>
            </includes>
        </fileSet>
        <!-- Windows launch scripts keep CRLF line endings -->
        <fileSet>
            <directory>${basedir}/src/main/bin</directory>
            <lineEnding>windows</lineEnding>
            <outputDirectory/>
            <includes>
                <include>*.bat</include>
            </includes>
        </fileSet>
        <!-- compiled configuration files from the build output -->
        <fileSet>
            <directory>target/classes</directory>
            <outputDirectory>config</outputDirectory>
            <includes>
                <include>**/*.yml</include>
                <include>**/*.xml</include>
            </includes>
        </fileSet>
    </fileSets>
    <dependencySets>
        <!-- runtime dependency jars, not unpacked -->
        <dependencySet>
            <outputDirectory>lib</outputDirectory>
            <unpack>false</unpack>
        </dependencySet>
    </dependencySets>
</assembly>

View File

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Assembly descriptor: builds the tar.gz admin distribution with
  bin/, logs/, conf/ and lib/ laid out under an "admin" directory.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>dist</id>
<formats>
<format>tar.gz</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<!-- runtime dependencies (including the project artifact itself) -->
<dependencySet>
<useProjectArtifact>true</useProjectArtifact>
<outputDirectory>admin/lib</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
<fileSets>
<!-- launch scripts: unix line endings, executable -->
<fileSet>
<lineEnding>unix</lineEnding>
<directory>./src/main/bin</directory>
<outputDirectory>admin/bin</outputDirectory>
<includes>
<include>**/*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
<!-- empty logs directory placeholder -->
<fileSet>
<directory>./src/main/logs</directory>
<outputDirectory>admin/logs</outputDirectory>
</fileSet>
<!-- configuration resources shipped under admin/conf -->
<fileSet>
<directory>./src/main/resources</directory>
<includes>
<include>*.properties</include>
<include>logback.xml</include>
<include>application.yml</include>
<include>mapper/**/**</include>
<include>i18n/**</include>
<include>static/**</include>
</includes>
<outputDirectory>admin/conf</outputDirectory>
</fileSet>
<!-- extra, non-Maven jars bundled alongside the dependencies -->
<fileSet>
<directory>src/main/lib</directory>
<outputDirectory>admin/lib</outputDirectory>
</fileSet>
</fileSets>
</assembly>

View File

@ -0,0 +1,9 @@
@echo off
rem Windows launcher for the admin application.
rem %~dp0 = directory containing this script (with trailing backslash).
set home=%~dp0
rem conf/, lib/ and logs/ are siblings of the bin directory.
set conf_dir=%home%..\conf
set lib_dir=%home%..\lib\*
set log_dir=%home%..\logs
rem Run with the standalone profile; classpath = conf dir + every jar in lib.
java -Dspring.profiles.active=standalone -Dlogging.file=%log_dir%\dbApi.log -classpath %conf_dir%;%lib_dir% com.larkmidtable.admin.AdminApplication
pause

View File

@ -0,0 +1,266 @@
#!/bin/bash
#
# Control script for the admin server: start/stop/shutdown/restart.
# Display name used in log messages.
FRIEND_NAME=ADMIN
# Spring Boot entry class to launch.
MAIN_CLASS=com.larkmidtable.admin.AdminApplication
# Environment file (KEY=VALUE) read from the bin directory; callers may
# override by exporting ENV_FILE before invoking this script.
if [ ! ${ENV_FILE} ]; then
ENV_FILE="env.properties"
fi
# Poll interval (seconds) while waiting for start/stop to complete.
SLEEP_TIMEREVAL_S=2
# Print the absolute directory containing this script, following any chain
# of symlinks to the real location.
abs_path(){
    SOURCE="${BASH_SOURCE[0]}"
    while [ -h "${SOURCE}" ]; do
        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
        SOURCE="$(readlink "${SOURCE}")"
        # A relative symlink target is resolved against the link's directory.
        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
    done
    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
}
# LOG LEVEL MESSAGE — write a timestamped log line to stdout and ${SHELL_LOG}.
function LOG(){
    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
}
# Check that a runnable java binary exists (under JAVA_HOME when set,
# otherwise on PATH). Returns 0 when available, 1 otherwise.
verify_java_env(){
    if [ "x${JAVA_HOME}" != "x" ]; then
        ${JAVA_HOME}/bin/java -version >/dev/null 2>&1
    else
        java -version >/dev/null 2>&1
    fi
    if [ $? -ne 0 ]; then
        cat 1>&2 <<EOF
+========================================================================+
| Error: Java Environment is not availiable, Please check your JAVA_HOME |
+------------------------------------------------------------------------+
EOF
        return 1
    fi
    return 0
}
# Read ${BIN}/${ENV_FILE} line by line and define each KEY=VALUE pair as a
# shell variable; dots in keys are rewritten to underscores, and keys that
# look like comments (leading '#') are skipped.
load_env(){
    LOG INFO "load environment variables"
    while read line
    do
        if [[ ! -z $(echo "${line}" | grep "=") ]]; then
            key=${line%%=*}
            value=${line#*=}
            key1=$(echo ${key} | tr '.' '_')
            if [ -z $(echo "${key1}" | grep -P '\s*#+.*') ]; then
                # eval makes the value visible as a plain shell variable
                eval "${key1}=${value}"
            fi
        fi
    done < "${BIN}/${ENV_FILE}"
}
# ---- bootstrap: resolve paths, load env file, verify the JVM ----
BIN=`abs_path`
SHELL_LOG="${BIN}/console.out"
load_env
#verify environment
verify_java_env
ret=$?
if [ ${ret} -ne 0 ]; then
    # BUGFIX: this previously ran `exit $?`, but by then $? held the status
    # of the [ ] test (0 on this branch), so a missing JVM exited with
    # success. Propagate the captured status instead.
    exit ${ret}
fi
# Default locations and credentials; each may be overridden via the env file.
if [[ ! ${SERVICE_LOG_PATH} ]]; then
    SERVICE_LOG_PATH=${BIN}/../logs
fi
if [[ ! ${SERVICE_CONF_PATH} ]]; then
    SERVICE_CONF_PATH=${BIN}/../conf
fi
if [[ ! ${DATA_PATH} ]]; then
    DATA_PATH=${BIN}/../data
fi
if [[ ! ${MAIL_USERNAME} ]]; then
    MAIL_USERNAME="flinkx"
fi
if [[ ! ${MAIL_PASSWORD} ]]; then
    MAIL_PASSWORD="123456"
fi
if [[ ! ${JAVA_OPTS} ]]; then
    JAVA_OPTS=" -Xms2g -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8"
fi
if [[ ! ${REMOTE_DEBUG_SWITCH} ]]; then
    REMOTE_DEBUG_SWITCH=false
fi
if [[ ! ${REMOTE_DEBUG_PORT} ]]; then
    REMOTE_DEBUG_PORT="8089"
fi
LIB_PATH=${BIN}/../lib
USER_DIR=${BIN}/../
# Classpath: every jar under lib plus the conf directory.
CLASSPATH=${LIB_PATH}"/*:"${SERVICE_CONF_PATH}":."
# Optional remote debugging over JDWP.
if [ ${REMOTE_DEBUG_SWITCH} == true ]; then
    JAVA_OPTS=${JAVA_OPTS}" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${REMOTE_DEBUG_PORT}"
fi
JAVA_OPTS=${JAVA_OPTS}" -XX:HeapDumpPath="${SERVICE_LOG_PATH}" -Dlog.path="${SERVICE_LOG_PATH}
JAVA_OPTS=${JAVA_OPTS}" -Duser.dir="${USER_DIR}
JAVA_OPTS=${JAVA_OPTS}" -Ddata.path="${DATA_PATH}" -Dmail.username="${MAIL_USERNAME}" -Dmail.password="${MAIL_PASSWORD}
if [ "x"${PID_FILE_PATH} != "x" ]; then
    JAVA_OPTS=${JAVA_OPTS}" -Dpid.file="${PID_FILE_PATH}
fi
JAVA_OPTS=${JAVA_OPTS}" -Dlogging.config="${SERVICE_CONF_PATH}"/logback.xml"
JAVA_OPTS=${JAVA_OPTS}" -classpath "${CLASSPATH}
# Final launch command and the jps binary used for process discovery.
if [ "x${JAVA_HOME}" != "x" ]; then
    EXE_JAVA=${JAVA_HOME}"/bin/java "${JAVA_OPTS}" "${MAIN_CLASS}
    JPS=${JAVA_HOME}/bin/jps
else
    EXE_JAVA="java "${JAVA_OPTS}" "${MAIN_CLASS}
    JPS="jps"
fi
# Print the list of sub-commands accepted by this control script.
usage(){
    printf '%s\n' " usage is [start|stop|shutdown|restart]"
}
# check if the process still in jvm
# status_class FRIEND_NAME MAIN_CLASS
# Returns 0 when the server process appears in the jps table (matched by the
# pid stored in PID_FILE_PATH when configured, otherwise by main class name),
# and 1 when it does not.
status_class(){
    local p=""
    if [ "x"${PID_FILE_PATH} != "x" ]; then
        if [ -f ${PID_FILE_PATH} ]; then
            local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
            if [ "x"${pid_in_file} != "x" ]; then
                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
            fi
        fi
    else
        p=`${JPS} -l | grep "$2" | awk '{print $1}'`
    fi
    if [ -n "$p" ]; then
        # echo "$1 ($2) is still running with pid $p"
        return 0
    else
        # echo "$1 ($2) does not appear in the java process table"
        return 1
    fi
}
# wait_for_startup TIMEOUT_S
# Poll every SLEEP_TIMEREVAL_S seconds until the process shows up in the
# java process table; returns 0 on success. On timeout the WHOLE script
# exits with status 1 (note: exit, not return).
wait_for_startup(){
    local now_s=`date '+%s'`
    local stop_s=$((${now_s} + $1))
    while [ ${now_s} -le ${stop_s} ];do
        status_class ${FRIEND_NAME} ${MAIN_CLASS}
        if [ $? -eq 0 ]; then
            return 0
        fi
        sleep ${SLEEP_TIMEREVAL_S}
        now_s=`date '+%s'`
    done
    exit 1
}
# wait_for_stop TIMEOUT_S
# Poll until the process disappears from the java process table; returns 0
# once gone, or 1 when it is still present after TIMEOUT_S seconds.
wait_for_stop(){
    local now_s=`date '+%s'`
    local stop_s=$((${now_s} + $1))
    while [ ${now_s} -le ${stop_s} ];do
        status_class ${FRIEND_NAME} ${MAIN_CLASS}
        if [ $? -eq 1 ]; then
            return 0
        fi
        sleep ${SLEEP_TIMEREVAL_S}
        now_s=`date '+%s'`
    done
    return 1
}
# Start the server in the background unless it is already running, then
# wait up to 20 seconds for it to appear in the java process table.
start_m(){
    status_class ${FRIEND_NAME} ${MAIN_CLASS}
    if [ $? -eq 0 ]; then
        LOG INFO "${FRIEND_NAME} has been started in process"
        exit 0
    fi
    LOG INFO ${EXE_JAVA}
    # Launch detached; stdout/stderr go to the shell log.
    nohup ${EXE_JAVA} >${SHELL_LOG} 2>&1 &
    LOG INFO "Waiting ${FRIEND_NAME} to start complete ..."
    wait_for_startup 20
    if [ $? -eq 0 ]; then
        LOG INFO "${FRIEND_NAME} start success"
        return 0
    else
        LOG ERROR "${FRIEND_NAME} start exceeded over 20s" >&2
        return 1
    fi
}
# Stop the server by force-killing its pid, located via PID_FILE_PATH when
# configured, otherwise by matching MAIN_CLASS in the jps table.
stop_m(){
    local p=""
    if [ "x"${PID_FILE_PATH} != "x" ]; then
        if [ -f ${PID_FILE_PATH} ]; then
            local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
            if [ "x"${pid_in_file} != "x" ]; then
                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
            fi
        fi
    else
        p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
    fi
    if [ -z "${p}" ]; then
        LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
        return 0
    fi
    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
    # NOTE(review): SIGKILL gives the JVM no chance to shut down cleanly;
    # consider a plain TERM with a grace period first.
    kill -9 ${p}
    LOG INFO "Stop successful..."
}
# Like stop_m, but dispatches on the OS: uses taskkill when running under
# Cygwin on Windows, plain kill -9 elsewhere.
shutdown_m(){
    local p=""
    if [ "x"${PID_FILE_PATH} != "x" ]; then
        if [ -f ${PID_FILE_PATH} ]; then
            local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
            if [ "x"${pid_in_file} != "x" ]; then
                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
            fi
        fi
    else
        p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
    fi
    if [ -z "${p}" ]; then
        LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
        return 0
    fi
    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
    # BUGFIX: the pattern was "CYCGWIN*", which can never match `uname`
    # output on Cygwin ("CYGWIN_NT-..."), so the taskkill branch was
    # unreachable and kill -9 always ran.
    case "`uname`" in
        CYGWIN*) taskkill /F /PID "${p}" ;;
        *) kill -9 "${p}" ;;
    esac
}
# Restart = stop then start; the script exits with start_m's status.
restart_m(){
    stop_m
    if [ $? -eq 0 ]; then
        start_m
        exit $?
    else
        LOG ERROR "${FRIEND_NAME} restart fail" >&2
        exit 1
    fi
}
# ---- command dispatch: first argument selects the action ----
if [ ! $1 ]; then
    usage
    exit 1;
fi
case $1 in
    start) start_m;;
    stop) stop_m;;
    shutdown) shutdown_m;;
    restart) restart_m;;
    *)
        usage
        exit 1
        ;;
esac
exit $?

View File

@ -0,0 +1,202 @@
#!/bin/bash
# One-time configuration script for the admin server: prepares the
# log/conf/data directories, copies configuration files into place and
# optionally initializes the MySQL database from SQL_SOURCE_PATH.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
SHELL_LOG="${DIR}/console.out"
SERVER_NAME="admin"
USER=`whoami`
# Safe mode leaves existing directories/files untouched (see --unsafe).
SAFE_MODE=true
SUDO_USER=false
ENV_FILE_PATH="${DIR}/env.properties"
# Print the accepted command-line options.
usage(){
    printf "Configure usage:\n"
    printf "\t%-10s %-10s %-2s \n" --server "server-name" "Name of admin server"
    printf "\t%-10s %-10s %-2s \n" --unsafe "unsafe mode" "Will clean the directory existed"
    printf "\t%-10s %-10s %-2s \n" --safe "safe mode" "Will not modify the directory existed (Default)"
    printf "\t%-10s %-10s %-2s \n" "-h|--help" "usage" "List help document"
}
# LOG LEVEL MESSAGE — timestamped log line to stdout and ${SHELL_LOG}.
LOG(){
    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
}
# interact_echo PROMPT — repeatedly ask a yes/no question until the user
# answers Y/y (returns 0) or N/n (returns 1).
interact_echo(){
    while [ 1 ]; do
        read -p "$1 (Y/N)" yn
        if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
            return 0
        elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
            return 1
        else
            # BUGFIX: corrected the misspelled "choise" in this user-facing
            # prompt message.
            echo "Unknown choice: [$yn], please choose again."
        fi
    done
}
# Return 0 when the current user can validate sudo credentials
# (NOTE(review): `sudo -v` may still prompt for a password interactively).
is_sudo_user(){
    sudo -v >/dev/null 2>&1
}
# Print the real directory of this script, following symlinks.
# NOTE(review): reuses the global names SOURCE/DIR, but callers invoke it
# inside a command substitution (subshell), so the parent shell's DIR is
# not clobbered.
abs_path(){
    SOURCE="${BASH_SOURCE[0]}"
    while [ -h "${SOURCE}" ]; do
        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
        SOURCE="$(readlink "${SOURCE}")"
        # Relative symlink targets resolve against the link's directory.
        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
    done
    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
}
# check_exist PATH FATAL — log when PATH already exists; when FATAL is
# "true", terminate the whole configure program (exit 0).
check_exist(){
    if test -e "$1"; then
        LOG INFO "Directory or file: [$1] has been exist"
        if [ $2 == true ]; then
            LOG INFO "Configure program will shutdown..."
            exit 0
        fi
    fi
}
# copy_replace NAME — replace ${CONF_PATH}/NAME with the packaged copy from
# ${DIR}/../conf/NAME. In safe mode an existing target aborts the program
# (via check_exist) instead of being overwritten.
copy_replace(){
    file_name=$1
    if test -e "${CONF_PATH}/${file_name}";then
        if [ ${SAFE_MODE} == true ]; then
            check_exist "${CONF_PATH}/${file_name}" true
        fi
        LOG INFO "Delete file or directory: [${CONF_PATH}/${file_name}]"
        rm -rf ${CONF_PATH}/${file_name}
    fi
    if test -e "${DIR}/../conf/${file_name}";then
        LOG INFO "Copy from ${DIR}/../conf/${file_name}"
        cp -R ${DIR}/../conf/${file_name} ${CONF_PATH}/
    fi
}
# mkdir_p PATH — create PATH (recursively) when missing; uses sudo and
# chowns the result to the current user when sudo is available. In safe
# mode an existing path is only logged (non-fatal).
mkdir_p(){
    if [ ${SAFE_MODE} == true ]; then
        check_exist $1 false
    fi
    if [ ! -d $1 ]; then
        LOG INFO "Creating directory: ["$1"]."
        #mkdir -p $1
        if [ ${SUDO_USER} == true ]; then
            sudo mkdir -p $1 && sudo chown -R ${USER} $1
        else
            mkdir -p $1
        fi
    fi
}
# ---- command-line parsing ----
# ${!OPTIND} reads the next unshifted positional argument; unknown tokens
# terminate the loop.
while [ 1 ]; do
    case ${!OPTIND} in
        --server)
            SERVER_NAME=$2
            shift 2
            ;;
        --unsafe)
            SAFE_MODE=false
            shift 1
            ;;
        --safe)
            SAFE_MODE=true
            shift 1
            ;;
        --help|-h)
            usage
            exit 0
            ;;
        *)
            break
            ;;
    esac
done
# Detect whether sudo is usable; mkdir_p falls back to plain mkdir otherwise.
is_sudo_user
if [ $? == 0 ]; then
    SUDO_USER=true
fi
BIN=`abs_path`
# Short server name used for per-server sub-directories ("flinkx-" prefix
# stripped when present).
SERVER_NAME_SIMPLE=${SERVER_NAME/flinkx-/}
LOG_PATH=${BIN}/../logs
# When BASE_LOG_DIR / BASE_CONF_DIR / BASE_DATA_DIR are exported, relocate
# the corresponding path and write it back into the env file via sed.
if [ "x${BASE_LOG_DIR}" != "x" ]; then
    LOG_PATH=${BASE_LOG_DIR}/${SERVER_NAME_SIMPLE}
    sed -ri "s![#]?(WEB_LOG_PATH=)\S*!\1${LOG_PATH}!g" ${ENV_FILE_PATH}
fi
CONF_PATH=${BIN}/../conf
if [ "x${BASE_CONF_DIR}" != "x" ]; then
    CONF_PATH=${BASE_CONF_DIR}/${SERVER_NAME_SIMPLE}
    sed -ri "s![#]?(WEB_CONF_PATH=)\S*!\1${CONF_PATH}!g" ${ENV_FILE_PATH}
fi
DATA_PATH=${BIN}/../data
if [ "x${BASE_DATA_DIR}" != "x" ]; then
    DATA_PATH=${BASE_DATA_DIR}/${SERVER_NAME_SIMPLE}
    sed -ri "s![#]?(DATA_PATH=)\S*!\1${DATA_PATH}!g" ${ENV_FILE_PATH}
fi
echo "Start to make directory"
# Start to make directory
LOG INFO "\033[1m Start to build directory\033[0m"
mkdir_p ${LOG_PATH}
mkdir_p ${CONF_PATH}
mkdir_p ${DATA_PATH}
# Only copy configuration when a dedicated conf dir was requested.
if [ "x${BASE_CONF_DIR}" != "x" ]; then
    LOG INFO "\033[1m Start to copy configuration file/directory\033[0m"
    # Copy the configuration file
    copy_replace bootstrap.properties
    copy_replace application.yml
    copy_replace logback.xml
    copy_replace i18n
    copy_replace mybatis-mapper
    copy_replace static
fi
echo "end to make directory"
BOOTSTRAP_PROP_FILE="${CONF_PATH}/bootstrap.properties"
# Start to initialize the database: when SQL_SOURCE_PATH points at an
# existing sql file and the mysql client is on PATH, interactively collect
# connection settings, run the script, and persist the settings into
# bootstrap.properties via sed.
echo "Start to initalize database"
if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
    `mysql --version >/dev/null 2>&1`
    if [ $? == 0 ]; then
        LOG INFO "\033[1m Scan out mysql command, so begin to initalize the database\033[0m"
        interact_echo "Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?"
        if [ $? == 0 ]; then
            read -p "Please input the db host(default: 127.0.0.1): " HOST
            if [ "x${HOST}" == "x" ]; then
                HOST="127.0.0.1"
            fi
            # Re-prompt until the port is empty (default) or a number.
            while [ 1 ]; do
                read -p "Please input the db port(default: 3306): " PORT
                if [ "x${PORT}" == "x" ]; then
                    PORT=3306
                    break
                elif [ ${PORT} -gt 0 ] 2>/dev/null; then
                    break
                else
                    echo "${PORT} is not a number, please input again"
                fi
            done
            read -p "Please input the db username(default: root): " USERNAME
            if [ "x${USERNAME}" == "x" ]; then
                USERNAME="root"
            fi
            read -p "Please input the db password(default: ""): " PASSWORD
            read -p "Please input the db name(default: flinkxweb)" DATABASE
            if [ "x${DATABASE}" == "x" ]; then
                DATABASE="flinkxweb"
            fi
            # Create the schema if needed, then source the init script.
            mysql -h ${HOST} -P ${PORT} -u ${USERNAME} -p${PASSWORD} --default-character-set=utf8 -e \
            "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
            sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE}
            sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE}
            sed -ri "s![#]?(DB_USERNAME=)\S*!\1${USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
            sed -ri "s![#]?(DB_PASSWORD=)\S*!\1${PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
            sed -ri "s![#]?(DB_DATABASE=)\S*!\1${DATABASE}!g" ${BOOTSTRAP_PROP_FILE}
        fi
    fi
fi

View File

@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Launcher that hands a DataX job json to the DataX engine in a child
# process and exits with the child's return code.
import sys
import os
import signal
import subprocess
import time
import re
import socket
import json
from optparse import OptionParser
from optparse import OptionGroup
from string import Template
import codecs
import platform
def printCopyright():
    """Print the startup banner and flush stdout.

    BUGFIX: the original `print '''...'''` statement is a SyntaxError on
    Python 3; the parenthesized call below works on both Python 2 and 3
    and matches the sibling flinkx launcher in this module.
    NOTE(review): the %s placeholder in the banner is never substituted
    and prints literally; the intended value is not visible here.
    """
    print('''
LarkMidTable (%s), From LarkMidTable !
LarkMidTable All Rights Reserved.
''')
    sys.stdout.flush()
if __name__ == "__main__":
    printCopyright()
    # sys.path[0] is the directory containing this script.
    abs_file = sys.path[0]
    json_file = sys.argv[1]  # job description json
    log_name = sys.argv[2]   # output log file
    startCommand = "java -server -Xms1g -Xmx1g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=E:\datax/log -Xms1g -Xmx1g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=E:\datax/log -Dloglevel=info -Dfile.encoding=UTF-8 -Dlogback.statusListenerClass=ch.qos.logback.core.status.NopStatusListener -Djava.security.egd=file:///dev/urandom -Ddatax.home=E:\datax -Dlogback.configurationFile=E:\datax/conf/logback.xml -classpath E:\datax/lib/* -Dlog.file.name=8e8e5f7d4cd0fd5_json com.alibaba.datax.core.Engine -mode standalone -jobid -1 -job %s > %s" %(json_file,log_name)
    print(startCommand)
    # Run via the shell (the command string contains a redirection).
    child_process = subprocess.Popen(startCommand, shell=True)
    (stdout, stderr) = child_process.communicate()
    sys.exit(child_process.returncode)

View File

@ -0,0 +1,21 @@
# environment variables
#JAVA_HOME=""
WEB_LOG_PATH=${BIN}/../logs
WEB_CONF_PATH=${BIN}/../conf
DATA_PATH=${BIN}/../data
SERVER_PORT=8080
#PID_FILE_PATH=${BIN}/flinkxadmin.pid
# mail account
MAIL_USERNAME=""
MAIL_PASSWORD=""
#debug
#REMOTE_DEBUG_SWITCH=true
#REMOTE_DEBUG_PORT=7003

View File

@ -0,0 +1 @@
# Example: launch a flinkx sync job in local mode; run from the flinkx install root.
sh ./bin/flinkx -mode local -jobType sync -job ./job/stream.json -flinkxDistDir ./flinkx-dist -flinkConfDir ./flinkconf

View File

@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Launcher that submits a flinkx sync job in local mode via the flinkx
# client Launcher, redirecting its output to a log file, and exits with
# the child's return code.
import sys
import os
import signal
import subprocess
import time
import re
import socket
import json
from optparse import OptionParser
from optparse import OptionGroup
from string import Template
import codecs
import platform
def printCopyright():
    # Startup banner. NOTE(review): the %s placeholder is never substituted
    # and prints literally.
    print ('''
LarkMidTable (%s), From LarkMidTable !
LarkMidTable All Rights Reserved.
''')
    sys.stdout.flush()
if __name__ == "__main__":
    printCopyright()
    # sys.path[0] is the directory containing this script (flinkx home).
    abs_file=sys.path[0]
    json_file=sys.argv[1]  # job description json
    log_name=sys.argv[2]   # log file name, relative to flinkx home
    startCommand = "java -cp %s/lib/* com.dtstack.flinkx.client.Launcher -mode local -jobType sync -job %s -flinkxDistDir %s/flinkx-dist -flinkConfDir %s/flinkconf > %s/%s" %(abs_file,json_file,abs_file,abs_file,abs_file,log_name)
    print(startCommand)
    # Run via the shell (the command string contains a redirection).
    child_process = subprocess.Popen(startCommand, shell=True)
    (stdout, stderr) = child_process.communicate()
    sys.exit(child_process.returncode)

View File

@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Launcher that submits a flinkx sync job in local mode via the flinkx
# client Launcher, redirecting its output to a log file, and exits with
# the child's return code.
import sys
import os
import signal
import subprocess
import time
import re
import socket
import json
from optparse import OptionParser
from optparse import OptionGroup
from string import Template
import codecs
import platform
def printCopyright():
    """Print the startup banner and flush stdout.

    BUGFIX: the original `print '''...'''` statement is a SyntaxError on
    Python 3; the parenthesized call works on both Python 2 and 3 and
    matches the sibling flinkx launcher in this module.
    NOTE(review): the %s placeholder in the banner is never substituted
    and prints literally.
    """
    print('''
LarkMidTable (%s), From LarkMidTable !
LarkMidTable All Rights Reserved.
''')
    sys.stdout.flush()
if __name__ == "__main__":
    printCopyright()
    # sys.path[0] is the directory containing this script (flinkx home).
    abs_file = sys.path[0]
    json_file = sys.argv[1]  # job description json
    log_name = sys.argv[2]   # log file path
    startCommand = "java -cp %s/lib/* com.dtstack.flinkx.client.Launcher -mode local -jobType sync -job %s -flinkxDistDir %s/flinkx-dist -flinkConfDir %s/flinkconf > %s" %(abs_file,json_file,abs_file,abs_file,log_name)
    print(startCommand)
    # Run via the shell (the command string contains a redirection).
    child_process = subprocess.Popen(startCommand, shell=True)
    (stdout, stderr) = child_process.communicate()
    sys.exit(child_process.returncode)

View File

@ -0,0 +1,148 @@
package com.czsj.web.controller.bigdata;
import com.baomidou.mybatisplus.extension.api.ApiController;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ruoshui.common.constant.HttpStatus;
import com.ruoshui.common.core.domain.AjaxResult;
import com.ruoshui.common.core.domain.model.LoginUser;
import com.ruoshui.common.core.page.PageDomain;
import com.ruoshui.common.core.page.TableDataInfo;
import com.ruoshui.common.core.page.TableSupport;
import com.ruoshui.common.utils.DateUtils;
import com.ruoshui.common.utils.PageUtils;
import com.ruoshui.common.utils.SecurityUtils;
import com.ruoshui.common.utils.StringUtils;
import com.ruoshui.common.utils.sql.SqlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import java.beans.PropertyEditorSupport;
import java.util.Date;
import java.util.List;
/**
 * Common web-layer request handling shared by the bigdata controllers:
 * date binding, paging/sorting setup, result wrapping and current-user
 * helpers.
 *
 * @author ruoshui
 */
public class BaseController extends ApiController
{
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    /**
     * Automatically convert date-formatted strings sent by the frontend
     * into {@link Date} values during data binding.
     */
    @InitBinder
    public void initBinder(WebDataBinder binder)
    {
        // Date type conversion
        binder.registerCustomEditor(Date.class, new PropertyEditorSupport()
        {
            @Override
            public void setAsText(String text)
            {
                setValue(DateUtils.parseDate(text));
            }
        });
    }
    /**
     * Begin paging for the current request (thread-local, via PageHelper).
     */
    protected void startPage()
    {
        PageUtils.startPage();
    }
    /**
     * Apply the requested ordering, escaping the order-by clause to guard
     * against SQL injection.
     */
    protected void startOrderBy()
    {
        PageDomain pageDomain = TableSupport.buildPageRequest();
        if (StringUtils.isNotEmpty(pageDomain.getOrderBy()))
        {
            String orderBy = SqlUtil.escapeOrderBySql(pageDomain.getOrderBy());
            PageHelper.orderBy(orderBy);
        }
    }
    /**
     * Clear the paging thread-local state.
     */
    protected void clearPage()
    {
        PageUtils.clearPage();
    }
    /**
     * Wrap a paged query result into the standard table response.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected TableDataInfo getDataTable(List<?> list)
    {
        TableDataInfo rspData = new TableDataInfo();
        rspData.setCode(HttpStatus.SUCCESS);
        rspData.setMsg("查询成功");
        rspData.setRows(list);
        rspData.setTotal(new PageInfo(list).getTotal());
        return rspData;
    }
    /**
     * Convert an affected-row count into a success/error response.
     *
     * @param rows number of affected rows
     * @return success when rows &gt; 0, error otherwise
     */
    protected AjaxResult toAjax(int rows)
    {
        return rows > 0 ? AjaxResult.success() : AjaxResult.error();
    }
    /**
     * Build a redirect view name for the given url.
     */
    public String redirect(String url)
    {
        return StringUtils.format("redirect:{}", url);
    }
    /**
     * Get the cached login-user information for the current request.
     */
    public LoginUser getLoginUser()
    {
        return SecurityUtils.getLoginUser();
    }
    /**
     * Get the id of the logged-in user.
     */
    public Long getUserId()
    {
        return getLoginUser().getUserId();
    }
    /**
     * Get the department id of the logged-in user.
     */
    public Long getDeptId()
    {
        return getLoginUser().getDeptId();
    }
    /**
     * Get the username of the logged-in user.
     */
    public String getUsername()
    {
        return getLoginUser().getUsername();
    }
}

View File

@ -0,0 +1,253 @@
package com.czsj.web.controller.bigdata;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoshui.bigdata.util.PageUtils;
import com.ruoshui.bigdata.util.ServletUtils;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServletRequest;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Base form helper: captures every parameter of the current HTTP request
 * into a map and derives paging/sorting settings from it.
 *
 * @author zhouhongfa@gz-yibo.com
 * @version 1.0
 * @since 2019/5/15
 */
@Slf4j
public class BaseForm {
    /**
     * Query parameters of the current request (insertion-ordered).
     */
    protected Map<String, Object> values = new LinkedHashMap<>();
    /**
     * Current page number (1-based).
     */
    private Long current = 1L;
    /**
     * Page size.
     */
    private Long size = 10L;
    /**
     * Constructor: copies every request parameter (trimmed, URL-decoded as
     * UTF-8) into {@link #values}, then derives the sorting parameters.
     */
    public BaseForm() {
        try {
            HttpServletRequest request = ServletUtils.getRequest();
            Enumeration<String> params = request.getParameterNames();
            while (params.hasMoreElements()) {
                String name = params.nextElement();
                String value = StrUtil.trim(request.getParameter(name));
                this.set(name, URLDecoder.decode(value, "UTF-8"));
            }
            this.parsePagingQueryParams();
        } catch (Exception e) {
            e.printStackTrace();
            log.error("BaseControlForm initialize parameters setting error" + e);
        }
    }
    /**
     * Get the requested page number ("current" parameter, falling back to 1).
     *
     * @return page number
     */
    public Long getPageNo() {
        String pageNum = StrUtil.toString(this.get("current"));
        if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) {
            this.current = Long.parseLong(pageNum);
        }
        return this.current;
    }
    /**
     * Get the requested page size ("size" parameter, falling back to 10).
     *
     * @return page size
     */
    public Long getPageSize() {
        String pageSize = StrUtil.toString(this.get("size"));
        if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) {
            this.size = Long.parseLong(pageSize);
        }
        return this.size;
    }
    /**
     * Get the full parameter map.
     *
     * @return parameter map
     */
    public Map<String, Object> getParameters() {
        return values;
    }
    /**
     * Get a value from the parameter map by key.
     *
     * @param name parameter name
     * @return value, or null when absent
     */
    public Object get(String name) {
        if (values == null) {
            values = new LinkedHashMap<>();
            return null;
        }
        return this.values.get(name);
    }
    /**
     * Get a parameter value as a String.
     *
     * @param key parameter name
     * @return String value
     */
    public String getString(String key) {
        return StrUtil.toString(get(key));
    }
    /**
     * Get the sort column parameter.
     *
     * @return sort column
     */
    public String getSort() {
        return StrUtil.toString(this.values.get("sort"));
    }
    /**
     * Get the sort direction parameter.
     *
     * @return sort direction
     */
    public String getOrder() {
        return StrUtil.toString(this.values.get("order"));
    }
    /**
     * Get the combined "orderby" parameter.
     *
     * @return order-by expression
     */
    public String getOrderby() {
        return StrUtil.toString(this.values.get("orderby"));
    }
    /**
     * Build a MyBatis-Plus paging object from the request parameters.
     */
    public Page getPlusPagingQueryEntity() {
        Page page = new Page();
        // NOTE(review): the original comment claimed a 1000-row default when
        // "current" is absent; the code actually falls back to current=1,
        // size=10 (see getPageNo/getPageSize).
        page.setCurrent(this.getPageNo());
        page.setSize(this.getPageSize());
        if (ObjectUtil.isNotNull(this.get("ifCount"))) {
            page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount")));
        } else {
            // default to counting the total number of rows
            page.setSearchCount(true);
        }
        return page;
    }
    /**
     * Derive the PageHelper-style "orderby" parameter from the individual
     * "sort"/"order" parameters when "orderby" itself was not supplied.
     */
    public void parsePagingQueryParams() {
        // sort field parsing
        String orderBy = StrUtil.toString(this.get("orderby")).trim();
        String sortName = StrUtil.toString(this.get("sort")).trim();
        String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase();
        if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) {
            // invalid direction falls back to ascending
            if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) {
                sortOrder = "asc";
            }
            this.set("orderby", sortName + " " + sortOrder);
        }
    }
    /**
     * Set a parameter (null values are ignored).
     *
     * @param name  parameter name
     * @param value parameter value
     */
    public void set(String name, Object value) {
        if (ObjectUtil.isNotNull(value)) {
            this.values.put(name, value);
        }
    }
    /**
     * Remove a parameter.
     *
     * @param name parameter name
     */
    public void remove(String name) {
        this.values.remove(name);
    }
    /**
     * Clear all parameters.
     */
    public void clear() {
        if (values != null) {
            values.clear();
        }
    }
    /**
     * Assemble a custom query wrapper from the request parameters: paging
     * parameters drive ordering, the remaining non-empty parameters become
     * column conditions (LIKE for a few name columns, = otherwise).
     *
     * @param map request parameters
     * @return the populated query wrapper
     */
    protected QueryWrapper<?> pageQueryWrapperCustom(Map<String, Object> map, QueryWrapper<?> queryWrapper) {
        // MyBatis-Plus paging-related parameters
        Map<String, Object> pageParams = PageUtils.filterPageParams(map);
        // column-query parameters with empty values filtered out
        Map<String, Object> colQueryMap = PageUtils.filterColumnQueryParams(map);
        // ordering
        pageParams.forEach((k, v) -> {
            switch (k) {
                case "ascs":
                    queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
                case "descs":
                    queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
            }
        });
        // assemble per-column query conditions
        colQueryMap.forEach((k, v) -> {
            switch (k) {
                case "pluginName":
                case "datasourceName":
                    queryWrapper.like(StrUtil.toUnderlineCase(k), v);
                    break;
                default:
                    queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
            }
        });
        return queryWrapper;
    }
}

View File

@ -0,0 +1,94 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.entity.BaseResource;
import com.ruoshui.bigdata.mapper.BaseResourceMapper;
import com.ruoshui.bigdata.util.AESUtil;
import com.ruoshui.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * CRUD endpoints for base resources used by the bigdata module.
 * Server passwords are stored AES-encrypted via {@code AESUtil}.
 */
@RestController
@RequestMapping("/api/base/resource")
@Api(tags = "基础建设-资源管理")
public class BaseResourceController {
    // NOTE(review): SimpleDateFormat is not thread-safe and this instance is
    // shared across requests — consider DateTimeFormatter or a thread-local.
    private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    @Autowired
    private BaseResourceMapper baseResourceMapper;
    /**
     * Paged list of resources, optionally filtered by name.
     */
    @ApiOperation("获取所有数据")
    @GetMapping("/list")
    @PreAuthorize("@ss.hasPermi('datax:resource:list')")
    public ReturnT<Map<String, Object>> selectList(
            @RequestParam(value = "current", required = false, defaultValue = "1") int current,
            @RequestParam(value = "size", required = false, defaultValue = "10") int size,
            @RequestParam(value = "name", required = false) String name) {
        // page list
        List<BaseResource> list = baseResourceMapper.findList((current - 1) * size,size,name);
        Map<String, Object> maps = new HashMap<>();
        // NOTE(review): this is the size of the returned page, not the total
        // row count across all pages — confirm against the mapper whether a
        // separate count query was intended.
        maps.put("recordsTotal", list.size());
        maps.put("data", list); // the page of rows
        return new ReturnT<>(maps);
    }
    /**
     * Insert a resource; stamps update_time and encrypts the password.
     */
    @ApiOperation("新增数据")
    @PostMapping("/add")
    @PreAuthorize("@ss.hasPermi('datax:resource:add')")
    public ReturnT<String> insert(HttpServletRequest request, @RequestBody BaseResource entity) {
        entity.setUpdate_time(sdf.format(new Date()));
        entity.setServerPassword(AESUtil.encrypt(entity.getServerPassword()));
        this.baseResourceMapper.save(entity);
        return ReturnT.SUCCESS;
    }
    /**
     * Update a resource; re-encrypts the password only when it changed
     * (an unchanged password arrives still encrypted from the stored row).
     */
    @ApiOperation("修改数据")
    @PostMapping(value = "/update")
    @PreAuthorize("@ss.hasPermi('datax:resource:edit')")
    public ReturnT<String> update(@RequestBody BaseResource entity) {
        entity.setUpdate_time(sdf.format(new Date()));
        // load the stored row to compare passwords
        BaseResource byId = baseResourceMapper.getById(entity.getId());
        if(entity.getServerPassword().equals(byId.getServerPassword())){
            entity.setServerPassword(AESUtil.encrypt(AESUtil.decrypt(entity.getServerPassword())));
        }else{
            entity.setServerPassword(AESUtil.encrypt(entity.getServerPassword()));
        }
        baseResourceMapper.update(entity);
        return ReturnT.SUCCESS;
    }
    /**
     * Delete a resource by id; FAIL when no row was removed.
     */
    @RequestMapping(value = "/remove", method = RequestMethod.POST)
    @ApiOperation("删除数据")
    @PreAuthorize("@ss.hasPermi('datax:resource:remove')")
    public ReturnT<String> delete(int id) {
        int result = baseResourceMapper.delete(id);
        return result != 1 ? ReturnT.FAIL : ReturnT.SUCCESS;
    }
    /**
     * List server-type resources.
     */
    @RequestMapping(value = "/getResource", method = RequestMethod.POST)
    @ApiOperation("查询资源列表")
    @PreAuthorize("@ss.hasPermi('datax:resource:query')")
    public ReturnT<String> getResource() {
        List<BaseResource> result = baseResourceMapper.getResource();
        return new ReturnT(result);
    }
    /**
     * List file-type resources.
     */
    @RequestMapping(value = "/getFileResource", method = RequestMethod.POST)
    @ApiOperation("查询资源列表")
    @PreAuthorize("@ss.hasPermi('datax:resource:query')")
    public ReturnT<String> getFileResource() {
        List<BaseResource> result = baseResourceMapper.getFileResource();
        return new ReturnT(result);
    }
}

View File

@ -0,0 +1,48 @@
package com.czsj.web.controller.bigdata;
import com.baomidou.mybatisplus.extension.api.R;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.dto.DataXJsonBuildDto;
import com.ruoshui.bigdata.service.DataxJsonService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Controller that assembles a datax job json from reader/writer settings.
 *
 * Created by jingwk on 2022/05/05
 */
@RestController
@RequestMapping("api/dataxJson")
@Api(tags = "组装datax json的控制器")
public class DataxJsonController extends BaseController {
    @Autowired
    private DataxJsonService dataxJsonService;
    /**
     * Validate that both datasources and both column lists are present,
     * then delegate json assembly to the service. Validation messages are
     * localized via I18nUtil.
     */
    @PostMapping("/buildJson")
    @ApiOperation("JSON构建")
    public R<String> buildJobJson(@RequestBody DataXJsonBuildDto dto) {
        // i18n key for the "please choose ..." message prefix
        String key = "system_please_choose";
        if (dto.getReaderDatasourceId() == null) {
            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerDataSource"));
        }
        if (dto.getWriterDatasourceId() == null) {
            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerDataSource"));
        }
        if (CollectionUtils.isEmpty(dto.getReaderColumns())) {
            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerColumns"));
        }
        if (CollectionUtils.isEmpty(dto.getWriterColumns())) {
            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerColumns"));
        }
        return success(dataxJsonService.buildJobJson(dto));
    }
}

View File

@ -0,0 +1,145 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.conf.JobAdminConfig;
import com.ruoshui.bigdata.core.util.JacksonUtil;
import com.ruoshui.core.biz.AdminBiz;
import com.ruoshui.core.biz.model.HandleCallbackParam;
import com.ruoshui.core.biz.model.HandleProcessCallbackParam;
import com.ruoshui.core.biz.model.RegistryParam;
import com.ruoshui.core.biz.model.ReturnT;
import com.ruoshui.core.util.JobRemotingUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
* Created by xuxueli on 17/5/10.
*/
@RestController
@RequestMapping("/api")
public class JobApiController {
@Autowired
private AdminBiz adminBiz;
/**
 * Executor callback endpoint: receives job handle results.
 *
 * @param data json array of HandleCallbackParam
 * @return success/fail result
 */
@RequestMapping("/callback")
public ReturnT<String> callback(HttpServletRequest request, @RequestBody(required = false) String data) {
    // valid: when an access token is configured, the request header must match it
    if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
            && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
            && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
        return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
    }
    // param: malformed json is tolerated here and rejected as empty below
    List<HandleCallbackParam> callbackParamList = null;
    try {
        callbackParamList = JacksonUtil.readValue(data, List.class, HandleCallbackParam.class);
    } catch (Exception e) { }
    if (callbackParamList==null || callbackParamList.size()==0) {
        return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
    }
    // invoke
    return adminBiz.callback(callbackParamList);
}
/**
 * Executor process-callback endpoint: receives process handle results.
 *
 * @param data json array of HandleProcessCallbackParam
 * @return success/fail result
 */
@RequestMapping("/processCallback")
public ReturnT<String> processCallback(HttpServletRequest request, @RequestBody(required = false) String data) {
    // valid: when an access token is configured, the request header must match it
    if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
            && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
            && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
        return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
    }
    // param: malformed json is tolerated here and rejected as empty below
    List<HandleProcessCallbackParam> callbackParamList = null;
    try {
        callbackParamList = JacksonUtil.readValue(data, List.class, HandleProcessCallbackParam.class);
    } catch (Exception e) { }
    if (callbackParamList==null || callbackParamList.size()==0) {
        return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
    }
    // invoke
    return adminBiz.processCallback(callbackParamList);
}
/**
* registry
*
* @param data
* @return
*/
@RequestMapping("/registry")
public ReturnT<String> registry(HttpServletRequest request, @RequestBody(required = false) String data) {
// valid
if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
&& JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
&& !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "The access token is wrong.");
}
// param
RegistryParam registryParam = null;
try {
registryParam = JacksonUtil.readValue(data, RegistryParam.class);
} catch (Exception e) {}
if (registryParam == null) {
return new ReturnT<String>(ReturnT.FAIL_CODE, "The request data invalid.");
}
// invoke
return adminBiz.registry(registryParam);
}
/**
* registry remove
*
* @param data
* @return
*/
@RequestMapping("/registryRemove")
public ReturnT<String> registryRemove(HttpServletRequest request, @RequestBody(required = false) String data) {
// valid
if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
&& JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
&& !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
}
// param
RegistryParam registryParam = null;
try {
registryParam = JacksonUtil.readValue(data, RegistryParam.class);
} catch (Exception e) {}
if (registryParam == null) {
return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
}
// invoke
return adminBiz.registryRemove(registryParam);
}
}

View File

@ -0,0 +1,76 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.entity.JobInfo;
import com.ruoshui.bigdata.entity.JobLogGlue;
import com.ruoshui.bigdata.mapper.JobInfoMapper;
import com.ruoshui.bigdata.mapper.JobLogGlueMapper;
import com.ruoshui.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Date;
import static com.ruoshui.core.biz.model.ReturnT.FAIL_CODE;
/**
 * Persists edited GLUE source code for a job and keeps a bounded backup
 * history of previous versions.
 *
 * Created by jingwk on 2019/11/17
 */
@RestController
@RequestMapping("/jobcode")
@Api(tags = "任务状态接口")
public class JobCodeController {

    @Autowired
    private JobInfoMapper jobInfoMapper;
    @Autowired
    private JobLogGlueMapper jobLogGlueMapper;

    // number of glue-source backups retained per job (older ones are purged)
    private static final int GLUE_BACKUP_LIMIT = 30;

    /**
     * Saves new glue source for the job {@code id} and records the submitted
     * version as a backup log entry.
     *
     * @param model      unused, kept for signature compatibility
     * @param id         job id
     * @param glueSource new glue source code
     * @param glueRemark change remark; required, 4-100 characters
     * @return SUCCESS, or FAIL with an i18n message on validation failure
     */
    @RequestMapping(value = "/save", method = RequestMethod.POST)
    @ApiOperation("保存任务状态")
    public ReturnT<String> save(Model model, int id, String glueSource, String glueRemark) {
        // valid: remark is mandatory and length-bounded
        if (glueRemark == null) {
            return new ReturnT<>(FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")));
        }
        if (glueRemark.length() < 4 || glueRemark.length() > 100) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_remark_limit"));
        }
        JobInfo existsJobInfo = jobInfoMapper.loadById(id);
        if (existsJobInfo == null) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }

        // FIX: use a single timestamp so the job row and its backup log entry
        // carry exactly the same update time (the original called new Date()
        // separately for each field)
        Date now = new Date();

        // update new code
        existsJobInfo.setGlueSource(glueSource);
        existsJobInfo.setGlueRemark(glueRemark);
        existsJobInfo.setGlueUpdatetime(now);
        existsJobInfo.setUpdateTime(now);
        jobInfoMapper.update(existsJobInfo);

        // log old code
        JobLogGlue jobLogGlue = new JobLogGlue();
        jobLogGlue.setJobId(existsJobInfo.getId());
        jobLogGlue.setGlueType(existsJobInfo.getGlueType());
        jobLogGlue.setGlueSource(glueSource);
        jobLogGlue.setGlueRemark(glueRemark);
        jobLogGlue.setAddTime(now);
        jobLogGlue.setUpdateTime(now);
        jobLogGlueMapper.save(jobLogGlue);

        // remove code backups beyond the retention limit
        jobLogGlueMapper.removeOld(existsJobInfo.getId(), GLUE_BACKUP_LIMIT);
        return ReturnT.SUCCESS;
    }
}

View File

@ -0,0 +1,140 @@
package com.czsj.web.controller.bigdata;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.ruoshui.bigdata.core.util.LocalCacheUtil;
import com.ruoshui.bigdata.entity.JobDatasource;
import com.ruoshui.bigdata.service.JobDatasourceService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
/**
 * jdbc数据源配置控制器层 — CRUD and connectivity-test endpoints for JDBC
 * datasource configurations.
 *
 * @author zhouhongfa@gz-yibo.com
 * @version v1.0
 * @since 2019-07-30
 */
@RestController
@RequestMapping("/api/jobJdbcDatasource")
@Api(tags = "jdbc数据源配置接口")
public class JobDatasourceController extends BaseController {

    /**
     * 服务对象
     */
    @Autowired
    private JobDatasourceService jobJdbcDatasourceService;

    /**
     * 分页查询所有数据
     *
     * @return paged datasource list built from the request's query parameters
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    @PreAuthorize("@ss.hasPermi('datax:Datasource:list')")
    public R<IPage<JobDatasource>> selectAll() {
        BaseForm form = new BaseForm();
        QueryWrapper<JobDatasource> query = (QueryWrapper<JobDatasource>) form.pageQueryWrapperCustom(form.getParameters(), new QueryWrapper<JobDatasource>());
        return success(jobJdbcDatasourceService.page(form.getPlusPagingQueryEntity(), query));
    }

    /**
     * 获取所有数据源
     *
     * @return every configured datasource, unpaged
     */
    @ApiOperation("获取所有数据源")
    @GetMapping("/all")
    @PreAuthorize("@ss.hasPermi('datax:Datasource:query')")
    public R<List<JobDatasource>> selectAllDatasource() {
        return success(this.jobJdbcDatasourceService.selectAllDatasource());
    }

    /**
     * 通过主键查询单条数据
     *
     * @param id 主键
     * @return 单条数据
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    @PreAuthorize("@ss.hasPermi('datax:Datasource:query')")
    public R<JobDatasource> selectOne(@PathVariable Serializable id) {
        return success(this.jobJdbcDatasourceService.getById(id));
    }

    /**
     * 新增数据
     *
     * @param entity 实体对象
     * @return 新增结果
     */
    @ApiOperation("新增数据")
    @PostMapping
    @PreAuthorize("@ss.hasPermi('datax:Datasource:add')")
    public R<Boolean> insert(@RequestBody JobDatasource entity) {
        return success(this.jobJdbcDatasourceService.save(entity));
    }

    /**
     * 修改数据
     *
     * @param entity 实体对象
     * @return 修改结果
     */
    @PutMapping
    @ApiOperation("修改数据")
    @PreAuthorize("@ss.hasPermi('datax:Datasource:edit')")
    public R<Boolean> update(@RequestBody JobDatasource entity) {
        // evict any cached state keyed by this datasource's name
        LocalCacheUtil.remove(entity.getDatasourceName());
        JobDatasource stored = jobJdbcDatasourceService.getById(entity.getId());
        // Null out unchanged credentials so the service layer keeps the stored
        // values instead of re-processing them.
        // FIX: guard entity.getJdbcUsername() against null before equals() —
        // the original dereferenced it unconditionally and threw an NPE when
        // the request omitted the username (the password branch already had
        // the symmetric guard).
        if (null != stored.getJdbcUsername() && null != entity.getJdbcUsername()
                && entity.getJdbcUsername().equals(stored.getJdbcUsername())) {
            entity.setJdbcUsername(null);
        }
        if (null != entity.getJdbcPassword() && entity.getJdbcPassword().equals(stored.getJdbcPassword())) {
            entity.setJdbcPassword(null);
        }
        return success(this.jobJdbcDatasourceService.updateById(entity));
    }

    /**
     * 删除数据
     *
     * @param idList 主键结合
     * @return 删除结果
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    @PreAuthorize("@ss.hasPermi('datax:Datasource:remove')")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobJdbcDatasourceService.removeByIds(idList));
    }

    /**
     * 测试数据源
     *
     * @param jobJdbcDatasource candidate datasource configuration
     * @return true when a connection could be established
     */
    @PostMapping("/test")
    @ApiOperation("测试数据")
    public R<Boolean> dataSourceTest (@RequestBody JobDatasource jobJdbcDatasource) throws IOException {
        return success(jobJdbcDatasourceService.dataSourceTest(jobJdbcDatasource));
    }
}

View File

@ -0,0 +1,179 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.entity.JobGroup;
import com.ruoshui.bigdata.entity.JobRegistry;
import com.ruoshui.bigdata.mapper.JobGroupMapper;
import com.ruoshui.bigdata.mapper.JobInfoMapper;
import com.ruoshui.bigdata.mapper.JobRegistryMapper;
import com.ruoshui.core.biz.model.ReturnT;
import com.ruoshui.core.enums.RegistryConfig;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import java.util.*;
/**
 * Executor-group management endpoints: list, save, update, remove, query.
 *
 * Created by jingwk on 2019/11/17
 */
@RestController
@RequestMapping("/api/jobGroup")
@Api(tags = "执行器管理接口")
public class JobGroupController {

    @Autowired
    public JobInfoMapper jobInfoMapper;
    @Autowired
    public JobGroupMapper jobGroupMapper;
    @Autowired
    private JobRegistryMapper jobRegistryMapper;

    @GetMapping("/list")
    @ApiOperation("执行器列表")
    @PreAuthorize("@ss.hasPermi('datax:executor:list')")
    public ReturnT<List<JobGroup>> getExecutorList() {
        return new ReturnT<>(jobGroupMapper.findAll());
    }

    /**
     * Shared field validation for save/update: AppName present and 4-64 chars,
     * title present. (The original duplicated this block verbatim in both
     * endpoints.)
     *
     * @return {@code null} when valid, otherwise a 500 ReturnT to return as-is
     */
    private ReturnT<String> validJobGroupFields(JobGroup jobGroup) {
        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + "AppName"));
        }
        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appName_length"));
        }
        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
        }
        return null;
    }

    /**
     * Validates a manually-entered comma-separated address list: must be
     * non-empty with no blank entries.
     *
     * @return {@code null} when valid, otherwise a 500 ReturnT to return as-is
     */
    private ReturnT<String> validAddressList(String addressList) {
        if (addressList == null || addressList.trim().length() == 0) {
            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
        }
        for (String item : addressList.split(",")) {
            if (item == null || item.trim().length() == 0) {
                return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
            }
        }
        return null;
    }

    @PostMapping("/save")
    @ApiOperation("新建执行器")
    @PreAuthorize("@ss.hasPermi('datax:executor:add')")
    public ReturnT<String> save(@RequestBody JobGroup jobGroup) {
        // valid
        ReturnT<String> fieldResult = validJobGroupFields(jobGroup);
        if (fieldResult != null) {
            return fieldResult;
        }
        // non-zero addressType means manually entered addresses
        if (jobGroup.getAddressType() != 0) {
            ReturnT<String> addressResult = validAddressList(jobGroup.getAddressList());
            if (addressResult != null) {
                return addressResult;
            }
        }
        int ret = jobGroupMapper.save(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    @PostMapping("/update")
    @ApiOperation("更新执行器")
    @PreAuthorize("@ss.hasPermi('datax:executor:edit')")
    public ReturnT<String> update(@RequestBody JobGroup jobGroup) {
        // valid
        ReturnT<String> fieldResult = validJobGroupFields(jobGroup);
        if (fieldResult != null) {
            return fieldResult;
        }
        if (jobGroup.getAddressType() == 0) {
            // 0=自动注册: rebuild the address list from live registry entries
            List<String> registryList = findRegistryByAppName(jobGroup.getAppName());
            String addressListStr = null;
            if (registryList != null && !registryList.isEmpty()) {
                Collections.sort(registryList);
                // idiom: String.join replaces the original manual
                // concat-then-trim-trailing-comma loop
                addressListStr = String.join(",", registryList);
            }
            jobGroup.setAddressList(addressListStr);
        } else {
            // 1=手动录入
            ReturnT<String> addressResult = validAddressList(jobGroup.getAddressList());
            if (addressResult != null) {
                return addressResult;
            }
        }
        int ret = jobGroupMapper.update(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /**
     * Collects the alive, auto-registered executor addresses for the given
     * AppName from the registry table.
     *
     * @return de-duplicated address list, or {@code null} when none registered
     */
    private List<String> findRegistryByAppName(String appNameParam) {
        HashMap<String, List<String>> appAddressMap = new HashMap<>();
        List<JobRegistry> list = jobRegistryMapper.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
        if (list != null) {
            for (JobRegistry item : list) {
                // only EXECUTOR registrations matter here
                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
                    List<String> registryList =
                            appAddressMap.computeIfAbsent(item.getRegistryKey(), k -> new ArrayList<>());
                    if (!registryList.contains(item.getRegistryValue())) {
                        registryList.add(item.getRegistryValue());
                    }
                }
            }
        }
        return appAddressMap.get(appNameParam);
    }

    @PostMapping("/remove")
    @ApiOperation("移除执行器")
    @PreAuthorize("@ss.hasPermi('datax:executor:remove')")
    public ReturnT<String> remove(int id) {
        // refuse removal while jobs still reference this group
        int count = jobInfoMapper.pageListCount(0, 10, id, -1, null, null, 0,null);
        if (count > 0) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_0"));
        }
        // always keep at least one group
        List<JobGroup> allList = jobGroupMapper.findAll();
        if (allList.size() == 1) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_1"));
        }
        int ret = jobGroupMapper.remove(id);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    @RequestMapping(value = "/loadById", method = RequestMethod.POST)
    @ApiOperation("根据id获取执行器")
    @PreAuthorize("@ss.hasPermi('datax:executor:query')")
    public ReturnT<JobGroup> loadById(int id) {
        JobGroup jobGroup = jobGroupMapper.load(id);
        return jobGroup != null ? new ReturnT<>(jobGroup) : new ReturnT<>(ReturnT.FAIL_CODE, null);
    }

    @GetMapping("/query")
    @ApiOperation("查询执行器")
    @PreAuthorize("@ss.hasPermi('datax:executor:query')")
    public ReturnT<List<JobGroup>> get(@ApiParam(value = "执行器AppName")
                                       @RequestParam(value = "appName", required = false) String appName,
                                       @ApiParam(value = "执行器名称")
                                       @RequestParam(value = "title", required = false) String title,
                                       @ApiParam(value = "执行器地址列表")
                                       @RequestParam(value = "addressList", required = false) String addressList) {
        return new ReturnT<>(jobGroupMapper.find(appName, title, addressList));
    }
}

View File

@ -0,0 +1,138 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.cron.CronExpression;
import com.ruoshui.bigdata.core.thread.JobTriggerPoolHelper;
import com.ruoshui.bigdata.core.trigger.TriggerTypeEnum;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.dto.DataXBatchJsonBuildDto;
import com.ruoshui.bigdata.dto.TriggerJobDto;
import com.ruoshui.bigdata.entity.JobInfo;
import com.ruoshui.bigdata.service.JobService;
import com.ruoshui.core.biz.model.ReturnT;
import com.ruoshui.core.util.DateUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Job configuration endpoints: CRUD, start/stop, manual trigger, cron preview
 * and batch creation. Thin wrappers over {@link JobService}.
 *
 * @author xuxueli 2015-12-19 16:13:16
 */
@Api(tags = "任务配置接口")
@RestController
@RequestMapping("/api/job")
public class JobInfoController extends BaseController{

    @Autowired
    private JobService jobService;

    /** Paged job list; {@code current} is 1-based, converted to an offset. */
    @GetMapping("/pageList")
    @ApiOperation("任务列表")
    @PreAuthorize("@ss.hasPermi('datax:job:list')")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(required = false, defaultValue = "0") int current,
                                                 @RequestParam(required = false, defaultValue = "10") int size,
                                                 int jobGroup, int triggerStatus, String jobDesc, String glueType, Integer[] projectIds) {
        int offset = (current - 1) * size;
        return new ReturnT<>(jobService.pageList(offset, size, jobGroup, triggerStatus, jobDesc, glueType, 0, projectIds));
    }

    /** All jobs, unpaged. */
    @GetMapping("/list")
    @ApiOperation("全部任务列表")
    @PreAuthorize("@ss.hasPermi('datax:job:query')")
    public ReturnT<List<JobInfo>> list(){
        return new ReturnT<>(jobService.list());
    }

    /** Creates a job, stamping it with the current user's id. */
    @PostMapping("/add")
    @ApiOperation("添加任务")
    @PreAuthorize("@ss.hasPermi('datax:job:add')")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getUserId());
        return jobService.add(jobInfo);
    }

    /** Updates a job, stamping it with the current user's id. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    @PreAuthorize("@ss.hasPermi('datax:job:edit')")
    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getUserId());
        return jobService.update(jobInfo);
    }

    /** Deletes the job with the given id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务")
    @PreAuthorize("@ss.hasPermi('datax:job:remove')")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobService.remove(id);
    }

    /** Pauses scheduling for the job. */
    @RequestMapping(value = "/stop",method = RequestMethod.POST)
    @ApiOperation("停止任务")
    @PreAuthorize("@ss.hasPermi('datax:job:startorstop')")
    public ReturnT<String> pause(int id) {
        return jobService.stop(id);
    }

    /** Resumes scheduling for the job. */
    @RequestMapping(value = "/start",method = RequestMethod.POST)
    @ApiOperation("开启任务")
    @PreAuthorize("@ss.hasPermi('datax:job:startorstop')")
    public ReturnT<String> start(int id) {
        return jobService.start(id);
    }

    /** Fires the job once, immediately, with an optional executor param. */
    @PostMapping(value = "/trigger")
    @ApiOperation("触发任务")
    @PreAuthorize("@ss.hasPermi('datax:job:trigger')")
    public ReturnT<String> triggerJob(@RequestBody TriggerJobDto dto) {
        // force cover job param; a missing param is passed as empty string
        String param = dto.getExecutorParam() != null ? dto.getExecutorParam() : "";
        JobTriggerPoolHelper.trigger(dto.getJobId(), TriggerTypeEnum.MANUAL, -1, null, param);
        return ReturnT.SUCCESS;
    }

    /**
     * Previews the next 5 fire times of a cron expression, starting from now.
     * Stops early if the expression yields fewer future times.
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> times = new ArrayList<>();
        try {
            CronExpression expression = new CronExpression(cron);
            Date cursor = new Date();
            for (int i = 0; i < 5; i++) {
                cursor = expression.getNextValidTimeAfter(cursor);
                if (cursor == null) {
                    break;
                }
                times.add(DateUtil.formatDateTime(cursor));
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(times);
    }

    /** Batch-creates jobs from a DataX JSON build template. */
    @PostMapping("/batchAdd")
    @ApiOperation("批量创建任务")
    public ReturnT<String> batchAdd(@RequestBody DataXBatchJsonBuildDto dto) throws IOException {
        if (dto.getTemplateId() ==0) {
            return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_temp")));
        }
        return jobService.batchAdd(dto);
    }
}

View File

@ -0,0 +1,177 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.kill.KillJob;
import com.ruoshui.bigdata.core.scheduler.JobScheduler;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.entity.JobInfo;
import com.ruoshui.bigdata.entity.JobLog;
import com.ruoshui.bigdata.mapper.JobInfoMapper;
import com.ruoshui.bigdata.mapper.JobLogMapper;
import com.ruoshui.core.biz.ExecutorBiz;
import com.ruoshui.core.biz.model.LogResult;
import com.ruoshui.core.biz.model.ReturnT;
import com.ruoshui.core.util.DateUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Job run-log endpoints: paged listing, live log tailing, kill, and log
 * retention cleanup.
 *
 * Created by jingwk on 2019/11/17
 */
@RestController
@RequestMapping("/api/log")
@Api(tags = "任务运行日志接口")
public class JobLogController {

    private static Logger logger = LoggerFactory.getLogger(JobLogController.class);

    @Autowired
    public JobInfoMapper jobInfoMapper;
    @Autowired
    public JobLogMapper jobLogMapper;

    /**
     * Paged log list.
     *
     * @param filterTime trigger-time range formatted "start - end"; ignored
     *                   when absent or malformed
     */
    @GetMapping("/pageList")
    @ApiOperation("运行日志列表")
    @PreAuthorize("@ss.hasPermi('datax:joblog:list')")
    public ReturnT<Map<String, Object>> pageList(
            @RequestParam(required = false, defaultValue = "0") int current,
            @RequestParam(required = false, defaultValue = "10") int size,
            int jobGroup, int jobId, int logStatus, String filterTime) {
        // parse param: split the "start - end" range into two timestamps
        Date triggerTimeStart = null;
        Date triggerTimeEnd = null;
        if (filterTime != null && filterTime.trim().length() > 0) {
            String[] temp = filterTime.split(" - ");
            if (temp.length == 2) {
                triggerTimeStart = DateUtil.parseDateTime(temp[0]);
                triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
            }
        }
        // page query
        List<JobLog> data = jobLogMapper.pageList((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        int cnt = jobLogMapper.pageListCount((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        // package result
        Map<String, Object> maps = new HashMap<>();
        maps.put("recordsTotal", cnt); // 总记录数
        maps.put("recordsFiltered", cnt); // 过滤后的总记录数
        maps.put("data", data); // 分页列表
        return new ReturnT<>(maps);
    }

    /**
     * Tails a remote executor's log from {@code fromLineNum}; marks the result
     * as ended once the log has been fully consumed and the job has a handle
     * code recorded.
     */
    @RequestMapping(value = "/logDetailCat", method = RequestMethod.GET)
    @ApiOperation("运行日志详情")
    @PreAuthorize("@ss.hasPermi('datax:joblog:logDetailCat')")
    public ReturnT<LogResult> logDetailCat(String executorAddress, long triggerTime, long logId, int fromLineNum) {
        try {
            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(executorAddress);
            ReturnT<LogResult> logResult = executorBiz.log(triggerTime, logId, fromLineNum);
            // is end: requested line is past the last available one
            if (logResult.getContent() != null && fromLineNum > logResult.getContent().getToLineNum()) {
                JobLog jobLog = jobLogMapper.load(logId);
                if (jobLog.getHandleCode() > 0) {
                    logResult.getContent().setEnd(true);
                }
            }
            return logResult;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }

    /**
     * Asks the remote executor to kill the running job behind log entry
     * {@code id}, then records the manual-kill outcome on the log row.
     */
    @RequestMapping(value = "/logKill", method = RequestMethod.POST)
    @ApiOperation("kill任务")
    @PreAuthorize("@ss.hasPermi('datax:joblog:killJob')")
    public ReturnT<String> logKill(int id) {
        // base check
        JobLog log = jobLogMapper.load(id);
        JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId());
        if (jobInfo == null) {
            return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }
        // only successfully-triggered runs can be killed
        if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
            return new ReturnT<>(500, I18nUtil.getString("joblog_kill_log_limit"));
        }
        // request of kill
        ReturnT<String> runResult;
        try {
            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(log.getExecutorAddress());
            runResult = executorBiz.kill(jobInfo.getId());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            runResult = new ReturnT<>(500, e.getMessage());
        }
        if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
            // mark the log row as manually killed
            log.setHandleCode(ReturnT.FAIL_CODE);
            log.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? runResult.getMsg() : ""));
            log.setHandleTime(new Date());
            jobLogMapper.updateHandleInfo(log);
            return new ReturnT<>(runResult.getMsg());
        } else {
            return new ReturnT<>(500, runResult.getMsg());
        }
    }

    /**
     * Deletes logs by retention policy {@code type}: 1-4 keep a time window
     * (1/3/6 months, 1 year), 5-8 keep the newest N rows (1k/10k/30k/100k),
     * 9 clears everything. Deletion runs in batches of 1000 ids.
     */
    @PostMapping("/clearLog")
    @ApiOperation("清理日志")
    @PreAuthorize("@ss.hasPermi('datax:joblog:clearLog')")
    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {
        Date clearBeforeTime = null;
        int clearBeforeNum = 0;
        // idiom: switch replaces the original nine-branch if/else chain
        switch (type) {
            case 1: clearBeforeTime = DateUtil.addMonths(new Date(), -1); break;  // 清理一个月之前日志数据
            case 2: clearBeforeTime = DateUtil.addMonths(new Date(), -3); break;  // 清理三个月之前日志数据
            case 3: clearBeforeTime = DateUtil.addMonths(new Date(), -6); break;  // 清理六个月之前日志数据
            case 4: clearBeforeTime = DateUtil.addYears(new Date(), -1); break;   // 清理一年之前日志数据
            case 5: clearBeforeNum = 1000; break;     // 清理一千条以前日志数据
            case 6: clearBeforeNum = 10000; break;    // 清理一万条以前日志数据
            case 7: clearBeforeNum = 30000; break;    // 清理三万条以前日志数据
            case 8: clearBeforeNum = 100000; break;   // 清理十万条以前日志数据
            case 9: clearBeforeNum = 0; break;        // 清理所有日志数据
            default:
                return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_invalid"));
        }
        // delete in batches until no matching ids remain
        List<Long> logIds;
        do {
            logIds = jobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && logIds.size() > 0) {
                jobLogMapper.clearLog(logIds);
            }
        } while (logIds != null && logIds.size() > 0);
        return ReturnT.SUCCESS;
    }

    /** Stops the job process identified by the given log entry. */
    @ApiOperation("停止该job作业")
    @PostMapping("/killJob")
    @PreAuthorize("@ss.hasPermi('datax:joblog:killJob')")
    public ReturnT<String> killJob(@RequestBody JobLog log) {
        return KillJob.trigger(log.getId(), log.getTriggerTime(), log.getExecutorAddress(), log.getProcessId());
    }
}

View File

@ -0,0 +1,117 @@
package com.czsj.web.controller.bigdata;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.ruoshui.bigdata.entity.JobProject;
import com.ruoshui.bigdata.service.JobProjectService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.io.Serializable;
import java.util.List;
/**
 * project manage controller — CRUD endpoints for DataX job projects.
 *
 * @author jingwk 2022-05-24 16:13:16
 */
@RestController
@RequestMapping("/api/jobProject")
@Api(tags = "项目管理模块")
public class JobProjectController extends BaseController {

    @Autowired
    private JobProjectService jobProjectService;

    /**
     * 分页查询所有数据
     *
     * @param searchVal optional name filter
     * @return paged project list
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @PreAuthorize("@ss.hasPermi('datax:jobProject:list')")
    public R<IPage<JobProject>> selectAll(@RequestParam(value = "searchVal", required = false) String searchVal,
                                          @RequestParam("pageSize") Integer pageSize,
                                          @RequestParam("pageNo") Integer pageNo) {
        return success(jobProjectService.getProjectListPaging(pageSize, pageNo, searchVal));
    }

    /**
     * Get all active projects (flag = true).
     *
     * @return unpaged project list
     */
    @ApiOperation("获取所有数据")
    @GetMapping("/list")
    @PreAuthorize("@ss.hasPermi('datax:jobProject:query')")
    public R<List<JobProject>> selectList() {
        // diamond added: the original used a raw-typed QueryWrapper
        QueryWrapper<JobProject> query = new QueryWrapper<>();
        query.eq("flag", true);
        return success(jobProjectService.list(query));
    }

    /**
     * 通过主键查询单条数据
     *
     * @param id 主键
     * @return 单条数据
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    @PreAuthorize("@ss.hasPermi('datax:jobProject:query')")
    public R<JobProject> selectOne(@PathVariable Serializable id) {
        return success(this.jobProjectService.getById(id));
    }

    /**
     * 新增数据
     *
     * @param entity 实体对象
     * @return 新增结果
     */
    @ApiOperation("新增数据")
    @PostMapping
    @PreAuthorize("@ss.hasPermi('datax:jobProject:add')")
    public R<Boolean> insert(HttpServletRequest request, @RequestBody JobProject entity) {
        entity.setUserId(getUserId());
        return success(this.jobProjectService.save(entity));
    }

    /**
     * 修改数据 — only name and description are editable.
     *
     * @param entity 实体对象
     * @return 修改结果
     */
    @PutMapping
    @ApiOperation("修改数据")
    @PreAuthorize("@ss.hasPermi('datax:jobProject:edit')")
    public R<Boolean> update(@RequestBody JobProject entity) {
        // FIX: the original copied name/description onto the loaded entity but
        // then persisted the raw request entity, silently discarding the copy
        // (and overwriting every other column with whatever the request sent).
        // Persist the loaded copy so only the editable fields change.
        JobProject project = jobProjectService.getById(entity.getId());
        project.setName(entity.getName());
        project.setDescription(entity.getDescription());
        return success(this.jobProjectService.updateById(project));
    }

    /**
     * 删除数据
     *
     * @param idList 主键结合
     * @return 删除结果
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    @PreAuthorize("@ss.hasPermi('datax:jobProject:remove')")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobProjectService.removeByIds(idList));
    }
}

View File

@ -0,0 +1,90 @@
package com.czsj.web.controller.bigdata;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.ruoshui.bigdata.entity.JobRegistry;
import com.ruoshui.bigdata.service.JobRegistryService;
import com.ruoshui.bigdata.util.PageUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
 * Executor registry monitoring: paged view of registered executor nodes.
 *
 * Created by jingwk on 2019/11/17
 */
@RestController
@RequestMapping("/api/jobRegistry")
@Api(tags = "执行器资源监控")
public class JobRegistryController extends BaseController {

    @Autowired
    private JobRegistryService jobRegistryService;

    /**
     * 分页查询所有数据 — paging/sort/filter parameters are read from the
     * current request via {@link BaseForm}.
     *
     * @return paged registry entries
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    public R<IPage<JobRegistry>> selectAll() {
        BaseForm requestForm = new BaseForm();
        QueryWrapper<JobRegistry> wrapper = pageQueryWrapperCustom(requestForm.getParameters());
        return success(this.jobRegistryService.page(requestForm.getPlusPagingQueryEntity(), wrapper));
    }

    /**
     * Builds a QueryWrapper from raw request parameters: sort directives
     * ("ascs"/"descs") become order-by clauses; remaining column parameters
     * become equality filters ("datasourceName" uses a LIKE match).
     *
     * @param map raw request parameter map
     * @return assembled wrapper
     */
    protected QueryWrapper<JobRegistry> pageQueryWrapperCustom(Map<String, Object> map) {
        // split paging/sort directives from column filters
        Map<String, Object> sortParams = PageUtils.filterPageParams(map);
        Map<String, Object> columnParams = PageUtils.filterColumnQueryParams(map);

        QueryWrapper<JobRegistry> wrapper = new QueryWrapper<>();

        // sort directives: camelCase field names converted to snake_case columns
        sortParams.forEach((key, value) -> {
            String column = StrUtil.toUnderlineCase(StrUtil.toString(value));
            if ("ascs".equals(key)) {
                wrapper.orderByAsc(column);
            } else if ("descs".equals(key)) {
                wrapper.orderByDesc(column);
            }
        });

        // column filters
        columnParams.forEach((key, value) -> {
            if ("datasourceName".equals(key)) {
                wrapper.like(StrUtil.toUnderlineCase(key), value);
            } else {
                wrapper.eq(StrUtil.toUnderlineCase(key), value);
            }
        });
        return wrapper;
    }
}

View File

@ -0,0 +1,89 @@
package com.czsj.web.controller.bigdata;
import com.ruoshui.bigdata.core.cron.CronExpression;
import com.ruoshui.bigdata.core.util.I18nUtil;
import com.ruoshui.bigdata.entity.JobTemplate;
import com.ruoshui.bigdata.service.JobTemplateService;
import com.ruoshui.core.biz.model.ReturnT;
import com.ruoshui.core.util.DateUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Job template endpoints: paged listing, CRUD, and cron fire-time preview.
 * Thin wrappers over {@link JobTemplateService}.
 *
 * @author jingwk 2019-12-22 16:13:16
 */
@Api(tags = "任务配置接口")
@RestController
@RequestMapping("/api/jobTemplate")
public class JobTemplateController extends BaseController{

    @Autowired
    private JobTemplateService jobTemplateService;

    /** Paged template list; {@code current} is 1-based, converted to an offset. */
    @GetMapping("/pageList")
    @ApiOperation("任务模板列表")
    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:list')")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(required = false, defaultValue = "0") int current,
                                                 @RequestParam(required = false, defaultValue = "10") int size,
                                                 int jobGroup, String jobDesc, String executorHandler, int userId, Integer[] projectIds) {
        int offset = (current - 1) * size;
        return new ReturnT<>(jobTemplateService.pageList(offset, size, jobGroup, jobDesc, executorHandler, userId, projectIds));
    }

    /** Creates a template, stamping it with the current user's id. */
    @PostMapping("/add")
    @ApiOperation("添加任务模板")
    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:add')")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getUserId());
        return jobTemplateService.add(jobTemplate);
    }

    /** Updates a template, stamping it with the current user's id. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:edit')")
    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getUserId());
        return jobTemplateService.update(jobTemplate);
    }

    /** Deletes the template with the given id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务模板")
    @PreAuthorize("@ss.hasPermi('datax:jobTemplate:remove')")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobTemplateService.remove(id);
    }

    /**
     * Previews the next 5 fire times of a cron expression, starting from now.
     * Stops early if the expression yields fewer future times.
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> times = new ArrayList<>();
        try {
            CronExpression expression = new CronExpression(cron);
            Date cursor = new Date();
            for (int i = 0; i < 5; i++) {
                cursor = expression.getNextValidTimeAfter(cursor);
                if (cursor == null) {
                    break;
                }
                times.add(DateUtil.formatDateTime(cursor));
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(times);
    }
}

View File

@ -0,0 +1,106 @@
package com.czsj.web.controller.bigdata;
import com.baomidou.mybatisplus.extension.api.R;
import com.ruoshui.bigdata.service.DatasourceQueryService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
/**
 * Controller exposing datasource metadata lookups — database names,
 * collection names, schemas, tables and columns — via
 * {@link DatasourceQueryService}.
 *
 * @author jingwk
 * @ClassName MetadataController
 * @Version 2.1.2
 * @since 2022/05/31 20:48
 */
@RestController
@RequestMapping("api/metadata")
@Api(tags = "jdbc数据库查询控制器")
public class MetadataController extends BaseController {

    @Autowired
    private DatasourceQueryService datasourceQueryService;

    /**
     * Lists database names for the given datasource id.
     *
     * @param datasourceId datasource id
     * @return database names
     */
    @GetMapping("/getDBs")
    @ApiOperation("根据数据源id获取mongo库名")
    public R<List<String>> getDBs(Long datasourceId) throws IOException {
        List<String> databases = datasourceQueryService.getDBs(datasourceId);
        return success(databases);
    }

    /**
     * Lists collection names for the given datasource id and database name.
     *
     * @param datasourceId datasource id
     * @param dbName       database name
     * @return collection names
     */
    @GetMapping("/collectionNames")
    @ApiOperation("根据数据源id,dbname获取CollectionNames")
    public R<List<String>> getCollectionNames(Long datasourceId, String dbName) throws IOException {
        List<String> collections = datasourceQueryService.getCollectionNames(datasourceId, dbName);
        return success(collections);
    }

    /**
     * Lists table schemas (e.g. PostgreSQL schemas) for the given datasource id.
     *
     * @param datasourceId datasource id
     * @return schema names
     */
    @GetMapping("/getDBSchema")
    @ApiOperation("根据数据源id获取 db schema")
    public R<List<String>> getTableSchema(Long datasourceId) {
        List<String> schemas = datasourceQueryService.getTableSchema(datasourceId);
        return success(schemas);
    }

    /**
     * Lists available table names for the given datasource id and schema.
     *
     * @param datasourceId datasource id
     * @param tableSchema  schema to look in
     * @return table names
     */
    @GetMapping("/getTables")
    @ApiOperation("根据数据源id获取可用表名")
    public R<List<String>> getTableNames(Long datasourceId, String tableSchema) throws IOException {
        List<String> tables = datasourceQueryService.getTables(datasourceId, tableSchema);
        return success(tables);
    }

    /**
     * Lists all column names of a table.
     *
     * @param datasourceId datasource id
     * @param tableName    table name
     * @return column names
     */
    @GetMapping("/getColumns")
    @ApiOperation("根据数据源id和表名获取所有字段")
    public R<List<String>> getColumns(Long datasourceId, String tableName) throws IOException {
        List<String> columns = datasourceQueryService.getColumns(datasourceId, tableName);
        return success(columns);
    }

    /**
     * Lists all column names produced by an ad-hoc SQL query.
     *
     * @param datasourceId datasource id
     * @param querySql     SQL statement to inspect
     * @return column names of the query's result set
     */
    @GetMapping("/getColumnsByQuerySql")
    @ApiOperation("根据数据源id和sql语句获取所有字段")
    public R<List<String>> getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException {
        List<String> columns = datasourceQueryService.getColumnsByQuerySql(datasourceId, querySql);
        return success(columns);
    }
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,51 @@
<configuration>
    <!-- Values injected from the Spring environment (application yml) -->
    <springProperty scope="context" name="LOG_HOME" source="log.file.root"/>
    <springProperty scope="context" name="LOG_SIZE" source="log.file.maxSize"/>
    <springProperty scope="context" name="LOG_NUM" source="log.file.maxHistory"/>
    <springProperty scope="context" name="APP_NAME" source="app.app-name"/>
    <springProperty scope="context" name="APP_NAME_NODE" source="app.app-name-node"/>
    <springProperty scope="context" name="TRACE_URLS" source="yttrace.urls"/>
    <springProperty scope="context" name="TRACE_CLUSTERNAME" source="yttrace.clusterName"/>
    <!-- Console output -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Pattern: %d date, %thread thread name, %-5level level padded to
                 5 chars, %msg message, %n newline -->
            <!--<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - [%logger{0} - %M] %msg%n</pattern>-->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - [%logger{0}:%L - %M] %msg%n</pattern>
        </encoder>
    </appender>
    <logger name="org" level="INFO"/>
    <logger name="com.alibaba" level="INFO"/>
    <!--<logger name="springfox" level="ERROR"/>-->
    <!-- mybatis log configuration (currently disabled) -->
    <!--<logger name="jdbc.sqlonly" level="OFF"/>-->
    <!--<logger name="jdbc.sqltiming" level="OFF"/>-->
    <!--<logger name="jdbc.audit" level="OFF"/>-->
    <!--<logger name="jdbc.resultset" level="OFF"/>-->
    <!--<logger name="jdbc.resultsettable" level="OFF"/>-->
    <!--<logger name="jdbc.connection" level="OFF"/>-->
    <!--<logger name="com.apache.ibatis" level="${log.sql.level}"/>-->
    <!--<logger name="com.ibatis" level="${log.sql.level}"/>-->
    <!--<logger name="java.sql.Connection" level="${log.sql.level}"/>-->
    <!--<logger name="java.sql.Statement" level="${log.sql.level}"/>-->
    <!--<logger name="java.sql.PreparedStatement" level="${log.sql.level}"/>-->
    <!--<logger name="java.sql.ResultSet" level="${log.sql.level}"/>-->
    <logger name="org.dozer" level="WARN"/>
    <logger name="net.rubyeye.xmemcached" level="WARN"/>
    <logger name="com.google.code" level="WARN"/>
    <root level="INFO">
        <appender-ref ref="STDOUT"/>
    </root>
</configuration>

View File

@ -0,0 +1,96 @@
<configuration>
    <!-- Values injected from the Spring environment (application yml) -->
    <springProperty scope="context" name="LOG_HOME" source="log.file.root"/>
    <springProperty scope="context" name="LOG_SIZE" source="log.file.maxSize"/>
    <springProperty scope="context" name="LOG_NUM" source="log.file.maxHistory"/>
    <springProperty scope="context" name="APP_NAME" source="app.app-name"/>
    <springProperty scope="context" name="APP_NAME_NODE" source="app.app-name-node"/>
    <!-- Console output -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Pattern: %d date, %thread thread name, %-5level level padded to
                 5 chars, %msg message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - [%logger{0} - %M] %msg%n</pattern>
        </encoder>
    </appender>
    <appender name="PROJECT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_HOME}/info.log</file>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Name pattern of the rolled-over log files -->
            <fileNamePattern>${LOG_HOME}/old/info.log.%d{yyyy-MM-dd}.%i</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- Max size per file; %i in the name is incremented once exceeded -->
                <maxFileSize>${LOG_SIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- Max number of days rolled files are retained -->
            <maxHistory>${LOG_NUM}</maxHistory>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Pattern: %d date, %thread thread name, %-5level level padded to
                 5 chars, %msg message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [TxId : %X{PtxId}] %-5level %logger{50} - [%logger{0} - %M] %msg%n</pattern>
        </encoder>
    </appender>
    <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_HOME}/error.log</file>
        <!-- Only record ERROR-level events in this appender -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <!-- Time-based rolling policy: triggers and performs the rollover -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_HOME}/old/error.log.%d{yyyy-MM-dd}.%i</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- Max size per file; %i in the name is incremented once exceeded -->
                <maxFileSize>${LOG_SIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- FIX: was "!--...-->" (malformed comment leaking stray text into
                 the rollingPolicy element); restored to a proper XML comment. -->
            <!-- Max number of days rolled files are retained -->
            <maxHistory>${LOG_NUM}</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger - [%logger{0} - %M] %msg%n</pattern>
        </encoder>
    </appender>
    <logger name="org" level="INFO"/>
    <logger name="com.alibaba" level="WARN"/>
    <logger name="springfox" level="ERROR"/>
    <!-- mybatis log configuration -->
    <logger name="jdbc.sqlonly" level="OFF"/>
    <logger name="jdbc.sqltiming" level="OFF"/>
    <logger name="jdbc.audit" level="OFF"/>
    <logger name="jdbc.resultset" level="OFF"/>
    <logger name="jdbc.resultsettable" level="OFF"/>
    <logger name="jdbc.connection" level="OFF"/>
    <logger name="com.apache.ibatis" level="${log.sql.level}"/>
    <logger name="com.ibatis" level="${log.sql.level}"/>
    <logger name="java.sql.Connection" level="${log.sql.level}"/>
    <logger name="java.sql.Statement" level="${log.sql.level}"/>
    <logger name="java.sql.PreparedStatement" level="${log.sql.level}"/>
    <logger name="java.sql.ResultSet" level="${log.sql.level}"/>
    <logger name="org.dozer" level="WARN"/>
    <logger name="net.rubyeye.xmemcached" level="WARN"/>
    <logger name="com.google.code" level="WARN"/>
    <root level="INFO">
        <appender-ref ref="PROJECT"/>
        <appender-ref ref="ERROR"/>
    </root>
</configuration>

View File

@ -0,0 +1,87 @@
<configuration>
    <!-- Values injected from the Spring environment (application yml) -->
    <springProperty scope="context" name="LOG_HOME" source="log.file.root"/>
    <springProperty scope="context" name="LOG_SIZE" source="log.file.maxSize"/>
    <springProperty scope="context" name="LOG_NUM" source="log.file.maxHistory"/>
    <springProperty scope="context" name="APP_NAME" source="app.app-name"/>
    <springProperty scope="context" name="APP_NAME_NODE" source="app.app-name-node"/>
    <!-- Console output -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Pattern: %d date, %thread thread name, %-5level level padded to
                 5 chars, %msg message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}[%M:%L] %msg%n</pattern>
        </encoder>
    </appender>
    <appender name="PROJECT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_HOME}/info.log</file>
        <!-- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter> -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Name pattern of the rolled-over log files -->
            <fileNamePattern>${LOG_HOME}/old/info.log.%d{yyyy-MM-dd}.%i</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- Max size per file; %i in the name is incremented once exceeded -->
                <maxFileSize>${LOG_SIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- Max number of days rolled files are retained -->
            <maxHistory>${LOG_NUM}</maxHistory>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Pattern: %d date, %thread thread name, %-5level level padded to
                 5 chars, %msg message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%logger{50} - %M:%L] %msg%n</pattern>
        </encoder>
    </appender>
    <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_HOME}/error.log</file>
        <!-- Only record ERROR-level events in this appender -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <!-- Time-based rolling policy: triggers and performs the rollover -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_HOME}/old/error.log.%d{yyyy-MM-dd}.%i</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- Max size per file; %i in the name is incremented once exceeded -->
                <maxFileSize>${LOG_SIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- Max number of days rolled files are retained -->
            <maxHistory>${LOG_NUM}</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%logger - %M:%L] %msg%n</pattern>
        </encoder>
    </appender>
    <logger name="org" level="INFO"/>
    <logger name="com.alibaba" level="INFO"/>
    <logger name="springfox" level="INFO"/>
    <!-- mybatis log configuration -->
    <logger name="jdbc.sqlonly" level="OFF"/>
    <logger name="jdbc.sqltiming" level="OFF"/>
    <logger name="jdbc.audit" level="OFF"/>
    <logger name="jdbc.resultset" level="OFF"/>
    <logger name="jdbc.resultsettable" level="OFF"/>
    <logger name="jdbc.connection" level="OFF"/>
    <logger name="com.apache.ibatis" level="${log.sql.level}"/>
    <logger name="com.ibatis" level="${log.sql.level}"/>
    <logger name="java.sql.Connection" level="${log.sql.level}"/>
    <logger name="java.sql.Statement" level="${log.sql.level}"/>
    <logger name="java.sql.PreparedStatement" level="${log.sql.level}"/>
    <logger name="java.sql.ResultSet" level="${log.sql.level}"/>
    <logger name="org.dozer" level="INFO"/>
    <logger name="net.rubyeye.xmemcached" level="INFO"/>
    <logger name="com.google.code" level="INFO"/>
    <root level="INFO">
        <appender-ref ref="PROJECT"/>
        <appender-ref ref="ERROR"/>
    </root>
</configuration>

Binary file not shown.

File diff suppressed because one or more lines are too long