Compare commits
25 Commits
0c78cafc84...4fc53bf74a
SHA1
---
4fc53bf74a
d4a9b186c2
8670465683
e7209d9684
51b4ebde80
996e977aa3
face908fab
e532b21465
4697cd95a1
2b49cd4e00
1318cfca60
2b8c281b1f
91e03d7c53
83dde4dbff
cadbb3d490
2e48a9def7
8dfee0c830
2ef79ab159
d087e1b8c5
f221ee3f69
5ea0ff490b
7048ea42b2
c1fe4914b0
89d7ed90a9
47fead11ad
.gitignore (vendored): 1 line changed
@@ -1,2 +1,3 @@
 /target/
 /logs/
+.idea/
@@ -13,6 +13,6 @@ RUN yum install kde-l10n-Chinese -y && \
 yum install glibc-common -y && \
 localedef -c -f UTF-8 -i zh_CN zh_CN.utf8
 ENV LC_ALL zh_CN.UTF-8
-ENV JAVA_OPTS="-Xms256m -Xmx2048m"
+ENV JAVA_OPTS="-server -Xms5g -Xmx5g -XX:MaxMetaspaceSize=512m -XX:+UseG1GC -XX:MaxGCPauseMillis=200"
 WORKDIR /data-dump
 ENTRYPOINT [ "sh", "-c", "nginx && sh /xxl-job-admin/start.sh && java $JAVA_OPTS -jar data-dump.jar" ]
@@ -1,8 +1,14 @@
 # 程序配置
 # xxl job数据库链接
+#cook
+#dbUrl="jdbc:mysql://127.0.0.1:3306/data-dump-xxl-job?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&serverTimezone=Asia/Shanghai"
+#dbUsername="root"
+#dbPassword="celnet@2025.bln"
+#携科
 dbUrl="jdbc:mysql://127.0.0.1:3306/data-dump-xxl-job?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&serverTimezone=Asia/Shanghai"
 dbUsername="root"
-dbPassword="celnet@2025.bln"
+dbPassword="Celnet2025.QY"
+#其它
 #dbUrl="jdbc:mysql://127.0.0.113306/data-dump-xxl-job?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&serverTimezone=Asia/Shanghai"
 #dbUsername="msg"
 #dbPassword="msg@2021"
Binary file not shown.
Binary file not shown.
@@ -1,936 +0,0 @@
[data-dump] 2025-02-14 17:50:26.504 ERROR 33936 [main] com.zaxxer.hikari.pool.HikariPool[594] HikariPool-1 - Exception during pool initialization.

com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at com.mysql.cj.jdbc.exceptions.SQLError.createCommunicationsException(SQLError.java:174)
at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:64)
at com.mysql.cj.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:828)
at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:448)
at com.mysql.cj.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:241)
at com.mysql.cj.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:198)
at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:138)
at com.zaxxer.hikari.pool.PoolBase.newConnection(PoolBase.java:364)
at com.zaxxer.hikari.pool.PoolBase.newPoolEntry(PoolBase.java:206)
at com.zaxxer.hikari.pool.HikariPool.createPoolEntry(HikariPool.java:476)
at com.zaxxer.hikari.pool.HikariPool.checkFailFast(HikariPool.java:561)
at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:115)
at com.zaxxer.hikari.HikariDataSource.getConnection(HikariDataSource.java:112)
at org.springframework.jdbc.datasource.DataSourceUtils.fetchConnection(DataSourceUtils.java:159)
at org.springframework.jdbc.datasource.DataSourceUtils.doGetConnection(DataSourceUtils.java:117)
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:80)
at org.mybatis.spring.transaction.SpringManagedTransaction.openConnection(SpringManagedTransaction.java:80)
at org.mybatis.spring.transaction.SpringManagedTransaction.getConnection(SpringManagedTransaction.java:67)
at org.apache.ibatis.executor.BaseExecutor.getConnection(BaseExecutor.java:337)
at org.apache.ibatis.executor.SimpleExecutor.prepareStatement(SimpleExecutor.java:86)
at org.apache.ibatis.executor.SimpleExecutor.doQuery(SimpleExecutor.java:62)
at org.apache.ibatis.executor.BaseExecutor.queryFromDatabase(BaseExecutor.java:325)
at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:156)
at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:109)
at com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor.intercept(MybatisPlusInterceptor.java:81)
at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:62)
at jdk.proxy2/jdk.proxy2.$Proxy97.query(Unknown Source)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:151)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:145)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:140)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:427)
at jdk.proxy2/jdk.proxy2.$Proxy74.selectList(Unknown Source)
at org.mybatis.spring.SqlSessionTemplate.selectList(SqlSessionTemplate.java:224)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.executeForMany(MybatisMapperMethod.java:166)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:77)
at com.baomidou.mybatisplus.core.override.MybatisMapperProxy$PlainMethodInvoker.invoke(MybatisMapperProxy.java:148)
at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:89)
at jdk.proxy2/jdk.proxy2.$Proxy75.list(Unknown Source)
at com.celnet.datadump.config.SalesforceExecutor.init(SalesforceExecutor.java:36)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleElement.invoke(InitDestroyAnnotationBeanPostProcessor.java:389)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleMetadata.invokeInitMethods(InitDestroyAnnotationBeanPostProcessor.java:333)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor.postProcessBeforeInitialization(InitDestroyAnnotationBeanPostProcessor.java:157)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.applyBeanPostProcessorsBeforeInitialization(AbstractAutowireCapableBeanFactory.java:440)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1796)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:620)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1389)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1309)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:656)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:639)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:399)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1389)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1309)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:656)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:639)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:399)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:953)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:918)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:448)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:339)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1365)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1354)
at com.celnet.datadump.DataDumpApplication.main(DataDumpApplication.java:13)
Caused by: com.mysql.cj.exceptions.CJCommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499)
at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:480)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:61)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:105)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:151)
at com.mysql.cj.exceptions.ExceptionFactory.createCommunicationsException(ExceptionFactory.java:167)
at com.mysql.cj.protocol.a.NativeSocketConnection.connect(NativeSocketConnection.java:89)
at com.mysql.cj.NativeSession.connect(NativeSession.java:120)
at com.mysql.cj.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:948)
at com.mysql.cj.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:818)
... 93 common frames omitted
Caused by: java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.SocksSocketImpl.connect(SocksSocketImpl.java:327)
at java.base/java.net.Socket.connect(Socket.java:633)
at com.mysql.cj.protocol.StandardSocketFactory.connect(StandardSocketFactory.java:153)
at com.mysql.cj.protocol.a.NativeSocketConnection.connect(NativeSocketConnection.java:63)
... 96 common frames omitted

[data-dump] 2025-02-14 17:50:26.524 ERROR 33936 [main] org.springframework.boot.SpringApplication[865] Application run failed

org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'fileManagerController': Unsatisfied dependency expressed through field 'fileManagerService'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'fileManagerServiceImpl': Unsatisfied dependency expressed through field 'salesforceExecutor'; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'salesforceExecutor': Invocation of init method failed; nested exception is org.mybatis.spring.MyBatisSystemException: nested exception is org.apache.ibatis.exceptions.PersistenceException:
### Error querying database. Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
### The error may exist in file [D:\data-dump\target\classes\mapper\CustomMapper.xml]
### The error may involve com.celnet.datadump.mapper.CustomMapper.list
### The error occurred while executing a query
### Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:659)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:639)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:399)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:953)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:918)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:448)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:339)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1365)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1354)
at com.celnet.datadump.DataDumpApplication.main(DataDumpApplication.java:13)
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'fileManagerServiceImpl': Unsatisfied dependency expressed through field 'salesforceExecutor'; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'salesforceExecutor': Invocation of init method failed; nested exception is org.mybatis.spring.MyBatisSystemException: nested exception is org.apache.ibatis.exceptions.PersistenceException:
### Error querying database. Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
### The error may exist in file [D:\data-dump\target\classes\mapper\CustomMapper.xml]
### The error may involve com.celnet.datadump.mapper.CustomMapper.list
### The error occurred while executing a query
### Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:659)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:639)
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:119)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:399)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1431)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:619)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1389)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1309)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:656)
... 20 common frames omitted
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'salesforceExecutor': Invocation of init method failed; nested exception is org.mybatis.spring.MyBatisSystemException: nested exception is org.apache.ibatis.exceptions.PersistenceException:
### Error querying database. Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
### The error may exist in file [D:\data-dump\target\classes\mapper\CustomMapper.xml]
### The error may involve com.celnet.datadump.mapper.CustomMapper.list
### The error occurred while executing a query
### Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor.postProcessBeforeInitialization(InitDestroyAnnotationBeanPostProcessor.java:160)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.applyBeanPostProcessorsBeforeInitialization(AbstractAutowireCapableBeanFactory.java:440)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1796)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:620)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1389)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1309)
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.resolveFieldValue(AutowiredAnnotationBeanPostProcessor.java:656)
... 34 common frames omitted
Caused by: org.mybatis.spring.MyBatisSystemException: nested exception is org.apache.ibatis.exceptions.PersistenceException:
### Error querying database. Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
### The error may exist in file [D:\data-dump\target\classes\mapper\CustomMapper.xml]
### The error may involve com.celnet.datadump.mapper.CustomMapper.list
### The error occurred while executing a query
### Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.mybatis.spring.MyBatisExceptionTranslator.translateExceptionIfPossible(MyBatisExceptionTranslator.java:96)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:441)
at jdk.proxy2/jdk.proxy2.$Proxy74.selectList(Unknown Source)
at org.mybatis.spring.SqlSessionTemplate.selectList(SqlSessionTemplate.java:224)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.executeForMany(MybatisMapperMethod.java:166)
at com.baomidou.mybatisplus.core.override.MybatisMapperMethod.execute(MybatisMapperMethod.java:77)
at com.baomidou.mybatisplus.core.override.MybatisMapperProxy$PlainMethodInvoker.invoke(MybatisMapperProxy.java:148)
at com.baomidou.mybatisplus.core.override.MybatisMapperProxy.invoke(MybatisMapperProxy.java:89)
at jdk.proxy2/jdk.proxy2.$Proxy75.list(Unknown Source)
at com.celnet.datadump.config.SalesforceExecutor.init(SalesforceExecutor.java:36)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleElement.invoke(InitDestroyAnnotationBeanPostProcessor.java:389)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleMetadata.invokeInitMethods(InitDestroyAnnotationBeanPostProcessor.java:333)
at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor.postProcessBeforeInitialization(InitDestroyAnnotationBeanPostProcessor.java:157)
... 46 common frames omitted
Caused by: org.apache.ibatis.exceptions.PersistenceException:
### Error querying database. Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
### The error may exist in file [D:\data-dump\target\classes\mapper\CustomMapper.xml]
### The error may involve com.celnet.datadump.mapper.CustomMapper.list
### The error occurred while executing a query
### Cause: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.apache.ibatis.exceptions.ExceptionFactory.wrapException(ExceptionFactory.java:30)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:153)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:145)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:140)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:427)
... 61 common frames omitted
Caused by: org.springframework.jdbc.CannotGetJdbcConnectionException: Failed to obtain JDBC Connection; nested exception is com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:83)
at org.mybatis.spring.transaction.SpringManagedTransaction.openConnection(SpringManagedTransaction.java:80)
at org.mybatis.spring.transaction.SpringManagedTransaction.getConnection(SpringManagedTransaction.java:67)
at org.apache.ibatis.executor.BaseExecutor.getConnection(BaseExecutor.java:337)
at org.apache.ibatis.executor.SimpleExecutor.prepareStatement(SimpleExecutor.java:86)
at org.apache.ibatis.executor.SimpleExecutor.doQuery(SimpleExecutor.java:62)
at org.apache.ibatis.executor.BaseExecutor.queryFromDatabase(BaseExecutor.java:325)
at org.apache.ibatis.executor.BaseExecutor.query(BaseExecutor.java:156)
at org.apache.ibatis.executor.CachingExecutor.query(CachingExecutor.java:109)
at com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor.intercept(MybatisPlusInterceptor.java:81)
at org.apache.ibatis.plugin.Plugin.invoke(Plugin.java:62)
at jdk.proxy2/jdk.proxy2.$Proxy97.query(Unknown Source)
at org.apache.ibatis.session.defaults.DefaultSqlSession.selectList(DefaultSqlSession.java:151)
... 68 common frames omitted
Caused by: com.mysql.cj.jdbc.exceptions.CommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at com.mysql.cj.jdbc.exceptions.SQLError.createCommunicationsException(SQLError.java:174)
at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:64)
at com.mysql.cj.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:828)
at com.mysql.cj.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:448)
at com.mysql.cj.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:241)
at com.mysql.cj.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:198)
at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:138)
at com.zaxxer.hikari.pool.PoolBase.newConnection(PoolBase.java:364)
at com.zaxxer.hikari.pool.PoolBase.newPoolEntry(PoolBase.java:206)
at com.zaxxer.hikari.pool.HikariPool.createPoolEntry(HikariPool.java:476)
at com.zaxxer.hikari.pool.HikariPool.checkFailFast(HikariPool.java:561)
at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:115)
at com.zaxxer.hikari.HikariDataSource.getConnection(HikariDataSource.java:112)
at org.springframework.jdbc.datasource.DataSourceUtils.fetchConnection(DataSourceUtils.java:159)
at org.springframework.jdbc.datasource.DataSourceUtils.doGetConnection(DataSourceUtils.java:117)
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:80)
... 80 common frames omitted
Caused by: com.mysql.cj.exceptions.CJCommunicationsException: Communications link failure

The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499)
at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:480)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:61)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:105)
at com.mysql.cj.exceptions.ExceptionFactory.createException(ExceptionFactory.java:151)
at com.mysql.cj.exceptions.ExceptionFactory.createCommunicationsException(ExceptionFactory.java:167)
at com.mysql.cj.protocol.a.NativeSocketConnection.connect(NativeSocketConnection.java:89)
at com.mysql.cj.NativeSession.connect(NativeSession.java:120)
at com.mysql.cj.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:948)
at com.mysql.cj.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:818)
... 93 common frames omitted
Caused by: java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.SocksSocketImpl.connect(SocksSocketImpl.java:327)
at java.base/java.net.Socket.connect(Socket.java:633)
at com.mysql.cj.protocol.StandardSocketFactory.connect(StandardSocketFactory.java:153)
at com.mysql.cj.protocol.a.NativeSocketConnection.connect(NativeSocketConnection.java:63)
... 96 common frames omitted

[data-dump] 2025-02-14 17:51:26.308 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:51:56.325 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:52:26.329 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:52:56.344 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:53:26.354 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:53:56.365 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:54:26.373 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information

java.net.ConnectException: Connection refused: no further information
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
at java.base/java.net.Socket.connect(Socket.java:633)
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
at java.base/java.lang.Thread.run(Thread.java:842)

[data-dump] 2025-02-14 17:54:51.839 ERROR 37352 [main] org.springframework.boot.SpringApplication[865] Application run failed

org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'xxlJobExecutor' defined in class path resource [com/celnet/datadump/config/XxlJobConfig.class]: Invocation of init method failed; nested exception is com.xxl.rpc.util.XxlRpcException: xxl-rpc provider port[8887] is used.
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1804)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:620)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:953)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:918)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:448)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:339)
at org.springframework.boot.test.context.SpringBootContextLoader.loadContext(SpringBootContextLoader.java:136)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContextInternal(DefaultCacheAwareContextLoaderDelegate.java:99)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:124)
at org.springframework.test.context.support.DefaultTestContext.getApplicationContext(DefaultTestContext.java:124)
at org.springframework.test.context.web.ServletTestExecutionListener.setUpRequestContextIfNecessary(ServletTestExecutionListener.java:190)
at org.springframework.test.context.web.ServletTestExecutionListener.prepareTestInstance(ServletTestExecutionListener.java:132)
at org.springframework.test.context.TestContextManager.prepareTestInstance(TestContextManager.java:248)
at org.springframework.test.context.junit.jupiter.SpringExtension.postProcessTestInstance(SpringExtension.java:138)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$invokeTestInstancePostProcessors$6(ClassBasedTestDescriptor.java:350)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.executeAndMaskThrowable(ClassBasedTestDescriptor.java:355)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$invokeTestInstancePostProcessors$7(ClassBasedTestDescriptor.java:350)
at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:179)
at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:310)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.invokeTestInstancePostProcessors(ClassBasedTestDescriptor.java:349)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$instantiateAndPostProcessTestInstance$4(ClassBasedTestDescriptor.java:270)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.instantiateAndPostProcessTestInstance(ClassBasedTestDescriptor.java:269)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$testInstancesProvider$2(ClassBasedTestDescriptor.java:259)
at java.base/java.util.Optional.orElseGet(Optional.java:364)
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$testInstancesProvider$3(ClassBasedTestDescriptor.java:258)
at org.junit.jupiter.engine.execution.TestInstancesProvider.getTestInstances(TestInstancesProvider.java:31)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.lambda$prepare$0(TestMethodTestDescriptor.java:101)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.prepare(TestMethodTestDescriptor.java:100)
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.prepare(TestMethodTestDescriptor.java:65)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$prepare$1(NodeTestTask.java:111)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.prepare(NodeTestTask.java:111)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:79)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.submit(SameThreadHierarchicalTestExecutorService.java:32)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.HierarchicalTestExecutor.execute(HierarchicalTestExecutor.java:57)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine.execute(HierarchicalTestEngine.java:51)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:108)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:88)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.lambda$execute$0(EngineExecutionOrchestrator.java:54)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.withInterceptedStreams(EngineExecutionOrchestrator.java:67)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:52)
|
|
||||||
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:96)
|
|
||||||
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:75)
|
|
||||||
at com.intellij.junit5.JUnit5IdeaTestRunner.startRunnerWithArgs(JUnit5IdeaTestRunner.java:57)
|
|
||||||
at com.intellij.rt.junit.IdeaTestRunner$Repeater$1.execute(IdeaTestRunner.java:38)
|
|
||||||
at com.intellij.rt.execution.junit.TestsRepeater.repeat(TestsRepeater.java:11)
|
|
||||||
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:35)
|
|
||||||
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:232)
|
|
||||||
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:55)
|
|
||||||
Caused by: com.xxl.rpc.util.XxlRpcException: xxl-rpc provider port[8887] is used.
|
|
||||||
at com.xxl.rpc.remoting.provider.XxlRpcProviderFactory.start(XxlRpcProviderFactory.java:117)
|
|
||||||
at com.xxl.job.core.executor.XxlJobExecutor.initRpcProvider(XxlJobExecutor.java:162)
|
|
||||||
at com.xxl.job.core.executor.XxlJobExecutor.start(XxlJobExecutor.java:84)
|
|
||||||
at com.xxl.job.core.executor.impl.XxlJobSpringExecutor.afterPropertiesSet(XxlJobSpringExecutor.java:42)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1863)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1800)
|
|
||||||
... 84 common frames omitted
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:54:51.842 ERROR 37352 [main] org.springframework.test.context.TestContextManager[252] Caught exception while allowing TestExecutionListener [org.springframework.test.context.web.ServletTestExecutionListener@74bada02] to prepare test instance [com.celnet.datadump.DataDumpConnetTests@7120daa6]
|
|
||||||
|
|
||||||
java.lang.IllegalStateException: Failed to load ApplicationContext
|
|
||||||
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:132)
|
|
||||||
at org.springframework.test.context.support.DefaultTestContext.getApplicationContext(DefaultTestContext.java:124)
|
|
||||||
at org.springframework.test.context.web.ServletTestExecutionListener.setUpRequestContextIfNecessary(ServletTestExecutionListener.java:190)
|
|
||||||
at org.springframework.test.context.web.ServletTestExecutionListener.prepareTestInstance(ServletTestExecutionListener.java:132)
|
|
||||||
at org.springframework.test.context.TestContextManager.prepareTestInstance(TestContextManager.java:248)
|
|
||||||
at org.springframework.test.context.junit.jupiter.SpringExtension.postProcessTestInstance(SpringExtension.java:138)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$invokeTestInstancePostProcessors$6(ClassBasedTestDescriptor.java:350)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.executeAndMaskThrowable(ClassBasedTestDescriptor.java:355)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$invokeTestInstancePostProcessors$7(ClassBasedTestDescriptor.java:350)
|
|
||||||
at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
|
|
||||||
at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:179)
|
|
||||||
at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)
|
|
||||||
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)
|
|
||||||
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)
|
|
||||||
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:310)
|
|
||||||
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
|
|
||||||
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
|
|
||||||
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.invokeTestInstancePostProcessors(ClassBasedTestDescriptor.java:349)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$instantiateAndPostProcessTestInstance$4(ClassBasedTestDescriptor.java:270)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.instantiateAndPostProcessTestInstance(ClassBasedTestDescriptor.java:269)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$testInstancesProvider$2(ClassBasedTestDescriptor.java:259)
|
|
||||||
at java.base/java.util.Optional.orElseGet(Optional.java:364)
|
|
||||||
at org.junit.jupiter.engine.descriptor.ClassBasedTestDescriptor.lambda$testInstancesProvider$3(ClassBasedTestDescriptor.java:258)
|
|
||||||
at org.junit.jupiter.engine.execution.TestInstancesProvider.getTestInstances(TestInstancesProvider.java:31)
|
|
||||||
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.lambda$prepare$0(TestMethodTestDescriptor.java:101)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.prepare(TestMethodTestDescriptor.java:100)
|
|
||||||
at org.junit.jupiter.engine.descriptor.TestMethodTestDescriptor.prepare(TestMethodTestDescriptor.java:65)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$prepare$1(NodeTestTask.java:111)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.prepare(NodeTestTask.java:111)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:79)
|
|
||||||
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
|
|
||||||
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.invokeAll(SameThreadHierarchicalTestExecutorService.java:38)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$5(NodeTestTask.java:143)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$7(NodeTestTask.java:129)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.Node.around(Node.java:137)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.lambda$executeRecursively$8(NodeTestTask.java:127)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.ThrowableCollector.execute(ThrowableCollector.java:73)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.executeRecursively(NodeTestTask.java:126)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.NodeTestTask.execute(NodeTestTask.java:84)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.SameThreadHierarchicalTestExecutorService.submit(SameThreadHierarchicalTestExecutorService.java:32)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.HierarchicalTestExecutor.execute(HierarchicalTestExecutor.java:57)
|
|
||||||
at org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine.execute(HierarchicalTestEngine.java:51)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:108)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:88)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.lambda$execute$0(EngineExecutionOrchestrator.java:54)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.withInterceptedStreams(EngineExecutionOrchestrator.java:67)
|
|
||||||
at org.junit.platform.launcher.core.EngineExecutionOrchestrator.execute(EngineExecutionOrchestrator.java:52)
|
|
||||||
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:96)
|
|
||||||
at org.junit.platform.launcher.core.DefaultLauncher.execute(DefaultLauncher.java:75)
|
|
||||||
at com.intellij.junit5.JUnit5IdeaTestRunner.startRunnerWithArgs(JUnit5IdeaTestRunner.java:57)
|
|
||||||
at com.intellij.rt.junit.IdeaTestRunner$Repeater$1.execute(IdeaTestRunner.java:38)
|
|
||||||
at com.intellij.rt.execution.junit.TestsRepeater.repeat(TestsRepeater.java:11)
|
|
||||||
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:35)
|
|
||||||
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:232)
|
|
||||||
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:55)
|
|
||||||
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'xxlJobExecutor' defined in class path resource [com/celnet/datadump/config/XxlJobConfig.class]: Invocation of init method failed; nested exception is com.xxl.rpc.util.XxlRpcException: xxl-rpc provider port[8887] is used.
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1804)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:620)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
|
|
||||||
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
|
|
||||||
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
|
|
||||||
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
|
|
||||||
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
|
|
||||||
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:953)
|
|
||||||
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:918)
|
|
||||||
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
|
|
||||||
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
|
|
||||||
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:448)
|
|
||||||
at org.springframework.boot.SpringApplication.run(SpringApplication.java:339)
|
|
||||||
at org.springframework.boot.test.context.SpringBootContextLoader.loadContext(SpringBootContextLoader.java:136)
|
|
||||||
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContextInternal(DefaultCacheAwareContextLoaderDelegate.java:99)
|
|
||||||
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:124)
|
|
||||||
... 69 common frames omitted
|
|
||||||
Caused by: com.xxl.rpc.util.XxlRpcException: xxl-rpc provider port[8887] is used.
|
|
||||||
at com.xxl.rpc.remoting.provider.XxlRpcProviderFactory.start(XxlRpcProviderFactory.java:117)
|
|
||||||
at com.xxl.job.core.executor.XxlJobExecutor.initRpcProvider(XxlJobExecutor.java:162)
|
|
||||||
at com.xxl.job.core.executor.XxlJobExecutor.start(XxlJobExecutor.java:84)
|
|
||||||
at com.xxl.job.core.executor.impl.XxlJobSpringExecutor.afterPropertiesSet(XxlJobSpringExecutor.java:42)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1863)
|
|
||||||
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1800)
|
|
||||||
... 84 common frames omitted
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:54:56.402 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:55:26.427 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:55:56.435 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:56:24.836 ERROR 32624 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registryRemove(AdminBizClient.java:46)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:84)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:56:43.349 ERROR 30264 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:56:44.287 ERROR 30264 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registryRemove(AdminBizClient.java:46)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:84)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 17:57:11.744 ERROR 39532 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 18:05:43.375 ERROR 39532 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 18:07:31.728 ERROR 39532 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 18:15:19.871 ERROR 39532 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registry(AdminBizClient.java:41)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:48)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
||||||
[data-dump] 2025-02-14 18:15:19.950 ERROR 39532 [xxl-job, executor ExecutorRegistryThread] com.xxl.job.core.util.XxlJobRemotingUtil[146] Connection refused: no further information
|
|
||||||
|
|
||||||
java.net.ConnectException: Connection refused: no further information
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnect(Native Method)
|
|
||||||
at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:672)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:547)
|
|
||||||
at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:602)
|
|
||||||
at java.base/java.net.Socket.connect(Socket.java:633)
|
|
||||||
at java.base/sun.net.NetworkClient.doConnect(NetworkClient.java:178)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:534)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.openServer(HttpClient.java:639)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.<init>(HttpClient.java:282)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:387)
|
|
||||||
at java.base/sun.net.www.http.HttpClient.New(HttpClient.java:409)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1309)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1242)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1128)
|
|
||||||
at java.base/sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:1057)
|
|
||||||
at com.xxl.job.core.util.XxlJobRemotingUtil.postBody(XxlJobRemotingUtil.java:94)
|
|
||||||
at com.xxl.job.core.biz.client.AdminBizClient.registryRemove(AdminBizClient.java:46)
|
|
||||||
at com.xxl.job.core.thread.ExecutorRegistryThread$1.run(ExecutorRegistryThread.java:84)
|
|
||||||
at java.base/java.lang.Thread.run(Thread.java:842)
|
|
||||||
|
|
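The two recurring failures in this log are an executor port conflict ("xxl-rpc provider port[8887] is used", typically a second data-dump instance still bound to that port) and an unreachable xxl-job admin during ExecutorRegistryThread registry calls ("Connection refused"). A minimal sketch of an executor configuration bean that addresses both, assuming the stock XxlJobSpringExecutor setters from xxl-job-core; the property keys and default values here are illustrative and may differ from the project's actual XxlJobConfig:

```java
package com.celnet.datadump.config;

import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class XxlJobConfigSketch {

    // Assumed property keys; the real XxlJobConfig may use different ones.
    @Value("${xxl.job.admin.addresses:http://127.0.0.1:8080/xxl-job-admin}")
    private String adminAddresses;

    @Value("${xxl.job.executor.appname:data-dump}")
    private String appName;

    // Pick a port that is actually free; 8887 in the log above was already
    // bound by another running instance, which produced the XxlRpcException.
    @Value("${xxl.job.executor.port:8888}")
    private int port;

    @Bean
    public XxlJobSpringExecutor xxlJobExecutor() {
        XxlJobSpringExecutor executor = new XxlJobSpringExecutor();
        // The admin address must be reachable, otherwise ExecutorRegistryThread
        // logs the repeated "Connection refused" errors seen above.
        executor.setAdminAddresses(adminAddresses);
        executor.setAppName(appName);
        executor.setPort(port);
        return executor;
    }
}
```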
Binary file not shown.
Binary file not shown.
1106
logs/data-dump.log
File diff suppressed because it is too large
29
pom.xml
@ -61,7 +61,32 @@
             <version>${fastjson.version}</version>
         </dependency>
         <!-- json -->
+        <!-- https://mvnrepository.com/artifact/cn.hutool/hutool-all -->
+        <dependency>
+            <groupId>cn.hutool</groupId>
+            <artifactId>hutool-all</artifactId>
+            <version>5.8.5</version>
+        </dependency>
+        <dependency>
+            <groupId>com.opencsv</groupId>
+            <artifactId>opencsv</artifactId>
+            <version>5.8</version> <!-- 使用最新版本 -->
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-csv</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjweaver</artifactId>
+            <version>1.9.6</version> <!-- 最新稳定版 -->
+        </dependency>
+        <dependency>
+            <groupId>io.github.ljwlgl</groupId>
+            <artifactId>common-util</artifactId>
+            <version>2.1.0</version>
+        </dependency>
         <!-- mybatis plus -->
         <dependency>
             <groupId>com.baomidou</groupId>
@ -217,7 +242,7 @@
             <plugin>
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-maven-plugin</artifactId>
-            </plugin>
+            </plugin><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><configuration><source>8</source><target>8</target></configuration></plugin>
         </plugins>
         <resources>
             <resource>
14
src/main/java/com/celnet/datadump/annotation/LogServiceAnnotation.java
Normal file
@ -0,0 +1,14 @@
package com.celnet.datadump.annotation;

import java.lang.annotation.*;

@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface LogServiceAnnotation {

    /** 操作类型 **/
    String operateType();
    /** 操作解释 **/
    String remark() default "";
}
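For context, a minimal sketch of how this annotation is meant to be used on a controller method; the real usages appear in the JobController diff further down, and the constants come from the OperateTypeConstant class added later in this compare. The class and method names below are placeholders, not project code:

```java
import com.celnet.datadump.annotation.LogServiceAnnotation;
import com.celnet.datadump.constant.OperateTypeConstant;

public class ExampleController {

    // OperateLogAspect intercepts any method carrying this annotation,
    // records its parameters and result, and saves a DataLog row.
    @LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_SELECT, remark = "example query")
    public String listSomething(String paramStr) {
        return "ok"; // placeholder body
    }
}
```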
165
src/main/java/com/celnet/datadump/aspect/OperateLogAspect.java
Normal file
@ -0,0 +1,165 @@
package com.celnet.datadump.aspect;

import com.alibaba.fastjson.JSON;

import com.celnet.datadump.annotation.LogServiceAnnotation;
import com.celnet.datadump.entity.DataLog;
import com.celnet.datadump.service.DataLogService;
import com.google.common.collect.Lists;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Method;
import java.util.Date;


/**
 *
 */
@Aspect
@Component
@Order(-1)
public class OperateLogAspect {
    private static final Logger log = LoggerFactory.getLogger(OperateLogAspect.class);

    @Autowired
    private DataLogService dataLogService;

    @Pointcut(value = "@annotation(com.celnet.datadump.annotation.LogServiceAnnotation)")
    public void operateLogAspectPoint(){
    }

    @Around("operateLogAspectPoint()")
    public Object around(ProceedingJoinPoint joinPoint) {

        //开始时间
        Date startTime = new Date();
        //日志注解
        LogServiceAnnotation logServiceAnno = null;

        //request请求
        RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
        //捕获请求参数
        Object[] args = joinPoint.getArgs();
        //结果
        Object result = null;
        try {
            //获取注解
            logServiceAnno = getAnnotationLog(joinPoint);

            //执行程序
            result = joinPoint.proceed();

            //初始化日志记录
            DataLog dataLog = initializeOperateLog(joinPoint, args, startTime, logServiceAnno, result, null);

            //保存日志
            dataLogService.save(dataLog);

        } catch (Throwable throwable) {

            //初始化日志记录
            DataLog dataLog = initializeOperateLog(joinPoint, args, startTime, logServiceAnno, null, throwable);

            //保存日志
            dataLogService.save(dataLog);

            log.error("日志拦截异常:" + throwable);
        }

        return result;
    }

    /**
     * 初始化操作日志
     * @param joinPoint 节点
     */
    private DataLog initializeOperateLog(ProceedingJoinPoint joinPoint, Object[] args, Date startTime, LogServiceAnnotation logServiceAnno, Object result, Throwable throwable) {

        if (logServiceAnno == null) {
            return null;
        }

        //request请求
        RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
        HttpServletRequest request = (HttpServletRequest) requestAttributes.resolveReference(RequestAttributes.REFERENCE_REQUEST);
        //ip
        String ip = request.getHeader("HTTP_X_FORWARDED_FOR");


        //请求参数
        Object[] arguments = new Object[args.length];
        for (int i = 0; i < args.length; i++) {
            if (args[i] instanceof ServletRequest || args[i] instanceof ServletResponse || args[i] instanceof MultipartFile) {
                //ServletRequest不能序列化,从入参里排除,否则报异常:java.lang.IllegalStateException: It is illegal to call this method if the current request is not in asynchronous mode (i.e. isAsyncStarted() returns false)
                //ServletResponse不能序列化 从入参里排除,否则报异常:java.lang.IllegalStateException: getOutputStream() has already been called for this response
                continue;
            }
            arguments[i] = args[i];
        }
        String argStr = arguments.length <= 0 ? "" : JSON.toJSONString(arguments);
        //响应结果
        String resultStr = result == null ? "" : JSON.toJSONString(result);
        //异常信息
        String exceptionStr = throwable == null ? "" : JSON.toJSONString(throwable.getMessage());
        //结束时间
        Date endTime = new Date();

        DataLog dataLog = new DataLog();
        dataLog.setIp(ip);
        dataLog.setStartTime(startTime);
        dataLog.setEndTime(endTime);
        if (resultStr.contains("200")) {
            dataLog.setCode("200");
            dataLog.setStatus("成功");
            dataLog.setMessage(resultStr);
        } else {
            dataLog.setCode("500");
            dataLog.setStatus("失败");
            dataLog.setMessage(exceptionStr);
        }
        dataLog.setRequestUrl(request.getRequestURI());
        dataLog.setRequestType(logServiceAnno.operateType());
        dataLog.setRequestData(argStr);
        dataLog.setRequestMethod(logServiceAnno.remark());

        return dataLog;
    }


    /**
     * 获取注解
     * @param joinPoint 节点
     * @return 结果
     */
    private LogServiceAnnotation getAnnotationLog(ProceedingJoinPoint joinPoint) {
        Signature signature = joinPoint.getSignature();
        MethodSignature methodSignature = (MethodSignature) signature;
        Method method = methodSignature.getMethod();
        if (method != null) {
            // 拿到自定义注解中的信息
            LogServiceAnnotation annotation = method.getAnnotation(LogServiceAnnotation.class);
            System.out.println("打印注解信息:" + JSON.toJSONString(annotation));
            return annotation;
        }
        return null;
    }


}
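The aspect above assumes a DataLog entity with the setters it calls. A rough sketch of the fields implied by those calls, with names inferred from the setters and types assumed; the real class under com.celnet.datadump.entity (persisted via DataLogService, most likely a MyBatis-Plus entity) may differ:

```java
package com.celnet.datadump.entity;

import java.util.Date;

// Sketch only: field list inferred from the setters used in OperateLogAspect.
public class DataLog {
    private String ip;            // setIp - caller IP read from HTTP_X_FORWARDED_FOR
    private Date startTime;       // setStartTime - when the intercepted call began
    private Date endTime;         // setEndTime - when it finished
    private String code;          // setCode - "200" on success, "500" on failure
    private String status;        // setStatus - "成功" / "失败"
    private String message;       // setMessage - serialized result or exception text
    private String requestUrl;    // setRequestUrl - request URI
    private String requestType;   // setRequestType - LogServiceAnnotation.operateType()
    private String requestData;   // setRequestData - serialized method arguments
    private String requestMethod; // setRequestMethod - LogServiceAnnotation.remark()

    // Getters and setters omitted here; the real entity presumably generates
    // them (or uses Lombok), since the aspect calls the setters directly.
}
```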
@ -2,7 +2,10 @@ package com.celnet.datadump.config;
 
 import com.alibaba.fastjson.JSONArray;
 import com.celnet.datadump.mapper.CustomMapper;
+import com.celnet.datadump.util.BulkUtil;
+import com.celnet.datadump.util.EmailUtil;
 import com.google.common.collect.Lists;
+import com.sforce.async.BulkConnection;
 import com.sforce.soap.partner.PartnerConnection;
 import com.sforce.ws.ConnectionException;
 import com.sforce.ws.ConnectorConfig;
@ -61,7 +64,53 @@ public class SalesforceConnect {
             config.setReadTimeout(60 * 60 * 1000);
             return new PartnerConnection(config);
         } catch (ConnectionException e) {
-            e.printStackTrace();
+            String message = "源ORG连接配置错误!";
+            String format = String.format("ORG连接异常!, \ncause:\n%s", message);
+            EmailUtil.send("DataDump ERROR", format);
+            log.error("exception message", e);
+            return null;
+        }
+    }
+
+    /**
+     * 创建Bulk连接
+     * @author kris
+     */
+    public BulkConnection createBulkConnect() {
+        try {
+            List<Map<String, Object>> poll = customerMapper.list("code,value","org_config",null);
+            //遍历poll,找出code值为SOURCE_ORG_URL,SOURCE_ORG_USERNAME,SOURCE_ORG_PASSWORD的value值
+            Map<String, String> map = new HashMap<>();
+            for (Map<String, Object> map1 : poll) {
+                if ("SOURCE_ORG_URL".equals(map1.get("code"))) {
+                    map.put("url", (String) map1.get("value"));
+                }
+                if ("SOURCE_ORG_USERNAME".equals(map1.get("code"))) {
+                    map.put("username", (String) map1.get("value"));
+                }
+                if ("SOURCE_ORG_PASSWORD".equals(map1.get("code"))) {
+                    map.put("password", String.valueOf(map1.get("value")));
+                }
+            }
+            //
+            // String username = map.get("username");
+            // ConnectorConfig config = new ConnectorConfig();
+            // config.setUsername(username);
+            // config.setPassword(map.get("password"));
+            // String url = map.get("url");
+            // config.setAuthEndpoint(url);
+            // config.setServiceEndpoint(url);
+            // config.setConnectionTimeout(60 * 60 * 1000);
+            // config.setReadTimeout(60 * 60 * 1000);
+            // PartnerConnection connection = new PartnerConnection(config);
+            // config.setRestEndpoint(url);
+            // config.setSessionId(connection.getSessionHeader().getSessionId());
+            return BulkUtil.getBulkConnection(map.get("username"),map.get("password"),map.get("url"));
+        } catch (Exception e) {
+            String message = "源ORG连接配置错误!";
+            String format = String.format("ORG连接异常!, \ncause:\n%s", message);
+            EmailUtil.send("DataDump ERROR", format);
+            log.error("exception message", e);
             return null;
         }
     }
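BulkUtil.getBulkConnection is project code that does not appear in this compare. A hedged sketch of what it plausibly does, following the commented-out approach above (log in through the SOAP partner API, then reuse the session id for a Bulk API connection). The class name, API version, and REST-endpoint derivation are assumptions, not the project's actual implementation:

```java
package com.celnet.datadump.util;

import com.sforce.async.AsyncApiException;
import com.sforce.async.BulkConnection;
import com.sforce.soap.partner.PartnerConnection;
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;

public final class BulkUtilSketch {

    private BulkUtilSketch() {
    }

    /**
     * Sketch: authenticate via the partner API, then build a Bulk API
     * connection that reuses the same session id.
     */
    public static BulkConnection getBulkConnection(String username, String password, String authEndpoint)
            throws ConnectionException, AsyncApiException {
        // 1. Log in with the partner API; this populates sessionId and serviceEndpoint.
        ConnectorConfig partnerConfig = new ConnectorConfig();
        partnerConfig.setUsername(username);
        partnerConfig.setPassword(password);
        partnerConfig.setAuthEndpoint(authEndpoint);
        new PartnerConnection(partnerConfig);

        // 2. Reuse the session for the Bulk API. Deriving the REST endpoint by
        //    replacing the "Soap/" path with "async/<version>" is the usual Bulk
        //    API v1 convention and is an assumption here.
        ConnectorConfig bulkConfig = new ConnectorConfig();
        bulkConfig.setSessionId(partnerConfig.getSessionId());
        String soapEndpoint = partnerConfig.getServiceEndpoint();
        String apiVersion = "48.0"; // assumed API version
        String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/")) + "async/" + apiVersion;
        bulkConfig.setRestEndpoint(restEndpoint);
        bulkConfig.setCompression(true);
        return new BulkConnection(bulkConfig);
    }
}
```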
@ -1,7 +1,11 @@
 package com.celnet.datadump.config;
 
 import com.celnet.datadump.mapper.CustomMapper;
+import com.celnet.datadump.param.DataDumpParam;
+import com.celnet.datadump.util.BulkUtil;
+import com.celnet.datadump.util.EmailUtil;
 import com.google.common.collect.Lists;
+import com.sforce.async.BulkConnection;
 import com.sforce.soap.partner.PartnerConnection;
 import com.sforce.ws.ConnectionException;
 import com.sforce.ws.ConnectorConfig;
@ -56,8 +60,56 @@ public class SalesforceTargetConnect {
             String orgId = connection.getUserInfo().getOrganizationId();
             return connection;
         } catch (ConnectionException e) {
-            e.printStackTrace();
+            String message = "目标ORG连接配置错误!";
+            String format = String.format("ORG连接异常!, \ncause:\n%s", message);
+            EmailUtil.send("DataDump ERROR", format);
+            log.error("exception message", e);
             return null;
         }
     }
+
+    /**
+     * 创建Bulk连接
+     * @author kris
+     */
+    public BulkConnection createBulkConnect() {
+        try {
+            List<Map<String, Object>> poll = customerMapper.list("code,value","org_config",null);
+            //遍历poll,找出code值为TARGET_ORG_URL,TARGET_ORG_USERNAME,TARGET_ORG_PASSWORD的value值
+            Map<String, String> map = new HashMap<>();
+            for (Map<String, Object> map1 : poll) {
+                if ("TARGET_ORG_URL".equals(map1.get("code"))) {
+                    map.put("url", (String) map1.get("value"));
+                }
+                if ("TARGET_ORG_USERNAME".equals(map1.get("code"))) {
+                    map.put("username", (String) map1.get("value"));
+                }
+                if ("TARGET_ORG_PASSWORD".equals(map1.get("code"))) {
+                    map.put("password", (String) map1.get("value"));
+                }
+            }
+
+
+            // String username = ;
+            // ConnectorConfig config = new ConnectorConfig();
+            // config.setUsername(username);
+            // config.setPassword();
+            // String url = ;
+            // config.setAuthEndpoint(url);
+            // config.setServiceEndpoint(url);
+            // config.setConnectionTimeout(60 * 60 * 1000);
+            // config.setReadTimeout(60 * 60 * 1000);
+            // PartnerConnection connection = new PartnerConnection(config);
+            // config.setRestEndpoint(url);
+            // config.setSessionId(connection.getSessionHeader().getSessionId());
+            return BulkUtil.getBulkConnection(map.get("username"),map.get("password"),map.get("url"));
+        } catch (Exception e) {
+            String message = "目标ORG连接配置错误!";
+            String format = String.format("ORG连接异常!, \ncause:\n%s", message);
+            EmailUtil.send("DataDump ERROR", format);
+            log.error("exception message", e);
+            return null;
+        }
+    }
+
 }
16
src/main/java/com/celnet/datadump/constant/OperateTypeConstant.java
Normal file
@ -0,0 +1,16 @@
package com.celnet.datadump.constant;

/**
 * 操作类型
 */
public class OperateTypeConstant {

    //删除
    public static final String TYPE_DELETE = "DELETE";
    //新增
    public static final String TYPE_INSERT = "INSERT";
    //更新
    public static final String TYPE_UPDATE = "UPDATE";
    //查询
    public static final String TYPE_SELECT = "SELECT";
}
@ -1,10 +1,13 @@
 package com.celnet.datadump.controller;

+import com.alibaba.fastjson.JSON;
+import com.celnet.datadump.annotation.LogServiceAnnotation;
 import com.celnet.datadump.entity.DataObject;
 import com.celnet.datadump.global.Result;
 import com.celnet.datadump.param.*;
 import com.celnet.datadump.service.*;
 import com.celnet.datadump.util.DataUtil;
+import com.celnet.datadump.constant.OperateTypeConstant;
 import com.xxl.job.core.biz.model.ReturnT;
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
@ -37,6 +40,12 @@ public class JobController {
     private CommonService commonService;
     @Autowired
     private DataCheckDeletedService dataCheckDeletedService;
+    @Autowired
+    private DataImportService dataImportService;
+    @Autowired
+    private DataImportBatchService dataImportBatchService;
+    @Autowired
+    private DataImportNewService dataImportNewService;

     @PostMapping("/fileTransform")
     @ApiOperation("附件解析")
@ -185,4 +194,109 @@ public class JobController {
         return Result.fail(returnT.getMsg());
     }
 }
+
+    /**
+     * Bulk-generate new SFIDs for large data volumes
+     * @param paramStr
+     * @author kris
+     * @return
+     * @throws Exception
+     */
+    @PostMapping("/dataImportBatchJob")
+    @ApiOperation("生成newSFID(大数据量)")
+    @LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_INSERT, remark = "生成newSFID(大数据量)")
+    public ReturnT<String> dataImportBatchJob(String paramStr) throws Exception {
+        log.info("dataImportBatchJob execute start ..................");
+        SalesforceParam param = new SalesforceParam();
+        try {
+            if (StringUtils.isNotBlank(paramStr)) {
+                param = JSON.parseObject(paramStr, SalesforceParam.class);
+            }
+        } catch (Throwable throwable) {
+            return new ReturnT<>(500, "参数解析失败!");
+        }
+        param.setType(1);
+        // convert parameters
+        param.setBeginCreateDate(param.getBeginDate());
+        param.setEndCreateDate(param.getEndDate());
+        return dataImportBatchService.immigrationBatch(param);
+    }
+
+    /**
+     * Bulk-update large data volumes
+     * @param paramStr
+     * @author kris
+     * @return
+     * @throws Exception
+     */
+    @PostMapping("/dataUpdateBatchJob")
+    @ApiOperation("更新数据(大数据量)")
+    @LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_UPDATE, remark = "更新数据(大数据量)")
+    public ReturnT<String> dataUpdateBatchJob(String paramStr) throws Exception {
+        log.info("dataImportBatchJob execute start ..................");
+        SalesforceParam param = new SalesforceParam();
+        try {
+            if (StringUtils.isNotBlank(paramStr)) {
+                param = JSON.parseObject(paramStr, SalesforceParam.class);
+            }
+        } catch (Throwable throwable) {
+            return new ReturnT<>(500, "参数解析失败!");
+        }
+        // convert parameters
+        param.setBeginCreateDate(param.getBeginDate());
+        param.setEndCreateDate(param.getEndDate());
+        return dataImportBatchService.immigrationUpdateBatch(param);
+    }
+
+    /**
+     * Write back personal contact IDs
+     * @param paramStr
+     * @author kris
+     * @return
+     * @throws Exception
+     */
+    @PostMapping("/getPersonContactJob")
+    @ApiOperation("返写个人联系人ID")
+    @LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_UPDATE, remark = "返写个人联系人ID")
+    public ReturnT<String> getPersonContactJob(String paramStr) throws Exception {
+        log.info("getPersonContactJob execute start ..................");
+        SalesforceParam param = new SalesforceParam();
+        try {
+            if (StringUtils.isNotBlank(paramStr)) {
+                param = JSON.parseObject(paramStr, SalesforceParam.class);
+            }
+        } catch (Throwable throwable) {
+            return new ReturnT<>(500, "参数解析失败!");
+        }
+        // convert parameters
+        param.setBeginCreateDate(param.getBeginDate());
+        param.setEndCreateDate(param.getEndDate());
+        return dataImportNewService.getPersonContact(param);
+    }
+
+    /**
+     * Data update sync (new)
+     * @param paramStr
+     * @author kris
+     * @return
+     * @throws Exception
+     */
+    @PostMapping("/dataUpdateNewJob")
+    @ApiOperation("数据更新同步(新)")
+    @LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_UPDATE, remark = "数据更新同步(新)")
+    public ReturnT<String> dataUpdateNewJob(String paramStr) throws Exception {
+        log.info("getPersonContactJob execute start ..................");
+        SalesforceParam param = new SalesforceParam();
+        try {
+            if (StringUtils.isNotBlank(paramStr)) {
+                param = JSON.parseObject(paramStr, SalesforceParam.class);
+            }
+        } catch (Throwable throwable) {
+            return new ReturnT<>(500, "参数解析失败!");
+        }
+        // convert parameters
+        param.setBeginCreateDate(param.getBeginDate());
+        param.setEndCreateDate(param.getEndDate());
+        return dataImportNewService.immigrationUpdateNew(param);
+    }
 }
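The four endpoints above repeat the same parse-then-convert preamble. A hypothetical private helper (not part of this commit) could factor it out, for example:

// Hypothetical refactoring sketch only; names are illustrative, not in the commit.
private SalesforceParam parseParam(String paramStr) {
    SalesforceParam param = new SalesforceParam();
    if (StringUtils.isNotBlank(paramStr)) {
        param = JSON.parseObject(paramStr, SalesforceParam.class);
    }
    param.setBeginCreateDate(param.getBeginDate());
    param.setEndCreateDate(param.getEndDate());
    return param;
}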
@ -8,7 +8,6 @@ import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 import lombok.Getter;
 import lombok.Setter;
-import lombok.Value;

 import java.io.Serializable;
 import java.util.Date;
@ -32,6 +31,13 @@ public class DataLog implements Serializable {
     @ApiModelProperty(value = "id")
     private Integer id;

+    /**
+     * Request URL
+     */
+    @TableField("request_url")
+    @ApiModelProperty(value = "请求接口")
+    private String requestUrl;
+
     /**
      * Request parameters
      */
@ -40,11 +46,11 @@ public class DataLog implements Serializable {
     private String requestData;

     /**
-     * Request status
+     * Request IP
      */
-    @TableField("request_status")
-    @ApiModelProperty(value = "请求状态")
-    private String requestStatus;
+    @TableField("ip")
+    @ApiModelProperty(value = "请求IP")
+    private String ip;

     /**
      * Start time
@ -77,14 +83,23 @@ public class DataLog implements Serializable {
     /**
      * Error message
      */
-    @TableField("error_message")
-    @ApiModelProperty(value = "错误信息")
-    private String errorMessage;
+    @TableField("message")
+    @ApiModelProperty(value = "响应信息")
+    private String message;

     /**
-     * Whether an email was sent
+     * Response code
      */
-    @TableField("email_flag")
-    @ApiModelProperty(value = "是否发送邮件")
-    private Boolean emailFlag;
+    @TableField("code")
+    @ApiModelProperty(value = "响应码")
+    private String code;
+
+    /**
+     * Request status
+     */
+    @TableField("status")
+    @ApiModelProperty(value = "状态")
+    private String status;

 }
@ -97,4 +97,10 @@ public class DataObject implements Serializable {
     @ApiModelProperty(value = "更新字段")
     private String updateField;
+
+    /**
+     * Whether the object is editable
+     */
+    @TableField("is_editable")
+    @ApiModelProperty(value = "是否可编辑")
+    private Boolean isEditable;
 }
@ -20,6 +20,12 @@ public class SystemConfigCode {
      */
     public static final String EXECUTOR_SIZE = "EXECUTOR_SIZE";

+    /**
+     * Switch that enables logging of objects during update
+     */
+    public static final String INFO_FLAG = "INFO_FLAG";
+
     /**
      * Email recipients
      */
@ -34,4 +40,8 @@ public class SystemConfigCode {
     public static final String BATCH_TYPE_MONTH = "MONTH";
     public static final String BATCH_TYPE_YEAR = "YEAR";
+
+    public static final String INFO_FLAG_TRUE = "TRUE";
+    public static final String INFO_FLAG_FLASE = "FLASE";

 }
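A minimal sketch of how the new INFO_FLAG constants could gate the extra update logging; the systemConfigService lookup is an assumption for illustration, only the constants come from this commit.

// Sketch only: the config lookup below is hypothetical.
String infoFlag = systemConfigService.getValue(SystemConfigCode.INFO_FLAG); // hypothetical lookup
if (SystemConfigCode.INFO_FLAG_TRUE.equals(infoFlag)) {
    log.info("update object: {}", JSON.toJSONString(update));
}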
@ -3,8 +3,8 @@ package com.celnet.datadump.job;
 import com.alibaba.fastjson.JSON;
 import com.celnet.datadump.param.SalesforceParam;
 import com.celnet.datadump.service.CommonService;
+import com.celnet.datadump.service.DataImportBatchService;
 import com.celnet.datadump.service.DataImportService;
-import com.celnet.datadump.service.impl.DataImportServiceImpl;
 import com.xxl.job.core.biz.model.ReturnT;
 import com.xxl.job.core.handler.annotation.XxlJob;
 import lombok.extern.slf4j.Slf4j;
@ -27,7 +27,6 @@ public class DataDumpJob {
     @Autowired
     private DataImportService dataImportService;

-
     /**
      * Create api
      *
@ -122,30 +121,6 @@ public class DataDumpJob {
         return dataImportService.immigration(param);
     }

-    /**
-     * Generate new SFIDs
-     * @param paramStr
-     * @return
-     * @throws Exception
-     */
-    @XxlJob("dataImportJobNew")
-    public ReturnT<String> dataImportJobNew(String paramStr) throws Exception {
-        log.info("dataImportJob execute start ..................");
-        SalesforceParam param = new SalesforceParam();
-        try {
-            if (StringUtils.isNotBlank(paramStr)) {
-                param = JSON.parseObject(paramStr, SalesforceParam.class);
-            }
-        } catch (Throwable throwable) {
-            return new ReturnT<>(500, "参数解析失败!");
-        }
-        param.setType(1);
-        // convert parameters
-        param.setBeginCreateDate(param.getBeginDate());
-        param.setEndCreateDate(param.getEndDate());
-
-        return dataImportService.immigrationNew(param);
-    }
-
     /**
      * Update data in the target org
@ -164,7 +139,7 @@ public class DataDumpJob {
         } catch (Throwable throwable) {
             return new ReturnT<>(500, "参数解析失败!");
         }
-        param.setType(1);
+        // param.setType(1);
         // convert parameters
         param.setBeginCreateDate(param.getBeginDate());
         param.setEndCreateDate(param.getEndDate());
@ -172,6 +147,7 @@ public class DataDumpJob {
         return dataImportService.immigrationUpdate(param);
     }
+
     /**
      * Fetch the file link table
      *
@ -195,4 +171,5 @@ public class DataDumpJob {

         return commonService.getAllApi();
     }
+
 }
150 src/main/java/com/celnet/datadump/job/DataDumpNewJob.java Normal file
@ -0,0 +1,150 @@
package com.celnet.datadump.job;

import com.alibaba.fastjson.JSON;
import com.celnet.datadump.config.SalesforceConnect;
import com.celnet.datadump.param.SalesforceParam;
import com.celnet.datadump.service.CommonService;
import com.celnet.datadump.service.DataImportBatchService;
import com.celnet.datadump.service.DataImportNewService;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.annotation.XxlJob;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Migration jobs (new)
 * 2024/06/12
 * kris
 */
@Component
@Slf4j
public class DataDumpNewJob {

    @Autowired
    private CommonService commonService;

    @Autowired
    private DataImportNewService dataImportNewService;

    @Autowired
    private DataImportBatchService dataImportBatchService;

    /**
     * Bulk-generate new SFIDs for large data volumes
     * @param paramStr
     * @author kris
     * @return
     * @throws Exception
     */
    @XxlJob("dataImportBatchJob")
    public ReturnT<String> dataImportBatchJob(String paramStr) throws Exception {
        log.info("dataImportBatchJob execute start ..................");
        SalesforceParam param = new SalesforceParam();
        try {
            if (StringUtils.isNotBlank(paramStr)) {
                param = JSON.parseObject(paramStr, SalesforceParam.class);
            }
        } catch (Throwable throwable) {
            return new ReturnT<>(500, "参数解析失败!");
        }
        param.setType(1);
        // convert parameters
        param.setBeginCreateDate(param.getBeginDate());
        param.setEndCreateDate(param.getEndDate());
        return dataImportBatchService.immigrationBatch(param);
    }

    /**
     * Bulk-update large data volumes
     * @param paramStr
     * @author kris
     * @return
     * @throws Exception
     */
    @XxlJob("dataUpdateBatchJob")
    public ReturnT<String> dataUpdateBatchJob(String paramStr) throws Exception {
        log.info("dataImportBatchJob execute start ..................");
        SalesforceParam param = new SalesforceParam();
        try {
            if (StringUtils.isNotBlank(paramStr)) {
                param = JSON.parseObject(paramStr, SalesforceParam.class);
            }
        } catch (Throwable throwable) {
            return new ReturnT<>(500, "参数解析失败!");
        }
        // convert parameters
        param.setBeginCreateDate(param.getBeginDate());
        param.setEndCreateDate(param.getEndDate());
        return dataImportBatchService.immigrationUpdateBatch(param);
    }

    /**
     * Write the personal account contact old_id and write back the new_id
     * @param paramStr parameter json
     * @return result
     */
    @XxlJob("getPersonContactJob")
    public ReturnT<String> getPersonContactJob(String paramStr) throws Exception {
        log.info("getPersonContactJob execute start ..................");
        SalesforceParam param = new SalesforceParam();
        try {
            if (StringUtils.isNotBlank(paramStr)) {
                param = JSON.parseObject(paramStr, SalesforceParam.class);
            }
        } catch (Throwable throwable) {
            return new ReturnT<>(500, "参数解析失败!");
        }
        // convert parameters
        param.setBeginCreateDate(param.getBeginDate());
        param.setEndCreateDate(param.getEndDate());

        return dataImportNewService.getPersonContact(param);
    }

    /**
     * Data update
     */
    @XxlJob("dataUpdateNewJob")
    public ReturnT<String> dataUpdateNewJob(String paramStr) throws Exception {
        log.info("dataUpdateNewJob execute start ..................");
        SalesforceParam param = new SalesforceParam();
        try {
            if (StringUtils.isNotBlank(paramStr)) {
                param = JSON.parseObject(paramStr, SalesforceParam.class);
            }
        } catch (Throwable throwable) {
            return new ReturnT<>(500, "参数解析失败!");
        }
        param.setBeginCreateDate(param.getBeginDate());
        param.setEndCreateDate(param.getEndDate());

        return dataImportNewService.immigrationUpdateNew(param);
    }

    /**
     * Pull the file link table
     * @return result
     */
    @XxlJob("dumpDocumentLinkJob")
    public ReturnT<String> dumpDocumentLinkJob(String paramStr) throws Exception {
        log.info("dumpDocumentLinkJob execute start ..................");

        return dataImportNewService.dumpDocumentLinkJob(paramStr);
    }

    /**
     * Push the file link table
     * @return result
     */
    @XxlJob("uploadDocumentLinkJob")
    public ReturnT<String> uploadDocumentLinkJob(String paramStr) throws Exception {
        log.info("uploadDocumentLinkJob execute start ..................");

        return dataImportNewService.uploadDocumentLinkJob(paramStr);
    }

}
@ -1,6 +1,7 @@
 package com.celnet.datadump.mapper;

 import com.celnet.datadump.param.SalesforceParam;
+import cn.hutool.json.JSONObject;
 import org.apache.ibatis.annotations.Mapper;
 import org.apache.ibatis.annotations.Param;

@ -102,6 +103,16 @@ public interface CustomMapper {
      */
     public List<Map<String, Object>> list(@Param("select") String select, @Param("api") String api, @Param("sql") String sql);
+
+    /**
+     * Generic table data fetch, returning JSONObject rows
+     *
+     * @param api    parameter
+     * @param select parameter
+     * @param sql    parameter
+     * @return list
+     */
+    public List<JSONObject> listJsonObject(@Param("select") String select, @Param("api") String api, @Param("sql") String sql);
+
     /**
      * Generic table data fetch
      *
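The new listJsonObject method is consumed by the batch import service added later in this compare; a minimal call looks like the following (the table name and filter string are illustrative only).

// Example call of the new mapper method; arguments are placeholders.
List<JSONObject> rows = customMapper.listJsonObject("*", "Account", "new_id is null limit 10000");
for (JSONObject row : rows) {
    log.info("Id={}", row.get("Id"));
}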
@ -0,0 +1,13 @@
package com.celnet.datadump.service;

import com.celnet.datadump.param.SalesforceParam;
import com.xxl.job.core.biz.model.ReturnT;

public interface DataImportBatchService {


    ReturnT<String> immigrationBatch(SalesforceParam param) throws Exception;

    ReturnT<String> immigrationUpdateBatch(SalesforceParam param) throws Exception;

}
@ -0,0 +1,37 @@
package com.celnet.datadump.service;

import com.celnet.datadump.param.SalesforceParam;
import com.xxl.job.core.biz.model.ReturnT;

import java.util.List;
import java.util.concurrent.Future;

public interface DataImportNewService {

    /**
     * Write the personal account contact old_id and write back the new_id
     */
    ReturnT<String> getPersonContact(SalesforceParam param) throws Exception;

    /**
     * Data update
     */
    ReturnT<String> immigrationUpdateNew(SalesforceParam param) throws Exception;


    /**
     * Pull the documentLink object
     * @return
     * @throws Exception
     */
    ReturnT<String> dumpDocumentLinkJob(String paramStr) throws Exception;


    /**
     * Push the documentLink object
     * @return
     * @throws Exception
     */
    ReturnT<String> uploadDocumentLinkJob(String paramStr) throws Exception;

}
@ -14,8 +14,6 @@ public interface DataImportService {

     ReturnT<String> immigration(SalesforceParam param) throws Exception;

-    ReturnT<String> immigrationNew(SalesforceParam param) throws Exception;
-
     ReturnT<String> immigrationUpdate(SalesforceParam param) throws Exception;

 }
@ -22,6 +22,7 @@ import com.celnet.datadump.util.EmailUtil;
 import com.celnet.datadump.util.SqlUtil;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.sforce.async.BulkConnection;
 import com.sforce.soap.partner.*;
 import com.sforce.soap.partner.sobject.SObject;
 import com.xxl.job.core.biz.model.ReturnT;
@ -215,7 +216,6 @@ public class CommonServiceImpl implements CommonService {

     /**
      * Automatic dump
-     *
      * @param param parameters
      * @param futures futures
      */
@ -233,8 +233,8 @@ public class CommonServiceImpl implements CommonService {
         for (DataObject dataObject : dataObjects) {
             DataObject update = new DataObject();
             TimeUnit.MILLISECONDS.sleep(1);
+            String api = dataObject.getName();
             try {
-                String api = dataObject.getName();
                 log.info("dump apis: {}", api);
                 XxlJobLogger.log("dump apis: {}", api);
                 // check whether the table exists; create it if it does not
@ -311,6 +311,9 @@ public class CommonServiceImpl implements CommonService {
                 }
                 update.setDataWork(0);
             } catch (Throwable e) {
+                String message = e.getMessage();
+                String format = String.format("获取表数据 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
+                EmailUtil.send("DataDump ERROR", format);
                 throw new RuntimeException(e);
             } finally {
                 if (dataObject != null) {
@ -753,6 +756,17 @@ public class CommonServiceImpl implements CommonService {
                 maps.add(paramMap);
                 maps.add(paramMap2);
             }
+            // Task and Event
+            // if ("Task".equals(api) || "Event".equals(api)){
+            //     Map<String, Object> paramwhoMap = Maps.newHashMap();
+            //     paramwhoMap.put("key", "WhoId_Type__c");
+            //     paramwhoMap.put("value", jsonObject.get("Who_Type"));
+            //     maps.add(paramwhoMap);
+            //     Map<String, Object> paramwhatMap = Maps.newHashMap();
+            //     paramwhoMap.put("key", "WhatId_Type__c");
+            //     paramwhoMap.put("value", jsonObject.get("What_Type"));
+            //     maps.add(paramwhoMap);
+            // }

             // file link table: assign the linked object on insert/update
             // if ("ContentDocumentLink".equals(api)) {
@ -808,6 +822,8 @@ public class CommonServiceImpl implements CommonService {
         List<Map<String, Object>> list = Lists.newArrayList();
         DescribeSObjectResult dsr = connection.describeSObject(apiName);
         String label = dsr.getLabel();
+        boolean isCustomObject = dsr.isCustom();     // only custom objects support adding fields
+        boolean isUpdateable = dsr.isUpdateable();   // whether the object itself can be modified
         List<DataField> fieldList = Lists.newArrayList();
         List<String> fields = Lists.newArrayList();
         String blobField = null;
@ -918,6 +934,19 @@ public class CommonServiceImpl implements CommonService {
         map.put("name", "new_id");
         list.add(map);
+
+        // if ("Task".equals(apiName) || "Event".equals(apiName)){
+        //     Map<String, Object> LinkedMap = Maps.newHashMap();
+        //     LinkedMap.put("type", "varchar(18)");
+        //     LinkedMap.put("comment", "whatId关联对象");
+        //     LinkedMap.put("name", "WhatId_Type__c");
+        //     list.add(LinkedMap);
+        //     Map<String, Object> LinkedMap1 = Maps.newHashMap();
+        //     LinkedMap1.put("type", "varchar(18)");
+        //     LinkedMap1.put("comment", "whoId关联对象");
+        //     LinkedMap1.put("name", "WhoId_Type__c");
+        //     list.add(LinkedMap1);
+        // }

         if ("ContentDocumentLink".equals(apiName)){
             // document link table: add the linked-object field
             Map<String, Object> LinkedMap = Maps.newHashMap();
@ -992,6 +1021,9 @@ public class CommonServiceImpl implements CommonService {
             update.setName(apiName);
             update.setLastUpdateDate(endCreateDate);
             update.setBlobField(blobField);
+            if (!isCustomObject && !isUpdateable) {
+                update.setIsEditable(false);
+            }
             dataObjectService.saveOrUpdate(update);
         }
     } finally {
@ -1009,7 +1041,13 @@ public class CommonServiceImpl implements CommonService {
         }
         log.info("打印所有待同步表:" + apis.toString());
         for (String api : apis) {
-            checkApi(api, true);
+            try {
+                checkApi(api, true);
+            } catch (Exception e) {
+                String message = e.getMessage();
+                String format = String.format("创建表结构 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
+                EmailUtil.send("DataDump ERROR", format);
+            }
         }
         return ReturnT.SUCCESS;
     }
@ -1103,87 +1141,87 @@ public class CommonServiceImpl implements CommonService {
     @Override
     public ReturnT<String> getDocumentLink(String paramStr) throws Exception {
         String api = "ContentDocumentLink";
-        // PartnerConnection partnerConnection = salesforceConnect.createConnect();
+        PartnerConnection partnerConnection = salesforceConnect.createConnect();
         PartnerConnection connection = salesforceTargetConnect.createConnect();
         List<Map<String, Object>> list = customMapper.list("Id", "ContentDocument", "new_id is not null");
-        // DescribeSObjectResult dsr = partnerConnection.describeSObject(api);
-        // List<String> fields = customMapper.getFields(api).stream().map(String::toUpperCase).collect(Collectors.toList());
-        // Field[] dsrFields = dsr.getFields();
+        DescribeSObjectResult dsr = partnerConnection.describeSObject(api);
+        List<String> fields = customMapper.getFields(api).stream().map(String::toUpperCase).collect(Collectors.toList());
+        Field[] dsrFields = dsr.getFields();
         try {
             if (list != null && list.size() > 0) {
-                // for (Map<String, Object> map : list) {
-                // String contentDocumentId = (String) map.get("Id");
-                // String sql = "SELECT Id, LinkedEntityId, LinkedEntity.Type, ContentDocumentId, Visibility, ShareType, SystemModstamp, IsDeleted FROM ContentDocumentLink where ContentDocumentId = '" + contentDocumentId + "'";
-                // JSONArray objects = null;
-                // try {
-                // QueryResult queryResult = partnerConnection.queryAll(sql);
-                // SObject[] records = queryResult.getRecords();
-                // objects = DataUtil.toJsonArray(records, dsrFields);
-                // saveOrUpdate(api, fields, records, objects, true);
-                // } catch (Throwable e) {
-                // log.error("getDocumentLink error api:{}, data:{}", api, JSON.toJSONString(objects), e);
-                // TimeUnit.MINUTES.sleep(1);
-                // return ReturnT.FAIL;
-                // }
-                // }
+                for (Map<String, Object> map : list) {
+                    String contentDocumentId = (String) map.get("Id");
+                    String sql = "SELECT Id, LinkedEntityId, LinkedEntity.Type, ContentDocumentId, Visibility, ShareType, SystemModstamp, IsDeleted FROM ContentDocumentLink where ContentDocumentId = '" + contentDocumentId + "'";
+                    JSONArray objects = null;
+                    try {
+                        QueryResult queryResult = partnerConnection.queryAll(sql);
+                        SObject[] records = queryResult.getRecords();
+                        objects = DataUtil.toJsonArray(records, dsrFields);
+                        saveOrUpdate(api, fields, records, objects, true);
+                    } catch (Throwable e) {
+                        log.error("getDocumentLink error api:{}, data:{}", api, JSON.toJSONString(objects), e);
+                        TimeUnit.MINUTES.sleep(1);
+                        return ReturnT.FAIL;
+                    }
+                }

                 // total number of rows in the table
                 Integer count = customMapper.countBySQL(api, "where ShareType = 'V' and new_id = '0'");
                 // insert in batches of 200
                 int page = count % 200 == 0 ? count / 200 : (count / 200) + 1;
                 for (int i = 0; i < page; i++) {
                     List<Map<String, Object>> linkList = customMapper.list("Id,LinkedEntityId,ContentDocumentId,LinkedEntity_Type,ShareType,Visibility", api, "ShareType = 'V' and new_id = '0' order by Id asc limit 200");
                     SObject[] accounts = new SObject[linkList.size()];
                     String[] ids = new String[linkList.size()];
                     int index = 0;
                     for (Map<String, Object> map : linkList) {
                         String linkedEntityId = (String) map.get("LinkedEntityId");
                         String id = (String) map.get("Id");
                         String contentDocumentId = (String) map.get("ContentDocumentId");
                         String linkedEntityType = (String) map.get("LinkedEntity_Type");
                         String shareType = (String) map.get("ShareType");
                         String Visibility = (String) map.get("Visibility");

                         // dataObject lookup
                         QueryWrapper<DataObject> qw = new QueryWrapper<>();
                         qw.eq("name", linkedEntityType);
                         List<DataObject> objects = dataObjectService.list(qw);
-                        if (objects.size() > 0) {
+                        if (!objects.isEmpty()) {
                             Map<String, Object> dMap = customMapper.getById("new_id", "ContentDocument", contentDocumentId);
                             Map<String, Object> lMap = customMapper.getById("new_id", linkedEntityType, linkedEntityId);

                             SObject account = new SObject();
                             account.setType(api);
                             account.setField("ContentDocumentId", dMap.get("new_id").toString());
                             account.setField("LinkedEntityId", lMap.get("new_id").toString());
                             account.setField("ShareType", shareType);
                             account.setField("Visibility", Visibility);
                             ids[index] = id;
                             accounts[index] = account;
                             index++;
+                        }
+                    }
+                    try {
+                        SaveResult[] saveResults = connection.create(accounts);
+                        for (int j = 0; j < saveResults.length; j++) {
+                            if (!saveResults[j].getSuccess()) {
+                                String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", api, JSON.toJSONString(DataDumpParam.getFilter()), com.alibaba.fastjson.JSON.toJSONString(saveResults[j]));
+                                EmailUtil.send("DataDump ContentDocumentLink ERROR", format);
+                            } else {
+                                List<Map<String, Object>> dList = new ArrayList<>();
+                                Map<String, Object> linkMap = new HashMap<>();
+                                linkMap.put("key", "new_id");
+                                linkMap.put("value", saveResults[j].getId());
+                                dList.add(linkMap);
+                                customMapper.updateById("ContentDocumentLink", dList, ids[j]);
                             }
                         }
-                        try {
-                            SaveResult[] saveResults = connection.create(accounts);
-                            for (int j = 0; j < saveResults.length; j++) {
-                                if (!saveResults[j].getSuccess()) {
-                                    String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(DataDumpParam.getFilter()), com.alibaba.fastjson.JSON.toJSONString(saveResults[j]));
-                                    EmailUtil.send("DataDump ContentDocumentLink ERROR", format);
-                                } else {
-                                    List<Map<String, Object>> dList = new ArrayList<>();
-                                    Map<String, Object> linkMap = new HashMap<>();
-                                    linkMap.put("key", "new_id");
-                                    linkMap.put("value", saveResults[j].getId());
-                                    dList.add(linkMap);
-                                    customMapper.updateById("ContentDocumentLink", dList, ids[j]);
-                                }
-                            }
-                        } catch (Exception e) {
-                            log.error("getDocumentLink error api:{}, data:{}", api, JSON.toJSONString(accounts), e);
-                            EmailUtil.send("-------测试-----------", JSON.toJSONString(accounts));
-                            throw new RuntimeException(e);
-                        }
-                    }
+                    } catch (Exception e) {
+                        log.error("getDocumentLink error api:{}, data:{}", api, JSON.toJSONString(accounts), e);
+                        EmailUtil.send("-------测试-----------", JSON.toJSONString(accounts));
+                        throw new RuntimeException(e);
+                    }
                 }
             }
         } catch (Exception e) {
             log.error("getDocumentLink error api:{}, data:{}", api, JSON.toJSONString(list), e);
|
@ -0,0 +1,633 @@
|
|||||||
|
package com.celnet.datadump.service.impl;
|
||||||
|
|
||||||
|
|
||||||
|
import cn.hutool.core.lang.UUID;
|
||||||
|
import cn.hutool.json.JSONObject;
|
||||||
|
import com.alibaba.fastjson.JSON;
|
||||||
|
import com.alibaba.fastjson.JSONArray;
|
||||||
|
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
||||||
|
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
|
||||||
|
import com.celnet.datadump.config.SalesforceExecutor;
|
||||||
|
import com.celnet.datadump.config.SalesforceTargetConnect;
|
||||||
|
import com.celnet.datadump.entity.*;
|
||||||
|
import com.celnet.datadump.global.SystemConfigCode;
|
||||||
|
import com.celnet.datadump.mapper.CustomMapper;
|
||||||
|
import com.celnet.datadump.param.DataDumpParam;
|
||||||
|
import com.celnet.datadump.param.SalesforceParam;
|
||||||
|
import com.celnet.datadump.service.*;
|
||||||
|
import com.celnet.datadump.util.BulkUtil;
|
||||||
|
import com.celnet.datadump.util.CsvConverterUtil;
|
||||||
|
import com.celnet.datadump.util.DataUtil;
|
||||||
|
import com.celnet.datadump.util.EmailUtil;
|
||||||
|
import com.google.common.collect.Lists;
|
||||||
|
import com.sforce.async.*;
|
||||||
|
import com.sforce.soap.partner.PartnerConnection;
|
||||||
|
import com.sforce.soap.partner.SaveResult;
|
||||||
|
import com.sforce.soap.partner.sobject.SObject;
|
||||||
|
import com.xxl.job.core.biz.model.ReturnT;
|
||||||
|
import com.xxl.job.core.log.XxlJobLogger;
|
||||||
|
import com.xxl.job.core.util.DateUtil;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.apache.commons.collections.CollectionUtils;
|
||||||
|
import org.apache.commons.lang.time.DateUtils;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.time.ZoneId;
|
||||||
|
import java.time.ZonedDateTime;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.Future;
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@Slf4j
|
||||||
|
public class DataImportBatchServiceImpl implements DataImportBatchService {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SalesforceTargetConnect salesforceTargetConnect;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SalesforceExecutor salesforceExecutor;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataObjectService dataObjectService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataBatchService dataBatchService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataFieldService dataFieldService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private CustomMapper customMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataBatchHistoryService dataBatchHistoryService;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert入口
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> immigrationBatch(SalesforceParam param) throws Exception {
|
||||||
|
List<Future<?>> futures = Lists.newArrayList();
|
||||||
|
try {
|
||||||
|
if (StringUtils.isNotBlank(param.getApi())) {
|
||||||
|
// 手动任务
|
||||||
|
ReturnT<String> result = manualImmigrationBatch(param, futures);
|
||||||
|
if (result != null) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ReturnT.SUCCESS;
|
||||||
|
} catch (Exception exception) {
|
||||||
|
salesforceExecutor.remove(futures.toArray(new Future<?>[]{}));
|
||||||
|
log.error("immigration error", exception);
|
||||||
|
throw exception;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装执行参数
|
||||||
|
*/
|
||||||
|
public ReturnT<String> manualImmigrationBatch(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
List<String> apis;
|
||||||
|
apis = DataUtil.toIdList(param.getApi());
|
||||||
|
String join = StringUtils.join(apis, ",");
|
||||||
|
log.info("immigration apis: {}", join);
|
||||||
|
XxlJobLogger.log("immigration apis: {}", join);
|
||||||
|
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1).in("name", apis);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
String message = "api:" + apiNames + " is locked";
|
||||||
|
log.info(message);
|
||||||
|
String format = String.format("数据Insert error, api name: %s, \nparam: %s, \ncause:\n%s", apiNames, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return new ReturnT<>(500, message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
BulkConnection bulkConnection = salesforceTargetConnect.createBulkConnect();
|
||||||
|
for (String api : apis) {
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
try {
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(1);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
List<DataBatch> list = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
salesforceParams = list.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
manualCreatedNewIdBatch(salesforceParam, bulkConnection);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 1);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
update.setDataWork(0);
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
throw e;
|
||||||
|
} catch (Throwable e) {
|
||||||
|
log.error("manualImmigration error", e);
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
if (isFull) {
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 执行数据Insert
|
||||||
|
*/
|
||||||
|
public void manualCreatedNewIdBatch(SalesforceParam param, BulkConnection bulkConnection) throws Exception {
|
||||||
|
String api = param.getApi();
|
||||||
|
QueryWrapper<DataField> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("api", api);
|
||||||
|
List<DataField> list = dataFieldService.list(dbQw);
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
|
||||||
|
Date beginDate = param.getBeginCreateDate();
|
||||||
|
Date endDate = param.getEndCreateDate();
|
||||||
|
String beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
//表内数据总量
|
||||||
|
Integer count = customMapper.countBySQL(api, "where new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
|
||||||
|
log.error("总Insert数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
|
||||||
|
if (count == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
//批量插入10000一次
|
||||||
|
int page = count%10000 == 0 ? count/10000 : (count/10000) + 1;
|
||||||
|
//总插入数
|
||||||
|
int sfNum = 0;
|
||||||
|
for (int i = 0; i < page; i++) {
|
||||||
|
|
||||||
|
List<JSONObject> data = customMapper.listJsonObject("*", api, "new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' limit 10000");
|
||||||
|
int size = data.size();
|
||||||
|
|
||||||
|
log.info("执行api:{}, 执行page:{}, 执行size:{}", api, i+1, size);
|
||||||
|
List<JSONObject> insertList = new ArrayList<>();
|
||||||
|
|
||||||
|
//判断引用对象是否存在new_id
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
update.setName(api);
|
||||||
|
|
||||||
|
//更新对象的new_id
|
||||||
|
String[] ids = new String[size];
|
||||||
|
|
||||||
|
// 定义输入/输出格式
|
||||||
|
DateTimeFormatter inputFormatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
|
||||||
|
DateTimeFormatter outputFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSxx");
|
||||||
|
|
||||||
|
for (int j = 1; j <= size; j++) {
|
||||||
|
JSONObject account = new JSONObject();
|
||||||
|
for (DataField dataField : list) {
|
||||||
|
if ("OwnerId".equals(dataField.getField()) || "Owner_Type".equals(dataField.getField())
|
||||||
|
|| "Id".equals(dataField.getField())){
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (dataField.getIsCreateable() && !dataField.getIsNillable() && !dataField.getIsDefaultedOnCreate()) {
|
||||||
|
if ("reference".equals(dataField.getSfType())){
|
||||||
|
String reference = dataField.getReferenceTo();
|
||||||
|
if (reference == null){
|
||||||
|
reference = data.get(j-1).get("Parent_Type").toString();
|
||||||
|
}
|
||||||
|
List<Map<String, Object>> referenceMap = customMapper.list("new_id", reference, "new_id is not null limit 1");
|
||||||
|
if (referenceMap.isEmpty()){
|
||||||
|
QueryWrapper<DataObject> maxIndex = new QueryWrapper<>();
|
||||||
|
maxIndex.select("IFNULL(max(data_index),0) as data_index");
|
||||||
|
maxIndex.ne("name", api);
|
||||||
|
Map<String, Object> map = dataObjectService.getMap(maxIndex);
|
||||||
|
//如果必填lookup字段没有值,跳过
|
||||||
|
update.setDataIndex(Integer.parseInt(map.get("data_index").toString()) + 1);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
String message = "api:" + api + "的引用对象:" + reference + "不存在数据!";
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
log.info(message);
|
||||||
|
return;
|
||||||
|
}else{
|
||||||
|
account.put(dataField.getField(), referenceMap.get(0).get("new_id"));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ("picklist".equals(dataField.getSfType())){
|
||||||
|
List<Map<String, Object>> pickList = customMapper.list("value", "data_picklist", "api = '"+api+"' and field = '"+dataField.getField()+"' limit 1");
|
||||||
|
account.put(dataField.getField(), pickList.get(0).get("value"));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
account.put(dataField.getField(), DataUtil.fieldTypeToSf(dataField));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 转换为UTC时间并格式化
|
||||||
|
LocalDateTime localDateTime = LocalDateTime.parse(String.valueOf(data.get(j - 1).get("CreatedDate")), inputFormatter);
|
||||||
|
|
||||||
|
ZonedDateTime utcDateTime = localDateTime.atZone(ZoneId.of("UTC")).minusHours(8) ;
|
||||||
|
|
||||||
|
String convertedTime = utcDateTime.format(outputFormatter);
|
||||||
|
|
||||||
|
account.put("CreatedDate", convertedTime);
|
||||||
|
Map<String, Object> CreatedByIdMap = customMapper.getById("new_id", "User", data.get(j-1).get("CreatedById").toString());
|
||||||
|
if(CreatedByIdMap.get("new_id") != null && StringUtils.isNotEmpty(CreatedByIdMap.get("new_id").toString())){
|
||||||
|
account.put("CreatedById", CreatedByIdMap.get("new_id"));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
ids[j-1] = data.get(j-1).get("Id").toString();
|
||||||
|
insertList.add(account);
|
||||||
|
if (i*10000+j == count){
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
|
||||||
|
//写入csv文件
|
||||||
|
String fullPath = CsvConverterUtil.writeToCsv(insertList, UUID.randomUUID().toString());
|
||||||
|
|
||||||
|
JobInfo salesforceInsertJob = BulkUtil.createJob(bulkConnection, api, OperationEnum.insert);
|
||||||
|
|
||||||
|
List<BatchInfo> batchInfos = BulkUtil.createBatchesFromCSVFile(bulkConnection, salesforceInsertJob, fullPath);
|
||||||
|
|
||||||
|
BulkUtil.awaitCompletion(bulkConnection, salesforceInsertJob, batchInfos);
|
||||||
|
|
||||||
|
sfNum = sfNum + checkInsertResults(bulkConnection, salesforceInsertJob, batchInfos, api, ids);
|
||||||
|
|
||||||
|
BulkUtil.closeJob(bulkConnection, salesforceInsertJob.getId());
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("manualCreatedNewId error api:{}", api, e);
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
UpdateWrapper<DataBatchHistory> updateQw = new UpdateWrapper<>();
|
||||||
|
updateQw.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", DateUtils.addSeconds(endDate, -1))
|
||||||
|
.set("target_sf_num", sfNum);
|
||||||
|
dataBatchHistoryService.update(updateQw);
|
||||||
|
|
||||||
|
UpdateWrapper<DataBatch> updateQw2 = new UpdateWrapper<>();
|
||||||
|
updateQw2.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", endDate)
|
||||||
|
.set("sf_add_num", sfNum);
|
||||||
|
dataBatchService.update(updateQw2);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 读写Insert结果
|
||||||
|
*/
|
||||||
|
public int checkInsertResults(BulkConnection connection, JobInfo job,
|
||||||
|
List<BatchInfo> batchInfoList,String api,String[] ids)
|
||||||
|
throws AsyncApiException, IOException {
|
||||||
|
int index = 0;
|
||||||
|
// batchInfoList was populated when batches were created and submitted
|
||||||
|
for (BatchInfo b : batchInfoList) {
|
||||||
|
CSVReader rdr =
|
||||||
|
new CSVReader(connection.getBatchResultStream(job.getId(), b.getId()));
|
||||||
|
List<String> resultHeader = rdr.nextRecord();
|
||||||
|
int resultCols = resultHeader.size();
|
||||||
|
|
||||||
|
List<String> row;
|
||||||
|
while ((row = rdr.nextRecord()) != null) {
|
||||||
|
Map<String, String> resultInfo = new HashMap<String, String>();
|
||||||
|
for (int i = 0; i < resultCols; i++) {
|
||||||
|
resultInfo.put(resultHeader.get(i), row.get(i));
|
||||||
|
}
|
||||||
|
boolean insertStatus = Boolean.valueOf(resultInfo.get("Success"));
|
||||||
|
boolean created = Boolean.valueOf(resultInfo.get("Created"));
|
||||||
|
String id = resultInfo.get("Id");
|
||||||
|
String error = resultInfo.get("Error");
|
||||||
|
if (insertStatus && created) {
|
||||||
|
List<Map<String, Object>> maps = new ArrayList<>();
|
||||||
|
Map<String, Object> m = new HashMap<>();
|
||||||
|
m.put("key", "new_id");
|
||||||
|
m.put("value", id);
|
||||||
|
maps.add(m);
|
||||||
|
customMapper.updateById(api, maps, ids[index]);
|
||||||
|
index ++;
|
||||||
|
log.info("Created Success row with id " + id);
|
||||||
|
} else if (!insertStatus) {
|
||||||
|
log.info("Created Fail with error: " + error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update入口
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> immigrationUpdateBatch(SalesforceParam param) throws Exception {
|
||||||
|
List<Future<?>> futures = Lists.newArrayList();
|
||||||
|
try {
|
||||||
|
if (StringUtils.isNotBlank(param.getApi())) {
|
||||||
|
// 手动任务
|
||||||
|
ReturnT<String> result = updateSfDataBatch(param, futures);
|
||||||
|
if (result != null) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ReturnT.SUCCESS;
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
throw e;
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
salesforceExecutor.remove(futures.toArray(new Future<?>[]{}));
|
||||||
|
log.error("immigrationUpdate error", throwable);
|
||||||
|
throw throwable;
|
||||||
|
}
|
||||||
|
}

/**
 * Assemble the execution parameters
 */
public ReturnT<String> updateSfDataBatch(SalesforceParam param, List<Future<?>> futures) throws Exception {
List<String> apis;
String beginDateStr = null;
String endDateStr = null;
apis = DataUtil.toIdList(param.getApi());
if (param.getBeginCreateDate() != null && param.getEndCreateDate() != null) {
Date beginDate = param.getBeginCreateDate();
Date endDate = param.getEndCreateDate();
beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
}
String join = StringUtils.join(apis, ",");
log.info("immigration apis: {}", join);
XxlJobLogger.log("immigration apis: {}", join);
// for a full run, check whether any of the tables are locked by an automatic task
boolean isFull = CollectionUtils.isEmpty(param.getIds());
if (isFull) {
QueryWrapper<DataObject> qw = new QueryWrapper<>();
qw.eq("data_lock", 1).in("name", apis);
List<DataObject> list = dataObjectService.list(qw);
if (CollectionUtils.isNotEmpty(list)) {
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
String message = "api:" + apiNames + " is locked";
log.info(message);
String format = String.format("数据Update error, api name: %s, \nparam: %s, \ncause:\n%s", apiNames, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
EmailUtil.send("DataDump ERROR", format);
return new ReturnT<>(500, message);
}
}

BulkConnection bulkConnection = salesforceTargetConnect.createBulkConnect();
for (String api : apis) {
DataObject update = new DataObject();
try {
TimeUnit.MILLISECONDS.sleep(1);
// default to an empty list so the loop below is a no-op when no DataBatch rows match
List<SalesforceParam> salesforceParams = Lists.newArrayList();
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
dbQw.eq("name", api);
if (StringUtils.isNotEmpty(beginDateStr) && StringUtils.isNotEmpty(endDateStr)) {
dbQw.eq("sync_start_date", beginDateStr); // equals the start time
dbQw.eq("sync_end_date", endDateStr); // equals the end time
}
List<DataBatch> list = dataBatchService.list(dbQw);
AtomicInteger batch = new AtomicInteger(1);
if (CollectionUtils.isNotEmpty(list)) {
salesforceParams = list.stream().map(t -> {
SalesforceParam salesforceParam = param.clone();
salesforceParam.setApi(t.getName());
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
salesforceParam.setEndCreateDate(t.getSyncEndDate());
salesforceParam.setBatch(batch.getAndIncrement());
return salesforceParam;
}).collect(Collectors.toList());
}

// manual tasks run first
for (SalesforceParam salesforceParam : salesforceParams) {
Future<?> future = salesforceExecutor.execute(() -> {
try {
manualUpdateSfDataBatch(salesforceParam, bulkConnection);
} catch (Throwable throwable) {
log.error("salesforceExecutor error", throwable);
throw new RuntimeException(throwable);
}
}, salesforceParam.getBatch(), 1);
futures.add(future);
}
// wait for all current threads to finish
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
} catch (Exception e) {
throw e;
} finally {
if (isFull) {
update.setNeedUpdate(false);
update.setName(api);
update.setDataLock(0);
dataObjectService.updateById(update);
}
}
}
return null;
}

/**
 * Execute the data update
 */
private void manualUpdateSfDataBatch(SalesforceParam param, BulkConnection bulkConnection) throws Exception {

String api = param.getApi();
QueryWrapper<DataField> dbQw = new QueryWrapper<>();
dbQw.eq("api", api);
List<DataField> list = dataFieldService.list(dbQw);
TimeUnit.MILLISECONDS.sleep(1);

Date beginDate = param.getBeginCreateDate();
Date endDate = param.getEndCreateDate();
String beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
String endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");

// total number of rows in the table
Integer count = customMapper.countBySQL(api, "where new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
log.error("总Update数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
if (count == 0) {
return;
}

// check whether the referenced objects already carry a new_id
DataObject update = new DataObject();
update.setName(api);

// total number of updated rows
int sfNum = 0;
// batch update, 10,000 rows at a time
int page = count % 10000 == 0 ? count / 10000 : (count / 10000) + 1;
for (int i = 0; i < page; i++) {
List<Map<String, Object>> mapList = customMapper.list("*", api, "new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' order by Id asc limit " + i * 10000 + ",10000");

List<JSONObject> updateList = new ArrayList<>();

for (Map<String, Object> map : mapList) {
JSONObject account = new JSONObject();

// populate the object fields
for (DataField dataField : list) {
String field = dataField.getField();
String reference_to = dataField.getReferenceTo();

// look up the referenced object's new SF id by its old SF id
if (field.equals("Id")) {
account.put("Id", String.valueOf(map.get("new_id")));
} else if (!DataUtil.isUpdate(field) || (dataField.getIsCreateable() != null && !dataField.getIsCreateable())) {
continue;
} else if (StringUtils.isNotBlank(reference_to) && !"data_picklist".equals(reference_to)) {
// skip null/empty foreign keys so a missing reference does not abort the whole batch
if (map.get(field) != null && StringUtils.isNotEmpty(String.valueOf(map.get(field))) && !"OwnerId".equals(field) && !"Owner_Type".equals(field)) {
// check whether reference_to contains the string "User"
if (reference_to.contains("User")) {
reference_to = "User";
}
Map<String, Object> m = customMapper.getById("new_id", reference_to, String.valueOf(map.get(field)));
if (m != null && !m.isEmpty()) {
account.put(field, m.get("new_id"));
} else {
String message = "对象类型:" + api + "的数据:" + map.get("Id") + "的引用对象:" + dataField.getReferenceTo() + "的数据:" + map.get(field) + "不存在!";
EmailUtil.send("DataDump ERROR", message);
log.info(message);
return;
}
}
} else {
if (map.get(field) != null && StringUtils.isNotBlank(dataField.getSfType())) {
account.put(field, DataUtil.localBulkDataToSfData(dataField.getSfType(), String.valueOf(map.get(field))));
} else {
account.put(field, map.get(field));
}
}
}
account.put("old_owner_id__c", map.get("OwnerId"));
account.put("old_sfdc_id__c", map.get("Id"));

updateList.add(account);
}

try {
// write the batch to a CSV file
String fullPath = CsvConverterUtil.writeToCsv(updateList, UUID.randomUUID().toString());

JobInfo salesforceInsertJob = BulkUtil.createJob(bulkConnection, api, OperationEnum.update);

List<BatchInfo> batchInfos = BulkUtil.createBatchesFromCSVFile(bulkConnection, salesforceInsertJob, fullPath);

BulkUtil.awaitCompletion(bulkConnection, salesforceInsertJob, batchInfos);

sfNum = sfNum + checkUpdateResults(bulkConnection, salesforceInsertJob, batchInfos, api);

BulkUtil.closeJob(bulkConnection, salesforceInsertJob.getId());

new File(fullPath).delete();
} catch (Throwable e) {
log.info(e.getMessage());
throw e;
}
}

UpdateWrapper<DataBatchHistory> updateQw = new UpdateWrapper<>();
updateQw.eq("name", api)
.eq("sync_start_date", beginDate)
.eq("sync_end_date", DateUtils.addSeconds(endDate, -1))
.set("target_update_num", sfNum);
dataBatchHistoryService.update(updateQw);

UpdateWrapper<DataBatch> updateQw2 = new UpdateWrapper<>();
updateQw2.eq("name", api)
.eq("sync_start_date", beginDate)
.eq("sync_end_date", endDate)
.set("sf_update_num", sfNum);
dataBatchService.update(updateQw2);

}
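/*
 * Illustrative note (sketch, not part of the original source): the page count above is
 * derived with a modulo check. An equivalent single-expression form is plain ceiling
 * division; pageSize is an assumed name standing in for the hard-coded 10000.
 *
 *     int pageSize = 10000;
 *     int page = (count + pageSize - 1) / pageSize; // same result as the modulo form for count >= 0
 */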

/**
 * Read and tally the update results
 */
public int checkUpdateResults(BulkConnection connection, JobInfo job,
List<BatchInfo> batchInfoList, String api)
throws AsyncApiException, IOException {
int index = 0;
// batchInfoList was populated when batches were created and submitted
for (BatchInfo b : batchInfoList) {
CSVReader rdr =
new CSVReader(connection.getBatchResultStream(job.getId(), b.getId()));
List<String> resultHeader = rdr.nextRecord();
int resultCols = resultHeader.size();

List<String> row;
while ((row = rdr.nextRecord()) != null) {
Map<String, String> resultInfo = new HashMap<>();
for (int i = 0; i < resultCols; i++) {
resultInfo.put(resultHeader.get(i), row.get(i));
}
boolean updateStatus = Boolean.parseBoolean(resultInfo.get("Success"));
String id = resultInfo.get("Id");
String error = resultInfo.get("Error");
if (updateStatus) {
index++;
log.info("Update Success row with id " + id);
} else {
log.info("Update Fail with error: " + error);
}
}
}
return index;
}

}
|
@@ -0,0 +1,974 @@
|
|||||||
|
package com.celnet.datadump.service.impl;
|
||||||
|
|
||||||
|
import com.alibaba.fastjson.JSON;
|
||||||
|
import com.alibaba.fastjson.JSONArray;
|
||||||
|
import com.alibaba.fastjson2.JSONObject;
|
||||||
|
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
||||||
|
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
|
||||||
|
import com.celnet.datadump.config.SalesforceConnect;
|
||||||
|
import com.celnet.datadump.config.SalesforceExecutor;
|
||||||
|
import com.celnet.datadump.config.SalesforceTargetConnect;
|
||||||
|
import com.celnet.datadump.entity.DataBatch;
|
||||||
|
import com.celnet.datadump.entity.DataBatchHistory;
|
||||||
|
import com.celnet.datadump.entity.DataField;
|
||||||
|
import com.celnet.datadump.entity.DataObject;
|
||||||
|
import com.celnet.datadump.global.Const;
|
||||||
|
import com.celnet.datadump.global.SystemConfigCode;
|
||||||
|
import com.celnet.datadump.mapper.CustomMapper;
|
||||||
|
import com.celnet.datadump.param.DataDumpParam;
|
||||||
|
import com.celnet.datadump.param.SalesforceParam;
|
||||||
|
import com.celnet.datadump.service.*;
|
||||||
|
import com.celnet.datadump.util.DataUtil;
|
||||||
|
import com.celnet.datadump.util.EmailUtil;
|
||||||
|
import com.google.common.collect.Lists;
|
||||||
|
import com.google.common.collect.Maps;
|
||||||
|
import com.sforce.soap.partner.*;
|
||||||
|
import com.sforce.soap.partner.sobject.SObject;
|
||||||
|
import com.xxl.job.core.biz.model.ReturnT;
|
||||||
|
import com.xxl.job.core.log.XxlJobLogger;
|
||||||
|
import com.xxl.job.core.util.DateUtil;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.apache.commons.collections.CollectionUtils;
|
||||||
|
import org.apache.commons.lang.time.DateUtils;
|
||||||
|
import org.apache.commons.lang3.ObjectUtils;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.Future;
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@Slf4j
|
||||||
|
public class DataImportNewServiceImpl implements DataImportNewService {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SalesforceTargetConnect salesforceTargetConnect;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataBatchHistoryService dataBatchHistoryService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SalesforceExecutor salesforceExecutor;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SalesforceConnect salesforceConnect;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataObjectService dataObjectService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataBatchService dataBatchService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataFieldService dataFieldService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private CustomMapper customMapper;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private CommonService commonService;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get返写个人客户联系人入口
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> getPersonContact(SalesforceParam param) throws Exception {
|
||||||
|
List<Future<?>> futures = Lists.newArrayList();
|
||||||
|
try {
|
||||||
|
if (StringUtils.isNotBlank(param.getApi())) {
|
||||||
|
// 手动任务
|
||||||
|
ReturnT<String> result = manualGetPersonContact(param, futures);
|
||||||
|
if (result != null) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ReturnT.SUCCESS;
|
||||||
|
} catch (Exception exception) {
|
||||||
|
salesforceExecutor.remove(futures.toArray(new Future<?>[]{}));
|
||||||
|
log.error("immigration error", exception);
|
||||||
|
throw exception;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装Get执行参数
|
||||||
|
*/
|
||||||
|
public ReturnT<String> manualGetPersonContact(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
String api = "Contact";
|
||||||
|
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
return new ReturnT<>(500, "api:" + apiNames + " is locked");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PartnerConnection connect = salesforceTargetConnect.createConnect();
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
try {
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(1);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
List<DataBatch> list = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
salesforceParams = list.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
manualGetPersonContactId(salesforceParam, connect);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 1);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
update.setDataWork(0);
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
throw e;
|
||||||
|
} catch (Throwable e) {
|
||||||
|
log.error("manualImmigration error", e);
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
} finally {
|
||||||
|
if (isFull) {
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 获取newId,写入oldId
|
||||||
|
*/
|
||||||
|
private void manualGetPersonContactId(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
|
||||||
|
|
||||||
|
String api = param.getApi();
|
||||||
|
QueryWrapper<DataField> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("api", api);
|
||||||
|
|
||||||
|
Date beginDate = param.getBeginCreateDate();
|
||||||
|
Date endDate = param.getEndCreateDate();
|
||||||
|
String beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
|
||||||
|
DescribeSObjectResult dsr = partnerConnection.describeSObject(api);
|
||||||
|
Field[] dsrFields = dsr.getFields();
|
||||||
|
|
||||||
|
//表内数据总量
|
||||||
|
Integer count = customMapper.countBySQL(api, "where new_id is null and IsPersonAccount = 1 and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
|
||||||
|
|
||||||
|
log.error("总Insert数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
|
||||||
|
if (count == 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//批量插入200一次
|
||||||
|
int page = count%200 == 0 ? count/200 : (count/200) + 1;
|
||||||
|
|
||||||
|
for (int i = 0; i < page; i++) {
|
||||||
|
List<Map<String, Object>> data = customMapper.list("*", api, "new_id is null and IsPersonAccount = 1 and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' limit 200");
|
||||||
|
int size = data.size();
|
||||||
|
log.info("执行api:{}, 执行page:{}, 执行size:{}", api, i+1, size);
|
||||||
|
SObject[] accounts = new SObject[size];
|
||||||
|
|
||||||
|
// 新客户ID,旧联系人ID,用于更新本地数据
|
||||||
|
Map<String,String> idMaps = new HashMap<>();
|
||||||
|
// 旧客户ID,新联系人ID,用于更新SF数据
|
||||||
|
Map<String,String> idMapa = new HashMap<>();
|
||||||
|
|
||||||
|
for (Map<String, Object> map : data) {
|
||||||
|
Map<String, Object> idMap = customMapper.getById("new_id", "Account", map.get("AccountId").toString());
|
||||||
|
if(idMap.get("new_id") != null && StringUtils.isNotEmpty(idMap.get("new_id").toString())){
|
||||||
|
// 新客户ID,旧联系人ID
|
||||||
|
idMaps.put(idMap.get("new_id").toString(),map.get("Id").toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
String idStr = "(";
|
||||||
|
for (String ids : idMaps.keySet()) {
|
||||||
|
idStr += "'" + ids + "',"; // 拼接每个ID
|
||||||
|
}
|
||||||
|
if (idStr.endsWith(",")) { // 如果最后一个字符是逗号,说明循环正常结束
|
||||||
|
idStr = idStr.substring(0, idStr.length() - 1); // 去掉最后一个多余的逗号
|
||||||
|
}
|
||||||
|
idStr += ")"; // 添加右括号
|
||||||
|
|
||||||
|
try {
|
||||||
|
String sql = "SELECT Id,AccountId,Account.old_sfdc_id__c FROM Contact where AccountId in " + idStr ;
|
||||||
|
QueryResult queryResult = partnerConnection.queryAll(sql);
|
||||||
|
if (ObjectUtils.isEmpty(queryResult) || ObjectUtils.isEmpty(queryResult.getRecords())) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
SObject[] records = queryResult.getRecords();
|
||||||
|
com.alibaba.fastjson2.JSONArray objects = DataUtil.toJsonArray(records, dsrFields);
|
||||||
|
for (int z = 0; z < objects.size(); z++) {
|
||||||
|
JSONObject jsonObject = objects.getJSONObject(z);
|
||||||
|
String contactId = jsonObject.getString(Const.ID);
|
||||||
|
String accountId = jsonObject.getString("AccountId");
|
||||||
|
String oldAccountId = jsonObject.getString("Account_old_sfdc_id__c");
|
||||||
|
String id = idMaps.get(accountId);
|
||||||
|
List<Map<String, Object>> maps = Lists.newArrayList();
|
||||||
|
Map<String, Object> paramMap = Maps.newHashMap();
|
||||||
|
paramMap.put("key", "new_id");
|
||||||
|
paramMap.put("value", contactId);
|
||||||
|
maps.add(paramMap);
|
||||||
|
customMapper.updateById(api, maps, id);
|
||||||
|
idMapa.put(oldAccountId, contactId);
|
||||||
|
}
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
|
||||||
|
int index = 0;
|
||||||
|
for (Map<String, Object> map : data) {
|
||||||
|
SObject account = new SObject();
|
||||||
|
account.setType(api);
|
||||||
|
account.setField("old_owner_id__c", map.get("OwnerId"));
|
||||||
|
account.setField("old_sfdc_id__c", map.get("Id"));
|
||||||
|
account.setId(idMapa.get(map.get("AccountId").toString()));
|
||||||
|
accounts[index] = account;
|
||||||
|
index++;
|
||||||
|
}
|
||||||
|
|
||||||
|
SaveResult[] saveResults = partnerConnection.update(accounts);
|
||||||
|
for (SaveResult saveResult : saveResults) {
|
||||||
|
if (!saveResult.getSuccess()) {
|
||||||
|
log.info("-------------saveResults: {}", JSON.toJSONString(saveResult));
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s, \n数据实体类:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), JSON.toJSONString(saveResult));
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("manualGetPersonContactId error api:{}", api, e);
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SalesforceParam countParam = new SalesforceParam();
|
||||||
|
countParam.setApi(api);
|
||||||
|
countParam.setBeginCreateDate(beginDate);
|
||||||
|
countParam.setEndCreateDate(DateUtils.addSeconds(endDate, -1));
|
||||||
|
// 存在isDeleted 只查询IsDeleted为false的
|
||||||
|
if (dataFieldService.hasDeleted(countParam.getApi())) {
|
||||||
|
countParam.setIsDeleted(false);
|
||||||
|
} else {
|
||||||
|
// 不存在 过滤
|
||||||
|
countParam.setIsDeleted(null);
|
||||||
|
}
|
||||||
|
// sf count
|
||||||
|
Integer sfNum = commonService.countSfNum(partnerConnection, countParam);
|
||||||
|
|
||||||
|
UpdateWrapper<DataBatchHistory> updateQw = new UpdateWrapper<>();
|
||||||
|
updateQw.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", DateUtils.addSeconds(endDate, -1))
|
||||||
|
.set("target_sf_num", sfNum);
|
||||||
|
dataBatchHistoryService.update(updateQw);
|
||||||
|
|
||||||
|
UpdateWrapper<DataBatch> updateQw2 = new UpdateWrapper<>();
|
||||||
|
updateQw2.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", endDate)
|
||||||
|
.set("sf_add_num", sfNum);
|
||||||
|
dataBatchService.update(updateQw2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 数据更新Update入口
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> immigrationUpdateNew(SalesforceParam param) throws Exception {
|
||||||
|
List<Future<?>> futures = Lists.newArrayList();
|
||||||
|
try {
|
||||||
|
if (StringUtils.isNotBlank(param.getApi())) {
|
||||||
|
if (1 == param.getType()){
|
||||||
|
return updateSfDataNew(param, futures);
|
||||||
|
}else {
|
||||||
|
return updateIncrementalSfDataNew(param, futures);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (1 == param.getType()){
|
||||||
|
autoUpdateSfDataNew(param, futures);
|
||||||
|
}else {
|
||||||
|
autoUpdateIncrementalSfDataNew(param, futures);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ReturnT.SUCCESS;
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
throw e;
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
salesforceExecutor.remove(futures.toArray(new Future<?>[]{}));
|
||||||
|
log.error("immigrationUpdate error", throwable);
|
||||||
|
throw throwable;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装【单表】【存量】Update参数
|
||||||
|
*/
|
||||||
|
public ReturnT<String> updateSfDataNew(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
List<String> apis = DataUtil.toIdList(param.getApi());
|
||||||
|
|
||||||
|
String beginDateStr = null;
|
||||||
|
String endDateStr = null;
|
||||||
|
if (param.getBeginCreateDate() != null && param.getEndCreateDate() != null){
|
||||||
|
Date beginDate = param.getBeginCreateDate();
|
||||||
|
Date endDate = param.getEndCreateDate();
|
||||||
|
beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1).in("name", apis);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
String message = "api:" + apiNames + " is locked";
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", apiNames, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return new ReturnT<>(500, message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PartnerConnection partnerConnection = salesforceTargetConnect.createConnect();
|
||||||
|
for (String api : apis) {
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
try {
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
if (StringUtils.isNotEmpty(beginDateStr) && StringUtils.isNotEmpty(endDateStr)) {
|
||||||
|
dbQw.eq("sync_start_date", beginDateStr); // 等于开始时间
|
||||||
|
dbQw.eq("sync_end_date", endDateStr); // 等于结束时间
|
||||||
|
}
|
||||||
|
List<DataBatch> list = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
salesforceParams = list.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
UpdateSfDataNew(salesforceParam, partnerConnection);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 1);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
update.setNeedUpdate(false);
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw e;
|
||||||
|
} finally {
|
||||||
|
if (isFull) {
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装【单表】【增量】Update参数
|
||||||
|
*/
|
||||||
|
public ReturnT<String> updateIncrementalSfDataNew(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
List<String> apis = DataUtil.toIdList(param.getApi());
|
||||||
|
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1).in("name", apis);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
return new ReturnT<>(500, "api:" + apiNames + " is locked");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PartnerConnection partnerConnection = salesforceTargetConnect.createConnect();
|
||||||
|
for (String api : apis) {
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
try {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("name", api);
|
||||||
|
DataObject dataObject = dataObjectService.getOne(qw);
|
||||||
|
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
dbQw.gt("sync_end_date",dataObject.getLastUpdateDate());
|
||||||
|
List<DataBatch> dataBatches = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
if (CollectionUtils.isNotEmpty(dataBatches)) {
|
||||||
|
salesforceParams = dataBatches.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
UpdateSfDataNew(salesforceParam, partnerConnection);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 1);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw e;
|
||||||
|
} finally {
|
||||||
|
if (isFull) {
|
||||||
|
update.setNeedUpdate(false);
|
||||||
|
update.setName(api);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装【多表】【存量】Update参数
|
||||||
|
*/
|
||||||
|
private void autoUpdateSfDataNew(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
String message = "api:" + apiNames + " is locked";
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", apiNames, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return ;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
String beginDateStr = null;
|
||||||
|
String endDateStr = null;
|
||||||
|
if (param.getBeginCreateDate() != null && param.getEndCreateDate() != null){
|
||||||
|
Date beginDate = param.getBeginCreateDate();
|
||||||
|
Date endDate = param.getEndCreateDate();
|
||||||
|
beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
}
|
||||||
|
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("need_update", 1)
|
||||||
|
.orderByAsc("data_index")
|
||||||
|
.last(" limit 10");
|
||||||
|
|
||||||
|
PartnerConnection partnerConnection = salesforceTargetConnect.createConnect();
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
List<DataObject> dataObjects = dataObjectService.list(qw);
|
||||||
|
//判断dataObjects是否为空
|
||||||
|
if (CollectionUtils.isEmpty(dataObjects)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (DataObject dataObject : dataObjects) {
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
update.setName(dataObject.getName());
|
||||||
|
update.setDataLock(1);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
try {
|
||||||
|
String api = dataObject.getName();
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
if (StringUtils.isNotEmpty(beginDateStr) && StringUtils.isNotEmpty(endDateStr)) {
|
||||||
|
dbQw.eq("sync_start_date", beginDateStr); // 等于开始时间
|
||||||
|
dbQw.eq("sync_end_date", endDateStr); // 等于结束时间
|
||||||
|
}
|
||||||
|
List<DataBatch> list = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
salesforceParams = list.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
UpdateSfDataNew(salesforceParam, partnerConnection);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 0);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw e;
|
||||||
|
} finally {
|
||||||
|
update.setNeedUpdate(false);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
futures.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 组装【多表】【增量】Update参数
|
||||||
|
*/
|
||||||
|
private void autoUpdateIncrementalSfDataNew(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
|
|
||||||
|
// 全量的时候 检测是否有自动任务锁住的表
|
||||||
|
boolean isFull = CollectionUtils.isEmpty(param.getIds());
|
||||||
|
if (isFull) {
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("data_lock", 1);
|
||||||
|
List<DataObject> list = dataObjectService.list(qw);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
String apiNames = list.stream().map(DataObject::getName).collect(Collectors.joining());
|
||||||
|
String message = "api:" + apiNames + " is locked";
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", apiNames, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), message);
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return ;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("need_update", 1)
|
||||||
|
.orderByAsc("data_index")
|
||||||
|
.last(" limit 10");
|
||||||
|
|
||||||
|
PartnerConnection partnerConnection = salesforceTargetConnect.createConnect();
|
||||||
|
while (true) {
|
||||||
|
List<DataObject> dataObjects = dataObjectService.list(qw);
|
||||||
|
//判断dataObjects是否为空
|
||||||
|
if (CollectionUtils.isEmpty(dataObjects)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (DataObject dataObject : dataObjects) {
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
update.setName(dataObject.getName());
|
||||||
|
update.setDataLock(1);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
try {
|
||||||
|
String api = dataObject.getName();
|
||||||
|
List<SalesforceParam> salesforceParams = null;
|
||||||
|
QueryWrapper<DataBatch> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("name", api);
|
||||||
|
dbQw.gt("sync_end_date",dataObject.getLastUpdateDate());
|
||||||
|
List<DataBatch> list = dataBatchService.list(dbQw);
|
||||||
|
AtomicInteger batch = new AtomicInteger(1);
|
||||||
|
if (CollectionUtils.isNotEmpty(list)) {
|
||||||
|
salesforceParams = list.stream().map(t -> {
|
||||||
|
SalesforceParam salesforceParam = param.clone();
|
||||||
|
salesforceParam.setApi(t.getName());
|
||||||
|
salesforceParam.setBeginCreateDate(t.getSyncStartDate());
|
||||||
|
salesforceParam.setEndCreateDate(t.getSyncEndDate());
|
||||||
|
salesforceParam.setBatch(batch.getAndIncrement());
|
||||||
|
return salesforceParam;
|
||||||
|
}).collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手动任务优先执行
|
||||||
|
for (SalesforceParam salesforceParam : salesforceParams) {
|
||||||
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
|
try {
|
||||||
|
UpdateSfDataNew(salesforceParam, partnerConnection);
|
||||||
|
} catch (Throwable throwable) {
|
||||||
|
log.error("salesforceExecutor error", throwable);
|
||||||
|
throw new RuntimeException(throwable);
|
||||||
|
}
|
||||||
|
}, salesforceParam.getBatch(), 0);
|
||||||
|
futures.add(future);
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw e;
|
||||||
|
} finally {
|
||||||
|
update.setNeedUpdate(false);
|
||||||
|
update.setDataLock(0);
|
||||||
|
dataObjectService.updateById(update);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// 等待当前所有线程执行完成
|
||||||
|
salesforceExecutor.waitForFutures(futures.toArray(new Future<?>[]{}));
|
||||||
|
futures.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 执行Update更新数据
|
||||||
|
*/
|
||||||
|
private void UpdateSfDataNew(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
|
||||||
|
List<Map<String, Object>> infoFlagRows = customMapper.list("code,value", "system_config", "code ='" + SystemConfigCode.INFO_FLAG + "'");
Map<String, Object> infoFlag = infoFlagRows.isEmpty() ? null : infoFlagRows.get(0); // avoid IndexOutOfBoundsException when the flag row is missing
|
||||||
|
|
||||||
|
String api = param.getApi();
|
||||||
|
TimeUnit.MILLISECONDS.sleep(1);
|
||||||
|
QueryWrapper<DataField> dbQw = new QueryWrapper<>();
|
||||||
|
dbQw.eq("api", api);
|
||||||
|
List<DataField> list = dataFieldService.list(dbQw);
|
||||||
|
|
||||||
|
Date beginDate = param.getBeginCreateDate();
|
||||||
|
Date endDate = param.getEndCreateDate();
|
||||||
|
String sql = "";
|
||||||
|
String sql2 = "";
|
||||||
|
|
||||||
|
String beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
String endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
||||||
|
|
||||||
|
if (1 == param.getType()) {
|
||||||
|
if (api.contains("Share")){
|
||||||
|
sql = "where RowCause = 'Manual' and new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'";
|
||||||
|
sql2 = "RowCause = 'Manual' and new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' order by Id asc limit ";
|
||||||
|
}else {
|
||||||
|
sql = "where new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'";
|
||||||
|
sql2 = "new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' order by Id asc limit ";
|
||||||
|
}
|
||||||
|
}else {
|
||||||
|
if (api.contains("Share")){
|
||||||
|
sql = "where RowCause = 'Manual' and new_id is not null and LastModifiedDate >= '" + beginDateStr + "' ";
|
||||||
|
sql2 = "RowCause = 'Manual' and new_id is not null and LastModifiedDate >= '" + beginDateStr + "' order by Id asc limit ";
|
||||||
|
}else {
|
||||||
|
sql = "where new_id is not null and LastModifiedDate >= '" + beginDateStr + "' ";
|
||||||
|
sql2 = "new_id is not null and LastModifiedDate >= '" + beginDateStr + "' order by Id asc limit ";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//表内数据总量
|
||||||
|
Integer count = customMapper.countBySQL(api, sql);
|
||||||
|
log.error("总Update数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
|
||||||
|
|
||||||
|
if(count == 0){
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//判断引用对象是否存在new_id
|
||||||
|
DataObject update = new DataObject();
|
||||||
|
update.setName(api);
|
||||||
|
|
||||||
|
int targetCount = 0;
|
||||||
|
//批量插入200一次
|
||||||
|
int page = count%200 == 0 ? count/200 : (count/200) + 1;
|
||||||
|
for (int i = 0; i < page; i++) {
|
||||||
|
List<Map<String, Object>> mapList = customMapper.list("*", api, sql2+ i * 200 + ",200");
|
||||||
|
SObject[] accounts = new SObject[mapList.size()];
|
||||||
|
int j = 0;
|
||||||
|
for (Map<String, Object> map : mapList) {
|
||||||
|
SObject account = new SObject();
|
||||||
|
account.setType(api);
|
||||||
|
//给对象赋值
|
||||||
|
for (DataField dataField : list) {
|
||||||
|
String field = dataField.getField();
|
||||||
|
String reference_to = dataField.getReferenceTo();
|
||||||
|
|
||||||
|
String value = String.valueOf(map.get(field));
|
||||||
|
//根据旧sfid查找引用对象新sfid
|
||||||
|
if (field.equals("Id")) {
|
||||||
|
account.setId(String.valueOf(map.get("new_id")));
|
||||||
|
} else if (!DataUtil.isUpdate(field) || (dataField.getIsCreateable() != null && !dataField.getIsCreateable())) {
|
||||||
|
continue;
|
||||||
|
} else if (StringUtils.isNotBlank(reference_to) && !"data_picklist".equals(reference_to)) {
|
||||||
|
|
||||||
|
if (!"null".equals(value) && StringUtils.isNotEmpty(value) && (!"OwnerId".equals(field)
|
||||||
|
&& !"Owner_Type".equals(field))) {
|
||||||
|
//判断reference_to内是否包含User字符串
|
||||||
|
if (reference_to.contains("User")) {
|
||||||
|
reference_to = "User";
|
||||||
|
}
|
||||||
|
Map<String, Object> m = customMapper.getById("new_id", reference_to, value);
|
||||||
|
if (m != null && !m.isEmpty()) {
|
||||||
|
account.setField(field, m.get("new_id"));
|
||||||
|
}else {
|
||||||
|
String message = "对象类型:" + api + "的数据:"+ map.get("Id") +"的引用对象:" + dataField.getReferenceTo() + "的数据:"+ map.get(field) +"不存在!";
|
||||||
|
EmailUtil.send("DataDump ERROR", message);
|
||||||
|
log.info(message);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (map.get(field) != null && StringUtils.isNotBlank(dataField.getSfType())) {
|
||||||
|
account.setField(field, DataUtil.localDataToSfData(dataField.getSfType(), value));
|
||||||
|
}else {
|
||||||
|
account.setField(field, map.get(field));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
account.setField("old_owner_id__c", map.get("OwnerId"));
|
||||||
|
account.setField("old_sfdc_id__c", map.get("Id"));
|
||||||
|
accounts[j++] = account;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (infoFlag != null && "1".equals(infoFlag.get("value"))){
|
||||||
|
printlnAccountsDetails(accounts,list);
|
||||||
|
}
|
||||||
|
SaveResult[] saveResults = partnerConnection.update(accounts);
|
||||||
|
for (SaveResult saveResult : saveResults) {
|
||||||
|
if (!saveResult.getSuccess()) {
|
||||||
|
Map<String, String> map = returnErrorAccountsDetails(accounts, list, saveResult.getId());
|
||||||
|
log.info("-------------saveResults: {}", JSON.toJSONString(saveResult));
|
||||||
|
String format = String.format("数据更新 error, api name: %s, \nparam: %s, \ncause:\n%s, \n数据实体类:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), JSON.toJSONString(saveResult),JSON.toJSONString(map));
|
||||||
|
EmailUtil.send("DataDump ERROR", format);
|
||||||
|
return;
|
||||||
|
}else {
|
||||||
|
targetCount ++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log.info("sf return saveResults------" + JSONArray.toJSONString(saveResults));
|
||||||
|
} catch (Throwable e) {
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
UpdateWrapper<DataBatchHistory> updateQw = new UpdateWrapper<>();
|
||||||
|
updateQw.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", DateUtils.addSeconds(endDate, -1))
|
||||||
|
.set("target_update_num", targetCount);
|
||||||
|
dataBatchHistoryService.update(updateQw);
|
||||||
|
|
||||||
|
UpdateWrapper<DataBatch> updateQw2 = new UpdateWrapper<>();
|
||||||
|
updateQw2.eq("name", api)
|
||||||
|
.eq("sync_start_date", beginDate)
|
||||||
|
.eq("sync_end_date", endDate)
|
||||||
|
.set("sf_update_num", targetCount);
|
||||||
|
dataBatchService.update(updateQw2);
|
||||||
|
}
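/*
 * Illustrative sketch (assumed to keep the same semantics as the branches above): the four
 * WHERE-clause variants differ only in an optional RowCause filter and the date column, so
 * they could be assembled from shared fragments instead of duplicated literals.
 *
 *     String rowCause = api.contains("Share") ? "RowCause = 'Manual' and " : "";
 *     String dateFilter = (1 == param.getType())
 *             ? "CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'"
 *             : "LastModifiedDate >= '" + beginDateStr + "'";
 *     sql = "where " + rowCause + "new_id is not null and " + dateFilter;
 *     sql2 = rowCause + "new_id is not null and " + dateFilter + " order by Id asc limit ";
 */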
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 打印SF交互数据明细
|
||||||
|
*/
|
||||||
|
public void printlnAccountsDetails(SObject[] accounts,List<DataField> list) {
|
||||||
|
for (int i = 0; i < accounts.length; i++) {
|
||||||
|
SObject account = accounts[i];
|
||||||
|
System.out.println("--- 对象数据[" + i + "] ---");
|
||||||
|
// 获取对象所有字段名
|
||||||
|
for (DataField dataField : list) {
|
||||||
|
try {
|
||||||
|
Object value = account.getField(dataField.getField());
|
||||||
|
System.out.println(dataField.getField() + ": " + (value != null ? value.toString() : "null"));
|
||||||
|
} catch (Exception e) {
|
||||||
|
System.out.println(dataField.getField() + ": [权限不足或字段不存在]");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
System.out.println("old_owner_id__c: " + (account.getField("old_owner_id__c") != null ? account.getField("old_owner_id__c").toString() : "null"));
|
||||||
|
System.out.println("old_sfdc_id__c: " + (account.getField("old_sfdc_id__c") != null ? account.getField("old_sfdc_id__c").toString() : "null"));
|
||||||
|
|
||||||
|
}
|
||||||
|
}
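/*
 * Illustrative note (sketch): the field dump above writes to System.out; routing it through
 * the class-level @Slf4j logger would keep the detail output in the same log files as the
 * rest of the job, e.g.:
 *
 *     log.info("{}: {}", dataField.getField(), value != null ? value.toString() : "null");
 */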
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 返回SF交互数据错误明细
|
||||||
|
*/
|
||||||
|
public Map<String,String> returnErrorAccountsDetails(SObject[] accounts,List<DataField> list,String errorId) {
|
||||||
|
HashMap<String, String> map = new HashMap<>();
|
||||||
|
for (int i = 0; i < accounts.length; i++) {
|
||||||
|
SObject account = accounts[i];
|
||||||
|
if (errorId.equals(account.getId()) || errorId.equals(account.getField("Id"))){
|
||||||
|
for (DataField dataField : list) {
|
||||||
|
try {
|
||||||
|
Object value = account.getField(dataField.getField());
|
||||||
|
map.put(dataField.getField(),String.valueOf(value));
|
||||||
|
System.out.println(dataField.getField() + ": " + (value != null ? value.toString() : "null"));
|
||||||
|
} catch (Exception e) {
|
||||||
|
System.out.println(dataField.getField() + ": [权限不足或字段不存在]");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
map.put("old_owner_id__c",String.valueOf(account.getField("old_owner_id__c")));
|
||||||
|
map.put("old_sfdc_id__c",String.valueOf(account.getField("old_sfdc_id__c")));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 获取DocumentLink
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> dumpDocumentLinkJob(String paramStr) throws Exception {
|
||||||
|
String api = "ContentDocumentLink";
|
||||||
|
PartnerConnection partnerConnection = salesforceConnect.createConnect();
|
||||||
|
List<Map<String, Object>> list = customMapper.list("Id", "ContentDocument", "new_id is not null");
|
||||||
|
DescribeSObjectResult dsr = partnerConnection.describeSObject(api);
|
||||||
|
List<String> fields = customMapper.getFields(api).stream().map(String::toUpperCase).collect(Collectors.toList());
|
||||||
|
Field[] dsrFields = dsr.getFields();
|
||||||
|
try {
|
||||||
|
if (list != null && !list.isEmpty()) {
|
||||||
|
for (Map<String, Object> map : list) {
|
||||||
|
String contentDocumentId = (String) map.get("Id");
|
||||||
|
String sql = "SELECT Id, LinkedEntityId, LinkedEntity.Type, ContentDocumentId, Visibility, ShareType, SystemModstamp, IsDeleted FROM ContentDocumentLink where ContentDocumentId = '" + contentDocumentId + "'";
|
||||||
|
com.alibaba.fastjson2.JSONArray objects = null;
|
||||||
|
QueryResult queryResult = partnerConnection.queryAll(sql);
|
||||||
|
SObject[] records = queryResult.getRecords();
|
||||||
|
objects = DataUtil.toJsonArray(records, dsrFields);
|
||||||
|
commonService.saveOrUpdate(api, fields, records, objects, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("getDocumentLink error api:{}, data:{}", api, com.alibaba.fastjson2.JSON.toJSONString(list), e);
|
||||||
|
return ReturnT.FAIL;
|
||||||
|
} catch (Throwable e) {
|
||||||
|
log.error("getDocumentLink error api:{}, data:{}", api, com.alibaba.fastjson2.JSON.toJSONString(list), e);
|
||||||
|
TimeUnit.MINUTES.sleep(1);
|
||||||
|
return ReturnT.FAIL;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
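/*
 * Illustrative sketch (assumption: the id list stays well below the SOQL statement length
 * limit): instead of one queryAll call per ContentDocument, the ids read from the local
 * table could be grouped into a single IN clause to cut down API round-trips.
 *
 *     String inClause = list.stream()
 *             .map(m -> "'" + m.get("Id") + "'")
 *             .collect(Collectors.joining(",", "(", ")"));
 *     String soql = "SELECT Id, LinkedEntityId, LinkedEntity.Type, ContentDocumentId, Visibility, "
 *             + "ShareType, SystemModstamp, IsDeleted FROM ContentDocumentLink "
 *             + "WHERE ContentDocumentId IN " + inClause;
 *     QueryResult queryResult = partnerConnection.queryAll(soql);
 */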
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 推送DocumentLink
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public ReturnT<String> uploadDocumentLinkJob(String paramStr) throws Exception {
|
||||||
|
String api = "ContentDocumentLink";
|
||||||
|
PartnerConnection connection = salesforceTargetConnect.createConnect();
|
||||||
|
List<Map<String, Object>> list = customMapper.list("Id", "ContentDocument", "new_id is not null");
|
||||||
|
try {
|
||||||
|
if (list != null && !list.isEmpty()) {
|
||||||
|
//表内数据总量
|
||||||
|
Integer count = customMapper.countBySQL(api, "where ShareType = 'V' and new_id = '0'");
|
||||||
|
//批量插入200一次
|
||||||
|
int page = count % 200 == 0 ? count / 200 : (count / 200) + 1;
|
||||||
|
for (int i = 0; i < page; i++) {
|
||||||
|
List<Map<String, Object>> linkList = customMapper.list("Id,LinkedEntityId,ContentDocumentId,LinkedEntity_Type,ShareType,Visibility", api, "ShareType = 'V' and new_id = '0' order by Id asc limit 200");
|
||||||
|
SObject[] accounts = new SObject[linkList.size()];
|
||||||
|
String[] ids = new String[linkList.size()];
|
||||||
|
int index = 0;
|
||||||
|
for (Map<String, Object> map : linkList) {
|
||||||
|
String linkedEntityId = (String) map.get("LinkedEntityId");
|
||||||
|
String id = (String) map.get("Id");
|
||||||
|
String contentDocumentId = (String) map.get("ContentDocumentId");
|
||||||
|
String linkedEntityType = (String) map.get("LinkedEntity_Type");
|
||||||
|
String shareType = (String) map.get("ShareType");
|
||||||
|
String Visibility = (String) map.get("Visibility");
|
||||||
|
|
||||||
|
// dataObject查询
|
||||||
|
QueryWrapper<DataObject> qw = new QueryWrapper<>();
|
||||||
|
qw.eq("name", linkedEntityType);
|
||||||
|
List<DataObject> objects = dataObjectService.list(qw);
|
||||||
|
if (!objects.isEmpty()) {
|
||||||
|
Map<String, Object> dMap = customMapper.getById("new_id", "ContentDocument", contentDocumentId);
|
||||||
|
Map<String, Object> lMap = customMapper.getById("new_id", linkedEntityType, linkedEntityId);
|
||||||
|
|
||||||
|
SObject account = new SObject();
|
||||||
|
account.setType(api);
|
||||||
|
account.setField("ContentDocumentId", dMap.get("new_id").toString());
|
||||||
|
account.setField("LinkedEntityId", lMap.get("new_id").toString());
|
||||||
|
account.setField("ShareType", shareType);
|
||||||
|
account.setField("Visibility", Visibility);
|
||||||
|
ids[index] = id;
|
||||||
|
accounts[index] = account;
|
||||||
|
index++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
SaveResult[] saveResults = connection.create(accounts);
|
||||||
|
for (int j = 0; j < saveResults.length; j++) {
|
||||||
|
if (!saveResults[j].getSuccess()) {
|
||||||
|
String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(DataDumpParam.getFilter()), com.alibaba.fastjson.JSON.toJSONString(saveResults[j]));
|
||||||
|
EmailUtil.send("DataDump ContentDocumentLink ERROR", format);
|
||||||
|
} else {
|
||||||
|
List<Map<String, Object>> dList = new ArrayList<>();
|
||||||
|
Map<String, Object> linkMap = new HashMap<>();
|
||||||
|
linkMap.put("key", "new_id");
|
||||||
|
linkMap.put("value", saveResults[j].getId());
|
||||||
|
dList.add(linkMap);
|
||||||
|
customMapper.updateById("ContentDocumentLink", dList, ids[j]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("getDocumentLink error api:{}, data:{}", api, com.alibaba.fastjson2.JSON.toJSONString(accounts), e);
|
||||||
|
EmailUtil.send("-------测试-----------", com.alibaba.fastjson2.JSON.toJSONString(accounts));
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("getDocumentLink error api:{}, data:{}", api, com.alibaba.fastjson2.JSON.toJSONString(list), e);
|
||||||
|
return ReturnT.FAIL;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@@ -1,27 +1,29 @@
|
|||||||
package com.celnet.datadump.service.impl;
|
package com.celnet.datadump.service.impl;
|
||||||
|
|
||||||
|
import cn.hutool.core.lang.UUID;
|
||||||
import com.alibaba.fastjson.JSON;
|
import com.alibaba.fastjson.JSON;
|
||||||
import com.alibaba.fastjson.JSONArray;
|
import com.alibaba.fastjson.JSONArray;
|
||||||
import com.alibaba.fastjson.JSONObject;
|
import com.alibaba.fastjson2.JSONObject;
|
||||||
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
|
||||||
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
|
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
|
||||||
import com.celnet.datadump.config.SalesforceExecutor;
|
import com.celnet.datadump.config.SalesforceExecutor;
|
||||||
import com.celnet.datadump.config.SalesforceTargetConnect;
|
import com.celnet.datadump.config.SalesforceTargetConnect;
|
||||||
import com.celnet.datadump.entity.DataBatch;
|
import com.celnet.datadump.entity.*;
|
||||||
import com.celnet.datadump.entity.DataBatchHistory;
|
|
||||||
import com.celnet.datadump.entity.DataField;
|
|
||||||
import com.celnet.datadump.entity.DataObject;
|
|
||||||
import com.celnet.datadump.global.Const;
|
import com.celnet.datadump.global.Const;
|
||||||
|
import com.celnet.datadump.global.SystemConfigCode;
|
||||||
import com.celnet.datadump.mapper.CustomMapper;
|
import com.celnet.datadump.mapper.CustomMapper;
|
||||||
import com.celnet.datadump.param.DataDumpParam;
|
import com.celnet.datadump.param.DataDumpParam;
|
||||||
import com.celnet.datadump.param.DataDumpSpecialParam;
|
import com.celnet.datadump.param.DataDumpSpecialParam;
|
||||||
import com.celnet.datadump.param.SalesforceParam;
|
import com.celnet.datadump.param.SalesforceParam;
|
||||||
import com.celnet.datadump.service.*;
|
import com.celnet.datadump.service.*;
|
||||||
|
import com.celnet.datadump.util.BulkUtil;
|
||||||
|
import com.celnet.datadump.util.CsvConverterUtil;
|
||||||
import com.celnet.datadump.util.DataUtil;
|
import com.celnet.datadump.util.DataUtil;
|
||||||
import com.celnet.datadump.util.EmailUtil;
|
import com.celnet.datadump.util.EmailUtil;
|
||||||
import com.google.common.collect.Lists;
|
import com.google.common.collect.Lists;
|
||||||
import com.sforce.soap.partner.PartnerConnection;
|
import com.google.common.collect.Maps;
|
||||||
import com.sforce.soap.partner.SaveResult;
|
import com.sforce.async.*;
|
||||||
|
import com.sforce.soap.partner.*;
|
||||||
import com.sforce.soap.partner.sobject.SObject;
|
import com.sforce.soap.partner.sobject.SObject;
|
||||||
import com.sforce.ws.ConnectionException;
|
import com.sforce.ws.ConnectionException;
|
||||||
import com.xxl.job.core.biz.model.ReturnT;
|
import com.xxl.job.core.biz.model.ReturnT;
|
||||||
@@ -60,6 +62,9 @@ public class DataImportServiceImpl implements DataImportService {
|
|||||||
@Autowired
|
@Autowired
|
||||||
private DataFieldService dataFieldService;
|
private DataFieldService dataFieldService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private DataLogService dataLogService;
|
||||||
|
|
||||||
@Autowired
|
@Autowired
|
||||||
private CustomMapper customMapper;
|
private CustomMapper customMapper;
|
||||||
|
|
||||||
@@ -69,6 +74,10 @@ public class DataImportServiceImpl implements DataImportService {
|
|||||||
@Autowired
|
@Autowired
|
||||||
private CommonService commonService;
|
private CommonService commonService;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private SystemConfigService systemConfigService;
|
||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ReturnT<String> immigration(SalesforceParam param) throws Exception {
|
public ReturnT<String> immigration(SalesforceParam param) throws Exception {
|
||||||
List<Future<?>> futures = Lists.newArrayList();
|
List<Future<?>> futures = Lists.newArrayList();
|
||||||
@@ -91,27 +100,7 @@ public class DataImportServiceImpl implements DataImportService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public ReturnT<String> immigrationNew(SalesforceParam param) throws Exception {
|
|
||||||
List<Future<?>> futures = Lists.newArrayList();
|
|
||||||
try {
|
|
||||||
if (StringUtils.isNotBlank(param.getApi())) {
|
|
||||||
// 手动任务
|
|
||||||
ReturnT<String> result = manualImmigration(param, futures);
|
|
||||||
if (result != null) {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// 自动任务
|
|
||||||
autoImmigration(param, futures);
|
|
||||||
}
|
|
||||||
return ReturnT.SUCCESS;
|
|
||||||
} catch (Exception exception) {
|
|
||||||
salesforceExecutor.remove(futures.toArray(new Future<?>[]{}));
|
|
||||||
log.error("immigration error", exception);
|
|
||||||
throw exception;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ReturnT<String> manualImmigration(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
public ReturnT<String> manualImmigration(SalesforceParam param, List<Future<?>> futures) throws Exception {
|
||||||
@@ -193,6 +182,8 @@ public class DataImportServiceImpl implements DataImportService {
         return null;
     }
 
+
+
     @Override
     public void autoImmigration(SalesforceParam param, List<Future<?>> futures) throws Exception {
         QueryWrapper<DataObject> qw = new QueryWrapper<>();
@@ -236,7 +227,7 @@ public class DataImportServiceImpl implements DataImportService {
             for (SalesforceParam salesforceParam : salesforceParams) {
                 Future<?> future = salesforceExecutor.execute(() -> {
                     try {
-                        autoCreatedNewId(salesforceParam, partnerConnection);
+                        manualCreatedNewId(salesforceParam, partnerConnection);
                     } catch (Throwable throwable) {
                         log.error("salesforceExecutor error", throwable);
                         throw new RuntimeException(throwable);
@@ -265,6 +256,7 @@ public class DataImportServiceImpl implements DataImportService {
     }
 
     private void manualCreatedNewId(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
+        Map<String, Object> infoFlag = customMapper.list("code,value","system_config","code ='"+SystemConfigCode.INFO_FLAG+"'").get(0);
         String api = param.getApi();
         QueryWrapper<DataField> dbQw = new QueryWrapper<>();
         dbQw.eq("api", api);
@@ -278,9 +270,8 @@ public class DataImportServiceImpl implements DataImportService {
 
         //表内数据总量
         Integer count = customMapper.countBySQL(api, "where new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
+        log.error("总Insert数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
         if (count == 0) {
-            log.error("无数据同步 api:{}", api);
             return;
         }
 
@@ -293,6 +284,7 @@ public class DataImportServiceImpl implements DataImportService {
 
             List<Map<String, Object>> data = customMapper.list("*", api, "new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' limit 200");
             int size = data.size();
+            log.info("执行api:{}, 执行page:{}, 执行size:{}", api, i+1, size);
             SObject[] accounts = new SObject[size];
             String[] ids = new String[size];
             for (int j = 1; j <= size; j++) {
@@ -317,9 +309,8 @@ public class DataImportServiceImpl implements DataImportService {
                             maxIndex.select("IFNULL(max(data_index),0) as data_index");
                             maxIndex.ne("name", api);
                             Map<String, Object> map = dataObjectService.getMap(maxIndex);
 
                             //如果必填lookup字段没有值,跳过
-                            update.setDataIndex(Integer.parseInt(map.get("data_index").toString()+1));
+                            update.setDataIndex(Integer.parseInt(map.get("data_index").toString()) + 1);
                             dataObjectService.updateById(update);
                             return;
                         }else{
@@ -335,21 +326,22 @@ public class DataImportServiceImpl implements DataImportService {
                             account.setField(dataField.getField(), DataUtil.fieldTypeToSf(dataField));
                         }
                     }
-                //object类型转Date类型
-                Date date;
-                try {
-                    date = sdf.parse(String.valueOf(data.get(j - 1).get("CreatedDate")));
-                }catch (ParseException e){
-                    //解决当时间秒为0时,转换秒精度丢失问题
-                    date = sdf.parse(data.get(j - 1).get("CreatedDate")+":00");
-                }
-                Calendar calendar = Calendar.getInstance();
-                calendar.setTime(date);
-                account.setField("CreatedDate", calendar);
-                log.info("-----打印时间参数-----" + calendar);
+                if (!api.equals("Product2")){
+                    //object类型转Date类型
+                    Date date;
+                    try {
+                        date = sdf.parse(String.valueOf(data.get(j - 1).get("CreatedDate")));
+                    }catch (ParseException e){
+                        //解决当时间秒为0时,转换秒精度丢失问题
+                        date = sdf.parse(data.get(j - 1).get("CreatedDate")+":00");
+                    }
+                    Calendar calendar = Calendar.getInstance();
+                    calendar.setTime(date);
+                    account.setField("CreatedDate", calendar);
                     Map<String, Object> CreatedByIdMap = customMapper.getById("new_id", "User", data.get(j-1).get("CreatedById").toString());
                     if(CreatedByIdMap.get("new_id") != null && StringUtils.isNotEmpty(CreatedByIdMap.get("new_id").toString())){
                         account.setField("CreatedById", CreatedByIdMap.get("new_id"));
+                    }
                 }
                 if (api.equals("Task")){
                     account.setField("TaskSubtype",data.get(j - 1).get("TaskSubtype"));
@@ -357,7 +349,18 @@ public class DataImportServiceImpl implements DataImportService {
 
                 if (api.equals("Event")){
                     account.setField("EventSubtype", String.valueOf(data.get(j - 1).get("EventSubtype")));
-                    account.setField("IsRecurrence", String.valueOf(data.get(j - 1).get("IsRecurrence")));
+                    // account.setField("IsRecurrence", String.valueOf(data.get(j - 1).get("IsRecurrence")));
+                }
+                // if (api.equals("Account")){
+                //     Map<String, Object> referenceMap = customMapper.list("new_id","RecordType", "new_id is not null and id = '"+ data.get(j - 1).get("RecordTypeId")+"' limit 1").get(0);
+                //     account.setField("RecordTypeId", referenceMap.get("new_id") );
+                // }
+                if (api.equals("vlink__Wechat_User__c")){
+                    List<Map<String, Object>> maps = customMapper.list("new_id", "vlink__Wechat_Account__c", "new_id is not null and id = '" + data.get(j - 1).get("vlink__Wechat_Account__c") + "' limit 1");
+                    if (!maps.isEmpty()){
+                        Map<String, Object> referenceMap = maps.get(0);
+                        account.setField("vlink__Wechat_Account__c", referenceMap.get("new_id"));
+                    }
                 }
 
                 ids[j-1] = data.get(j-1).get("Id").toString();
@@ -366,7 +369,11 @@ public class DataImportServiceImpl implements DataImportService {
                     break;
                 }
             }
 
             try {
+                if (infoFlag != null && "1".equals(infoFlag.get("value"))){
+                    printAccountsDetails(accounts, list);
+                }
                 SaveResult[] saveResults = partnerConnection.create(accounts);
                 log.info("sf return saveResults------"+ JSONArray.toJSONString(saveResults));
                 int index = 0;
@ -418,168 +425,6 @@ public class DataImportServiceImpl implements DataImportService {
|
|||||||
dataBatchService.update(updateQw2);
|
dataBatchService.update(updateQw2);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void autoCreatedNewId(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
|
|
||||||
String api = param.getApi();
|
|
||||||
QueryWrapper<DataField> dbQw = new QueryWrapper<>();
|
|
||||||
dbQw.eq("api", api);
|
|
||||||
List<DataField> list = dataFieldService.list(dbQw);
|
|
||||||
TimeUnit.MILLISECONDS.sleep(1);
|
|
||||||
|
|
||||||
param.setApi(api);
|
|
||||||
Date beginDate = param.getBeginCreateDate();
|
|
||||||
Date endDate = param.getEndCreateDate();
|
|
||||||
String beginDateStr = DateUtil.format(beginDate, "yyyy-MM-dd HH:mm:ss");
|
|
||||||
String endDateStr = DateUtil.format(endDate, "yyyy-MM-dd HH:mm:ss");
|
|
||||||
Integer count = 0;
|
|
||||||
//表内数据总量
|
|
||||||
if (api.contains("Share")){
|
|
||||||
count = customMapper.countBySQL(api, "where RowCause = 'Manual' and new_id is null and LastModifiedDate >= '" + beginDateStr + "' and LastModifiedDate < '" + endDateStr + "'");
|
|
||||||
}else {
|
|
||||||
count = customMapper.countBySQL(api, "where new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (count == 0) {
|
|
||||||
log.error("无数据同步 api:{}", api);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
//批量插入200一次
|
|
||||||
int page = count % 200 == 0 ? count / 200 : (count / 200) + 1;
|
|
||||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
|
|
||||||
DataObject update = new DataObject();
|
|
||||||
update.setName(api);
|
|
||||||
for (int i = 0; i < page; i++) {
|
|
||||||
List<Map<String, Object>> data = null;
|
|
||||||
if (api.contains("Share")){
|
|
||||||
data = customMapper.list("*", api, "RowCause = 'Manual' and new_id is null and LastModifiedDate >= '" + beginDateStr + "' and LastModifiedDate < '" + endDateStr + "' limit 200");
|
|
||||||
}else {
|
|
||||||
data = customMapper.list("*", api, "new_id is null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "' limit 200");
|
|
||||||
}
|
|
||||||
|
|
||||||
int size = data.size();
|
|
||||||
SObject[] accounts = new SObject[size];
|
|
||||||
String[] ids = new String[size];
|
|
||||||
for (int j = 0; j < size; j++) {
|
|
||||||
SObject account = new SObject();
|
|
||||||
account.setType(api);
|
|
||||||
//找出sf对象必填字段,并且给默认值
|
|
||||||
for (DataField dataField : list) {
|
|
||||||
if ("OwnerId".equals(dataField.getField()) || "Owner_Type".equals(dataField.getField())
|
|
||||||
|| "Id".equals(dataField.getField())){
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
//用完放下面 && !dataField.getIsNillable() && !dataField.getIsDefaultedOnCreate()
|
|
||||||
if (dataField.getIsCreateable()) {
|
|
||||||
if ("reference".equals(dataField.getSfType())){
|
|
||||||
String reference = dataField.getReferenceTo();
|
|
||||||
if ("Group,User".equals(reference)) {
|
|
||||||
reference = "User";
|
|
||||||
}
|
|
||||||
// List<Map<String, Object>> referenceMap = customMapper.list("new_id", reference, "new_id is not null limit 1");
|
|
||||||
//share表处理
|
|
||||||
List<Map<String, Object>> referenceMap = customMapper.list("new_id", reference, "Id = '"+data.get(j).get(dataField.getField()).toString()+"' and new_id is not null limit 1");
|
|
||||||
|
|
||||||
if (referenceMap.size() == 0){
|
|
||||||
QueryWrapper<DataObject> maxIndex = new QueryWrapper<>();
|
|
||||||
maxIndex.select("IFNULL(max(data_index),0) as data_index");
|
|
||||||
maxIndex.ne("name", api);
|
|
||||||
Map<String, Object> map = dataObjectService.getMap(maxIndex);
|
|
||||||
|
|
||||||
//如果必填lookup字段没有值,跳过
|
|
||||||
update.setDataIndex(Integer.parseInt(map.get("data_index").toString()+1));
|
|
||||||
dataObjectService.updateById(update);
|
|
||||||
return;
|
|
||||||
}else{
|
|
||||||
account.setField(dataField.getField(), referenceMap.get(0).get("new_id"));
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// if ("picklist".equals(dataField.getSfType())){
|
|
||||||
// List<Map<String, Object>> pickList = customMapper.list("value", "data_picklist", "api = '"+api+"' and field = '"+dataField.getField()+"' limit 1");
|
|
||||||
// account.setField(dataField.getField(), pickList.get(0).get("value"));
|
|
||||||
// continue;
|
|
||||||
// }
|
|
||||||
// account.setField(dataField.getField(), DataUtil.fieldTypeToSf(dataField));
|
|
||||||
if ("picklist".equals(dataField.getSfType())){
|
|
||||||
account.setField(dataField.getField(), data.get(j).get(dataField.getField()));
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
accounts[j] = account;
|
|
||||||
//object类型转Date类型
|
|
||||||
// Date date;
|
|
||||||
// //date转Calendar类型
|
|
||||||
// Calendar calendar = Calendar.getInstance();
|
|
||||||
// try {
|
|
||||||
// date = sdf.parse(String.valueOf(data.get(j - 1).get("CreatedDate")));
|
|
||||||
// }catch (ParseException e){
|
|
||||||
// //解决当时间秒为0时,转换秒精度丢失问题
|
|
||||||
// date = sdf.parse(data.get(j - 1).get("CreatedDate")+":00");
|
|
||||||
// }
|
|
||||||
// calendar.setTime(date);
|
|
||||||
// account.setField("CreatedDate", calendar);
|
|
||||||
// Map<String, Object> CreatedByIdMap = customMapper.getById("new_id", "User", data.get(j-1).get("CreatedById").toString());
|
|
||||||
// account.setField("CreatedById", CreatedByIdMap.get("new_id"));
|
|
||||||
|
|
||||||
ids[j] = data.get(j).get("Id").toString();
|
|
||||||
accounts[j] = account;
|
|
||||||
if (i*200+j == count){
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
SaveResult[] saveResults = partnerConnection.create(accounts);
|
|
||||||
int index = 0;
|
|
||||||
for (SaveResult saveResult : saveResults) {
|
|
||||||
if (saveResult.getSuccess()) {
|
|
||||||
List<Map<String, Object>> maps = new ArrayList<>();
|
|
||||||
Map<String, Object> m = new HashMap<>();
|
|
||||||
m.put("key", "new_id");
|
|
||||||
m.put("value", saveResult.getId());
|
|
||||||
maps.add(m);
|
|
||||||
customMapper.updateById(api, maps, ids[index]);
|
|
||||||
index++;
|
|
||||||
} else {
|
|
||||||
log.error("-------------saveResults: {}", JSON.toJSONString(saveResult));
|
|
||||||
ReturnT.FAIL.setCode(500);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TimeUnit.MILLISECONDS.sleep(1);
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.error("autoCreatedNewId error api:{}", api, e);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
SalesforceParam countParam = new SalesforceParam();
|
|
||||||
countParam.setApi(api);
|
|
||||||
countParam.setBeginCreateDate(beginDate);
|
|
||||||
countParam.setEndCreateDate(DateUtils.addSeconds(endDate, -1));
|
|
||||||
// 存在isDeleted 只查询IsDeleted为false的
|
|
||||||
if (dataFieldService.hasDeleted(countParam.getApi())) {
|
|
||||||
countParam.setIsDeleted(false);
|
|
||||||
} else {
|
|
||||||
// 不存在 过滤
|
|
||||||
countParam.setIsDeleted(null);
|
|
||||||
}
|
|
||||||
// sf count
|
|
||||||
Integer sfNum = commonService.countSfNum(partnerConnection, countParam);
|
|
||||||
|
|
||||||
UpdateWrapper<DataBatchHistory> updateQw = new UpdateWrapper<>();
|
|
||||||
updateQw.eq("name", api)
|
|
||||||
.eq("sync_start_date", beginDate)
|
|
||||||
.eq("sync_end_date", DateUtils.addSeconds(endDate, -1))
|
|
||||||
.set("target_sf_num", sfNum);
|
|
||||||
dataBatchHistoryService.update(updateQw);
|
|
||||||
|
|
||||||
UpdateWrapper<DataBatch> updateQw2 = new UpdateWrapper<>();
|
|
||||||
updateQw2.eq("name", api)
|
|
||||||
.eq("sync_start_date", beginDate)
|
|
||||||
.eq("sync_end_date", endDate)
|
|
||||||
.set("sf_add_num", sfNum);
|
|
||||||
dataBatchService.update(updateQw2);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ReturnT<String> immigrationUpdate(SalesforceParam param) throws Exception {
|
public ReturnT<String> immigrationUpdate(SalesforceParam param) throws Exception {
|
||||||
@@ -605,6 +450,7 @@ public class DataImportServiceImpl implements DataImportService {
         }
     }
 
+
     public ReturnT<String> updateSfData(SalesforceParam param, List<Future<?>> futures) throws Exception {
         List<String> apis;
         String beginDateStr = null;
@@ -762,6 +608,7 @@ public class DataImportServiceImpl implements DataImportService {
     }
 
     private void manualUpdateSfData(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
+        Map<String, Object> infoFlag = customMapper.list("code,value","system_config","code ='"+SystemConfigCode.INFO_FLAG+"'").get(0);
         String api = param.getApi();
         QueryWrapper<DataField> dbQw = new QueryWrapper<>();
         dbQw.eq("api", api);
@@ -776,8 +623,10 @@ public class DataImportServiceImpl implements DataImportService {
         //表内数据总量
         Integer count = customMapper.countBySQL(api, "where new_id is not null and CreatedDate >= '" + beginDateStr + "' and CreatedDate < '" + endDateStr + "'");
 
+        log.error("总Update数据 count:{};-开始时间:{};-结束时间:{};-api:{};", count, beginDateStr, endDateStr, api);
 
         if(count == 0){
-            log.error("无数据同步 api:{}", api);
+            return;
         }
 
         int targetCount = 0;
@@ -816,21 +665,34 @@ public class DataImportServiceImpl implements DataImportService {
                 if (map.get(field) != null && StringUtils.isNotBlank(dataField.getSfType())) {
                     account.setField(field, DataUtil.localDataToSfData(dataField.getSfType(), String.valueOf(map.get(field))));
                 }else {
-                    account.setField(field, map.get(field));
+                    if (api.equals("Account")){
+                        if ("1".equals(map.get("IsPersonAccount")) && field.equals("Name")){
+                            continue;
+                        }else if("0".equals(map.get("IsPersonAccount")) && field.equals("LastName")){
+                            continue;
+                        }
+
+                    } else {
+                        account.setField(field, map.get(field));
+                    }
                 }
             }
         }
-        account.setField("old_ownerId__c", map.get("OwnerId"));
-        account.setField("old_sfId__c", map.get("Id"));
+        account.setField("old_owner_id__c", map.get("OwnerId"));
+        account.setField("old_sfdc_id__c", map.get("Id"));
 
         accounts[j++] = account;
     }
+    List<Map<String, String>> listMap = new ArrayList<>();
     try {
+        if (infoFlag != null && "1".equals(infoFlag.get("value"))){
+            listMap = returnAccountsDetails(accounts,list);
+        }
         SaveResult[] saveResults = partnerConnection.update(accounts);
         for (SaveResult saveResult : saveResults) {
             if (!saveResult.getSuccess()) {
                 log.info("-------------saveResults: {}", JSON.toJSONString(saveResult));
-                String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), JSON.toJSONString(saveResult));
+                String format = String.format("数据导入 error, api name: %s, \nparam: %s, \ncause:\n%s, \n数据实体类:\n%s", api, com.alibaba.fastjson2.JSON.toJSONString(param, DataDumpParam.getFilter()), JSON.toJSONString(saveResult),JSON.toJSONString(listMap));
                 EmailUtil.send("DataDump ERROR", format);
                 return;
             }else {
@@ -859,6 +721,52 @@ public class DataImportServiceImpl implements DataImportService {
 
     }
 
+
+    public void printAccountsDetails(SObject[] accounts,List<DataField> list) {
+        for (int i = 0; i < accounts.length; i++) {
+            SObject account = accounts[i];
+            System.out.println("--- Account[" + i + "] ---");
+
+            // 获取对象所有字段名
+            for (DataField dataField : list) {
+                try {
+                    Object value = account.getField(dataField.getField());
+                    System.out.println(dataField.getField() + ": " + (value != null ? value.toString() : "null"));
+                } catch (Exception e) {
+                    System.out.println(dataField.getField() + ": [权限不足或字段不存在]");
+                }
+            }
+            System.out.println("old_owner_id__c: " + (account.getField("old_owner_id__c") != null ? account.getField("old_owner_id__c").toString() : "null"));
+            System.out.println("old_sfdc_id__c: " + (account.getField("old_sfdc_id__c") != null ? account.getField("old_sfdc_id__c").toString() : "null"));
+
+        }
+    }
+
+    public List<Map<String,String>> returnAccountsDetails(SObject[] accounts,List<DataField> list) {
+        ArrayList<Map<String, String>> arrayList = new ArrayList<>();
+        for (int i = 0; i < accounts.length; i++) {
+            HashMap<String, String> map = new HashMap<>();
+            SObject account = accounts[i];
+            System.out.println("--- Account[" + i + "] ---");
+            // 获取对象所有字段名
+            for (DataField dataField : list) {
+                try {
+                    Object value = account.getField(dataField.getField());
+                    map.put(dataField.getField(),String.valueOf(value));
+                    System.out.println(dataField.getField() + ": " + (value != null ? value.toString() : "null"));
+                } catch (Exception e) {
+                    System.out.println(dataField.getField() + ": [权限不足或字段不存在]");
+                }
+            }
+            System.out.println("old_owner_id__c: " + (account.getField("old_owner_id__c") != null ? account.getField("old_owner_id__c").toString() : "null"));
+            System.out.println("old_sfdc_id__c: " + (account.getField("old_sfdc_id__c") != null ? account.getField("old_sfdc_id__c").toString() : "null"));
+            map.put("old_owner_id__c",String.valueOf(account.getField("old_owner_id__c")));
+            map.put("old_sfdc_id__c",String.valueOf(account.getField("old_sfdc_id__c")));
+            arrayList.add(map);
+        }
+        return arrayList;
+    }
+
     private void autoUpdateSfData(SalesforceParam param, PartnerConnection partnerConnection) throws Exception {
         String api = param.getApi();
 
@@ -6,9 +6,7 @@ import com.celnet.datadump.mapper.DataLogMapper;
 import com.celnet.datadump.service.DataLogService;
 import org.springframework.stereotype.Service;
 
-/**
- *
- */
 @Service
 public class DataLogServiceImpl extends ServiceImpl<DataLogMapper, DataLog> implements DataLogService {
 
@@ -51,11 +51,6 @@ public class FileManagerServiceImpl implements FileManagerService {
     @Autowired
     private SalesforceExecutor salesforceExecutor;
 
-    @Value(value = "${sf.file-download-url}")
-    private String downloadUrl;
-
-    @Value(value = "${sf.file-upload-url}")
-    private String uploadUrl;
 
 
 
@@ -169,6 +164,17 @@ public class FileManagerServiceImpl implements FileManagerService {
         Map<String, String> headers = Maps.newHashMap();
         headers.put("Authorization", "Bearer " + token);
         headers.put("connection", "keep-alive");
+        String downloadUrl = null;
+        List<Map<String, Object>> poll = customMapper.list("code,value","org_config",null);
+        for (Map<String, Object> map1 : poll) {
+            if ("FILE_DOWNLOAD_URL".equals(map1.get("code"))) {
+                downloadUrl = (String) map1.get("value");
+            }
+        }
+        if (StringUtils.isNotBlank(downloadUrl)) {
+            EmailUtil.send("DumpFile ERROR", "文件下载失败!下载地址未配置");
+            return;
+        }
         String url = downloadUrl + String.format(Const.SF_RICH_TEXT_FILE_URL, objectApi, recordId, fieldApi, fieldId);
 
         try {
@@ -211,7 +217,7 @@ public class FileManagerServiceImpl implements FileManagerService {
                 maps.add(paramMap);
             }
         } catch (Exception e) {
-            e.printStackTrace();
+            log.error("exception message", e);
         }
         Map<String, Object> paramMap = Maps.newHashMap();
         paramMap.put("key", "is_dump");
@@ -297,6 +303,18 @@ public class FileManagerServiceImpl implements FileManagerService {
             }
             ids.add(id);
         }
+        String uploadUrl = null;
+        List<Map<String, Object>> poll = customMapper.list("code,value","org_config",null);
+        for (Map<String, Object> map1 : poll) {
+            if ("FILE_UPLOAD_URL".equals(map1.get("code"))) {
+                uploadUrl = (String) map1.get("value");
+            }
+        }
+        if (StringUtils.isBlank(uploadUrl)) {
+            EmailUtil.send("UploadFile ERROR", "文件上传失败!上传地址未配置");
+            return;
+        }
+
         // 拼接url
         String url = uploadUrl + String.format(Const.SF_UPLOAD_RICH_TEXT_FILE_URL, objectApi, recordId);
 
@@ -326,7 +344,7 @@ public class FileManagerServiceImpl implements FileManagerService {
             try {
                 response.close();
             } catch (IOException e) {
-                e.printStackTrace();
+                log.error("exception message", e);
                 throw new RuntimeException("文件转换base64失败");
             }
         }
@@ -1,5 +1,6 @@
 package com.celnet.datadump.service.impl;
 
+import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONObject;
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
@@ -10,10 +11,12 @@ import com.celnet.datadump.config.SalesforceTargetConnect;
 import com.celnet.datadump.entity.DataObject;
 import com.celnet.datadump.enums.FileType;
 import com.celnet.datadump.global.Const;
+import com.celnet.datadump.param.DataDumpParam;
 import com.celnet.datadump.param.FileTransformParam;
 import com.celnet.datadump.global.SystemConfigCode;
 import com.celnet.datadump.mapper.CustomMapper;
 import com.celnet.datadump.service.*;
+import com.celnet.datadump.util.EmailUtil;
 import com.celnet.datadump.util.HttpUtil;
 import com.celnet.datadump.util.OssUtil;
 import com.google.common.collect.Lists;
@@ -75,12 +78,6 @@ public class FileServiceImpl implements FileService {
     @Autowired
     private CommonService commonService;
 
-    @Value(value = "${sf.file-download-url}")
-    private String downloadUrl;
-
-    @Value(value = "${sf.file-upload-url}")
-    private String uploadUrl;
-
     @Override
     public void verifyFile(String api, String field) {
         log.info("verify file api:{}, field:{}", api, field);
@@ -137,6 +134,17 @@ public class FileServiceImpl implements FileService {
 
     @Override
     public void dumpFile(String api, String field, Boolean singleThread) {
+        String downloadUrl = null;
+        List<Map<String, Object>> poll = customMapper.list("code,value","org_config",null);
+        for (Map<String, Object> map1 : poll) {
+            if ("FILE_DOWNLOAD_URL".equals(map1.get("code"))) {
+                downloadUrl = (String) map1.get("value");
+            }
+        }
+        if (StringUtils.isNotBlank(downloadUrl)) {
+            EmailUtil.send("DumpFile ERROR", "文件下载失败!下载地址未配置");
+            return;
+        }
         log.info("dump file api:{}, field:{}", api, field);
         PartnerConnection connect = salesforceConnect.createConnect();
         String token = connect.getSessionHeader().getSessionId();
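Note: the FILE_DOWNLOAD_URL / FILE_UPLOAD_URL lookups introduced above repeat the same scan over org_config in each method. As a sketch only (this helper is hypothetical and not part of this change set), the pattern could be factored out using the same customMapper.list(columns, table, where) call the patched methods already rely on:

    // Hypothetical helper, not part of this commit: reads one value from the org_config table.
    private String getOrgConfigValue(String code) {
        List<Map<String, Object>> poll = customMapper.list("code,value", "org_config", null);
        for (Map<String, Object> row : poll) {
            if (code.equals(row.get("code"))) {
                Object value = row.get("value");
                return value == null ? null : value.toString();
            }
        }
        return null;
    }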
@@ -167,6 +175,7 @@ public class FileServiceImpl implements FileService {
         }
         String finalName = name;
         for (Map<String, Object> map : list) {
+            String finalDownloadUrl = downloadUrl;
             Future<?> future = salesforceExecutor.execute(() -> {
                 String id = null;
                 // 上传完毕 更新附件信息
@@ -182,7 +191,7 @@ public class FileServiceImpl implements FileService {
                 if (StringUtils.isNotBlank(fileName)) {
                     String filePath = api + "/" + id + "_" + fileName;
                     // 拼接url
-                    String url = downloadUrl + String.format(Const.SF_FILE_URL, api, id, field);
+                    String url = finalDownloadUrl + String.format(Const.SF_FILE_URL, api, id, field);
                     Response response = HttpUtil.doGet(url, null, headers);
                     if (response.body() != null) {
                         InputStream inputStream = response.body().byteStream();
@ -423,6 +432,18 @@ public class FileServiceImpl implements FileService {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void uploadFile(String api, String field, Boolean singleThread) {
|
public void uploadFile(String api, String field, Boolean singleThread) {
|
||||||
|
String uploadUrl = null;
|
||||||
|
List<Map<String, Object>> poll = customMapper.list("code,value","org_config",null);
|
||||||
|
for (Map<String, Object> map1 : poll) {
|
||||||
|
if ("FILE_UPLOAD_URL".equals(map1.get("code"))) {
|
||||||
|
uploadUrl = (String) map1.get("value");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (StringUtils.isBlank(uploadUrl)) {
|
||||||
|
EmailUtil.send("UploadFile ERROR", "文件上传失败!上传地址未配置");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
log.info("upload file api:{}, field:{}", api, field);
|
log.info("upload file api:{}, field:{}", api, field);
|
||||||
PartnerConnection connect = salesforceTargetConnect.createConnect();
|
PartnerConnection connect = salesforceTargetConnect.createConnect();
|
||||||
String token = connect.getSessionHeader().getSessionId();
|
String token = connect.getSessionHeader().getSessionId();
|
||||||
@ -449,6 +470,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
if (CollectionUtils.isEmpty(list)) {
|
if (CollectionUtils.isEmpty(list)) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
String finalUploadUrl = uploadUrl;
|
||||||
Future<?> future = salesforceExecutor.execute(() -> {
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
String newDocumentId = null;
|
String newDocumentId = null;
|
||||||
for (Map<String, Object> map : list) {
|
for (Map<String, Object> map : list) {
|
||||||
@ -477,7 +499,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
// 拼接url
|
// 拼接url
|
||||||
String url = uploadUrl + String.format(Const.SF_UPLOAD_FILE_URL, api);
|
String url = finalUploadUrl + String.format(Const.SF_UPLOAD_FILE_URL, api);
|
||||||
HttpPost httpPost = new HttpPost(url);
|
HttpPost httpPost = new HttpPost(url);
|
||||||
httpPost.setHeader("Authorization", "Bearer " + token);
|
httpPost.setHeader("Authorization", "Bearer " + token);
|
||||||
httpPost.setHeader("connection", "keep-alive");
|
httpPost.setHeader("connection", "keep-alive");
|
||||||
@ -552,7 +574,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
try {
|
try {
|
||||||
response.close();
|
response.close();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
e.printStackTrace();
|
log.error("exception message", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
@ -592,6 +614,17 @@ public class FileServiceImpl implements FileService {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void uploadFileToAttachment(String api, String field, Boolean singleThread) {
|
public void uploadFileToAttachment(String api, String field, Boolean singleThread) {
|
||||||
|
String uploadUrl = null;
|
||||||
|
List<Map<String, Object>> poll = customMapper.list("code,value","org_config",null);
|
||||||
|
for (Map<String, Object> map1 : poll) {
|
||||||
|
if ("FILE_UPLOAD_URL".equals(map1.get("code"))) {
|
||||||
|
uploadUrl = (String) map1.get("value");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (StringUtils.isBlank(uploadUrl)) {
|
||||||
|
EmailUtil.send("UploadFile ERROR", "文件上传失败!上传地址未配置");
|
||||||
|
return;
|
||||||
|
}
|
||||||
log.info("upload file api:{}, field:{}", api, field);
|
log.info("upload file api:{}, field:{}", api, field);
|
||||||
PartnerConnection connect = salesforceTargetConnect.createConnect();
|
PartnerConnection connect = salesforceTargetConnect.createConnect();
|
||||||
String token = connect.getSessionHeader().getSessionId();
|
String token = connect.getSessionHeader().getSessionId();
|
||||||
@ -612,6 +645,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
// 获取未存储的附件id
|
// 获取未存储的附件id
|
||||||
List<Map<String, Object>> list = customMapper.list("Id, ParentId, Name, url, Description, Parent_type", api, "is_upload = 0");
|
List<Map<String, Object>> list = customMapper.list("Id, ParentId, Name, url, Description, Parent_type", api, "is_upload = 0");
|
||||||
for (Map<String, Object> map : list) {
|
for (Map<String, Object> map : list) {
|
||||||
|
String finalUploadUrl = uploadUrl;
|
||||||
Future<?> future = salesforceExecutor.execute(() -> {
|
Future<?> future = salesforceExecutor.execute(() -> {
|
||||||
String id = null;
|
String id = null;
|
||||||
// 上传完毕 更新附件信息
|
// 上传完毕 更新附件信息
|
||||||
@ -649,7 +683,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
Map<String, Object> lMap = customMapper.getById("new_id",parentType, parentId);
|
Map<String, Object> lMap = customMapper.getById("new_id",parentType, parentId);
|
||||||
|
|
||||||
// 拼接url
|
// 拼接url
|
||||||
String url = uploadUrl + String.format(Const.SF_UPLOAD_FILE_URL, api);
|
String url = finalUploadUrl + String.format(Const.SF_UPLOAD_FILE_URL, api);
|
||||||
HttpPost httpPost = new HttpPost(url);
|
HttpPost httpPost = new HttpPost(url);
|
||||||
httpPost.setHeader("Authorization", "Bearer " + token);
|
httpPost.setHeader("Authorization", "Bearer " + token);
|
||||||
httpPost.setHeader("connection", "keep-alive");
|
httpPost.setHeader("connection", "keep-alive");
|
||||||
@ -704,7 +738,7 @@ public class FileServiceImpl implements FileService {
|
|||||||
try {
|
try {
|
||||||
response.close();
|
response.close();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
e.printStackTrace();
|
log.error("exception message", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
src/main/java/com/celnet/datadump/util/BulkUtil.java (new file, 213 lines)
@@ -0,0 +1,213 @@
|
package com.celnet.datadump.util;
|
||||||
|
|
||||||
|
import com.sforce.async.*;
|
||||||
|
import com.sforce.soap.partner.PartnerConnection;
|
||||||
|
import com.sforce.ws.ConnectionException;
|
||||||
|
import com.sforce.ws.ConnectorConfig;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
public class BulkUtil {
|
||||||
|
|
||||||
|
public static void closeJob(BulkConnection connection, String jobId)
|
||||||
|
throws AsyncApiException {
|
||||||
|
JobInfo job = new JobInfo();
|
||||||
|
job.setId(jobId);
|
||||||
|
job.setState(JobStateEnum.Closed);
|
||||||
|
connection.updateJob(job);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for a job to complete by polling the Bulk API.
|
||||||
|
*
|
||||||
|
* @param connection
|
||||||
|
* BulkConnection used to check results.
|
||||||
|
* @param job
|
||||||
|
* The job awaiting completion.
|
||||||
|
* @param batchInfoList
|
||||||
|
* List of batches for this job.
|
||||||
|
* @throws AsyncApiException
|
||||||
|
*/
|
||||||
|
public static void awaitCompletion(BulkConnection connection, JobInfo job,
|
||||||
|
List<BatchInfo> batchInfoList)
|
||||||
|
throws AsyncApiException {
|
||||||
|
long sleepTime = 0L;
|
||||||
|
Set<String> incomplete = new HashSet<String>();
|
||||||
|
for (BatchInfo bi : batchInfoList) {
|
||||||
|
incomplete.add(bi.getId());
|
||||||
|
}
|
||||||
|
while (!incomplete.isEmpty()) {
|
||||||
|
try {
|
||||||
|
Thread.sleep(sleepTime);
|
||||||
|
} catch (InterruptedException e) {}
|
||||||
|
System.out.println("Awaiting results..." + incomplete.size());
|
||||||
|
sleepTime = 10000L;
|
||||||
|
BatchInfo[] statusList =
|
||||||
|
connection.getBatchInfoList(job.getId()).getBatchInfo();
|
||||||
|
for (BatchInfo b : statusList) {
|
||||||
|
if (b.getState() == BatchStateEnum.Completed
|
||||||
|
|| b.getState() == BatchStateEnum.Failed) {
|
||||||
|
if (incomplete.remove(b.getId())) {
|
||||||
|
System.out.println("BATCH STATUS:\n" + b);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new job using the Bulk API.
|
||||||
|
*
|
||||||
|
* @param sobjectType
|
||||||
|
* The object type being loaded, such as "Account"
|
||||||
|
* @param connection
|
||||||
|
* BulkConnection used to create the new job.
|
||||||
|
* @return The JobInfo for the new job.
|
||||||
|
* @throws AsyncApiException
|
||||||
|
*/
|
||||||
|
public static JobInfo createJob( BulkConnection connection,String sobjectType,OperationEnum operation)
|
||||||
|
throws AsyncApiException {
|
||||||
|
JobInfo job = new JobInfo();
|
||||||
|
job.setObject(sobjectType);
|
||||||
|
job.setOperation(operation);
|
||||||
|
job.setContentType(ContentType.CSV);
|
||||||
|
job = connection.createJob(job);
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create the BulkConnection used to call Bulk API operations.
|
||||||
|
*/
|
||||||
|
public static BulkConnection getBulkConnection(String userName, String password,String url)
|
||||||
|
throws ConnectionException, AsyncApiException {
|
||||||
|
ConnectorConfig partnerConfig = new ConnectorConfig();
|
||||||
|
partnerConfig.setUsername(userName);
|
||||||
|
partnerConfig.setPassword(password);
|
||||||
|
partnerConfig.setAuthEndpoint(url);
|
||||||
|
// Creating the connection automatically handles login and stores
|
||||||
|
// the session in partnerConfig
|
||||||
|
new PartnerConnection(partnerConfig);
|
||||||
|
// When PartnerConnection is instantiated, a login is implicitly
|
||||||
|
// executed and, if successful,
|
||||||
|
// a valid session is stored in the ConnectorConfig instance.
|
||||||
|
// Use this key to initialize a BulkConnection:
|
||||||
|
ConnectorConfig config = new ConnectorConfig();
|
||||||
|
config.setSessionId(partnerConfig.getSessionId());
|
||||||
|
// The endpoint for the Bulk API service is the same as for the normal
|
||||||
|
// SOAP uri until the /Soap/ part. From here it's '/async/versionNumber'
|
||||||
|
String soapEndpoint = partnerConfig.getServiceEndpoint();
|
||||||
|
String apiVersion = "56.0";
|
||||||
|
String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/"))
|
||||||
|
+ "async/" + apiVersion;
|
||||||
|
config.setRestEndpoint(restEndpoint);
|
||||||
|
// This should only be false when doing debugging.
|
||||||
|
config.setCompression(true);
|
||||||
|
// Set this to true to see HTTP requests and responses on stdout
|
||||||
|
config.setTraceMessage(false);
|
||||||
|
BulkConnection connection = new BulkConnection(config);
|
||||||
|
return connection;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create and upload batches using a CSV file.
|
||||||
|
* The file into the appropriate size batch files.
|
||||||
|
*
|
||||||
|
* @param connection
|
||||||
|
* Connection to use for creating batches
|
||||||
|
* @param jobInfo
|
||||||
|
* Job associated with new batches
|
||||||
|
* @param csvFileName
|
||||||
|
* The source file for batch data
|
||||||
|
*/
|
||||||
|
public static List<BatchInfo> createBatchesFromCSVFile(BulkConnection connection,
|
||||||
|
JobInfo jobInfo, String csvFileName)
|
||||||
|
throws IOException, AsyncApiException {
|
||||||
|
List<BatchInfo> batchInfos = new ArrayList<BatchInfo>();
|
||||||
|
BufferedReader rdr = new BufferedReader(
|
||||||
|
new InputStreamReader(Files.newInputStream(Paths.get(csvFileName)),"GBK")
|
||||||
|
);
|
||||||
|
// read the CSV header row
|
||||||
|
byte[] headerBytes = (rdr.readLine() + "\n").getBytes("UTF-8");
|
||||||
|
int headerBytesLength = headerBytes.length;
|
||||||
|
File tmpFile = File.createTempFile("bulkAPIInsert", ".csv");
|
||||||
|
|
||||||
|
// Split the CSV file into multiple batches
|
||||||
|
try {
|
||||||
|
FileOutputStream tmpOut = new FileOutputStream(tmpFile);
|
||||||
|
int maxBytesPerBatch = 10000000; // 10 million bytes per batch
|
||||||
|
int maxRowsPerBatch = 10000; // 10 thousand rows per batch
|
||||||
|
int currentBytes = 0;
|
||||||
|
int currentLines = 0;
|
||||||
|
String nextLine;
|
||||||
|
while ((nextLine = rdr.readLine()) != null) {
|
||||||
|
byte[] bytes = (nextLine + "\n").getBytes("UTF-8");
|
||||||
|
// Create a new batch when our batch size limit is reached
|
||||||
|
if (currentBytes + bytes.length > maxBytesPerBatch
|
||||||
|
|| currentLines > maxRowsPerBatch) {
|
||||||
|
createBatch(tmpOut, tmpFile, batchInfos, connection, jobInfo);
|
||||||
|
currentBytes = 0;
|
||||||
|
currentLines = 0;
|
||||||
|
}
|
||||||
|
if (currentBytes == 0) {
|
||||||
|
tmpOut = new FileOutputStream(tmpFile);
|
||||||
|
tmpOut.write(headerBytes);
|
||||||
|
currentBytes = headerBytesLength;
|
||||||
|
currentLines = 1;
|
||||||
|
}
|
||||||
|
tmpOut.write(bytes);
|
||||||
|
currentBytes += bytes.length;
|
||||||
|
currentLines++;
|
||||||
|
}
|
||||||
|
// Finished processing all rows
|
||||||
|
// Create a final batch for any remaining data
|
||||||
|
if (currentLines > 1) {
|
||||||
|
createBatch(tmpOut, tmpFile, batchInfos, connection, jobInfo);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
tmpFile.delete();
|
||||||
|
}
|
||||||
|
return batchInfos;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a batch by uploading the contents of the file.
|
||||||
|
* This closes the output stream.
|
||||||
|
*
|
||||||
|
* @param tmpOut
|
||||||
|
* The output stream used to write the CSV data for a single batch.
|
||||||
|
* @param tmpFile
|
||||||
|
* The file associated with the above stream.
|
||||||
|
* @param batchInfos
|
||||||
|
* The batch info for the newly created batch is added to this list.
|
||||||
|
* @param connection
|
||||||
|
* The BulkConnection used to create the new batch.
|
||||||
|
* @param jobInfo
|
||||||
|
* The JobInfo associated with the new batch.
|
||||||
|
*/
|
||||||
|
public static void createBatch(FileOutputStream tmpOut, File tmpFile,
|
||||||
|
List<BatchInfo> batchInfos, BulkConnection connection, JobInfo jobInfo)
|
||||||
|
throws IOException, AsyncApiException {
|
||||||
|
tmpOut.flush();
|
||||||
|
tmpOut.close();
|
||||||
|
FileInputStream tmpInputStream = new FileInputStream(tmpFile);
|
||||||
|
try {
|
||||||
|
BatchInfo batchInfo =
|
||||||
|
connection.createBatchFromStream(jobInfo, tmpInputStream);
|
||||||
|
System.out.println(batchInfo);
|
||||||
|
batchInfos.add(batchInfo);
|
||||||
|
|
||||||
|
} finally {
|
||||||
|
tmpInputStream.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
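The new BulkUtil class above bundles the standard Salesforce Bulk API sequence: open a BulkConnection, create a job, split a CSV into batches, close the job, and poll until the batches finish. A minimal usage sketch follows; the login endpoint, credentials, object name and CSV path are placeholders for illustration, not values taken from this repository.

    // Hypothetical wiring of the BulkUtil helpers shown in the new file.
    import com.celnet.datadump.util.BulkUtil;
    import com.sforce.async.BatchInfo;
    import com.sforce.async.BulkConnection;
    import com.sforce.async.JobInfo;
    import com.sforce.async.OperationEnum;

    import java.util.List;

    public class BulkInsertExample {
        public static void main(String[] args) throws Exception {
            // Placeholder credentials and Partner SOAP endpoint (API version 56.0 matches BulkUtil).
            BulkConnection connection = BulkUtil.getBulkConnection(
                    "user@example.com", "password+token",
                    "https://login.salesforce.com/services/Soap/u/56.0");

            // Create an insert job for Account and feed it a local CSV file.
            JobInfo job = BulkUtil.createJob(connection, "Account", OperationEnum.insert);
            List<BatchInfo> batches = BulkUtil.createBatchesFromCSVFile(connection, job, "data-dump/dataFile/Account.csv");

            // Close the job so no more batches can be added, then poll until every batch completes or fails.
            BulkUtil.closeJob(connection, job.getId());
            BulkUtil.awaitCompletion(connection, job, batches);
        }
    }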
src/main/java/com/celnet/datadump/util/CsvConverterUtil.java (new file, 330 lines)
@@ -0,0 +1,330 @@
|
package com.celnet.datadump.util;
|
||||||
|
|
||||||
|
import cn.hutool.core.io.FileUtil;
|
||||||
|
import cn.hutool.core.io.IoUtil;
|
||||||
|
|
||||||
|
import cn.hutool.core.text.csv.CsvUtil;
|
||||||
|
import cn.hutool.core.text.csv.CsvWriteConfig;
|
||||||
|
import cn.hutool.core.text.csv.CsvWriter;
|
||||||
|
import cn.hutool.core.util.CharsetUtil;
|
||||||
|
import cn.hutool.json.JSONObject;
|
||||||
|
import com.opencsv.CSVReader;
|
||||||
|
import com.opencsv.exceptions.CsvException;
|
||||||
|
import org.apache.commons.csv.CSVFormat;
|
||||||
|
import org.apache.commons.csv.CSVPrinter;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* csv格式转换工具类
|
||||||
|
*/
|
||||||
|
public class CsvConverterUtil {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 将JSONObject(代表一行)或JSONArray(代表多行)写入CSV文件
|
||||||
|
* @param jsonList JSON数据
|
||||||
|
*/
|
||||||
|
public static String writeToCsv(List<JSONObject> jsonList, String fileName) {
|
||||||
|
|
||||||
|
// 1. 创建目标目录(不存在则创建)
|
||||||
|
File targetDir = FileUtil.mkdir("data-dump/dataFile");
|
||||||
|
|
||||||
|
// 2. 构建完整文件路径
|
||||||
|
String fullPath = targetDir.getAbsolutePath() + File.separator + fileName + ".csv";
|
||||||
|
|
||||||
|
CsvWriter csvWriter = CsvUtil.getWriter(fullPath, CharsetUtil.CHARSET_GBK, false);
|
||||||
|
|
||||||
|
String[] header = jsonList.get(0).keySet().toArray(new String[0]);
|
||||||
|
|
||||||
|
// 2. 写入表头(必须使用 String[])
|
||||||
|
csvWriter.writeHeaderLine(header);
|
||||||
|
|
||||||
|
// 遍历数据列表
|
||||||
|
for (JSONObject jsonObject : jsonList) {
|
||||||
|
// 按表头顺序获取值
|
||||||
|
String[] row = new String[header.length];
|
||||||
|
for (int i = 0; i < header.length; i++) {
|
||||||
|
// 将每个值转换为字符串(如果为null则转为空字符串)
|
||||||
|
Object value = jsonObject.get(header[i]);
|
||||||
|
row[i] = value == null ? "" : value.toString();
|
||||||
|
}
|
||||||
|
csvWriter.writeLine(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 关闭writer(在try-with-resources中可省略,但这里我们显式关闭)
|
||||||
|
csvWriter.close();
|
||||||
|
|
||||||
|
return fullPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public static String exportToCsv(List<Map<String, Object>> data, String fileName) throws IOException {
|
||||||
|
// 1. 创建目标目录(不存在则创建)
|
||||||
|
File targetDir = FileUtil.mkdir("data-dump/dataFile");
|
||||||
|
|
||||||
|
// 2. 构建完整文件路径
|
||||||
|
String fullPath = targetDir.getAbsolutePath() + File.separator + fileName + ".csv";
|
||||||
|
|
||||||
|
CsvWriter csvWriter = CsvUtil.getWriter(fullPath, CharsetUtil.CHARSET_UTF_8, false);
|
||||||
|
|
||||||
|
// 1. 提取表头(保持顺序)
|
||||||
|
Set<String> headers = new LinkedHashSet<>();
|
||||||
|
for (Map<String, Object> map : data) {
|
||||||
|
headers.addAll(map.keySet());
|
||||||
|
}
|
||||||
|
String[] headerArray = headers.toArray(new String[0]);
|
||||||
|
|
||||||
|
// 2. 写入表头(必须使用 String[])
|
||||||
|
csvWriter.writeLine(headerArray);
|
||||||
|
|
||||||
|
// 3. 写入数据行(需要将 Object 转换为 String)
|
||||||
|
for (Map<String, Object> map : data) {
|
||||||
|
// 按表头顺序构建行数据
|
||||||
|
String[] row = new String[headerArray.length];
|
||||||
|
for (int i = 0; i < headerArray.length; i++) {
|
||||||
|
Object value = map.get(headerArray[i]);
|
||||||
|
// 处理空值和特殊字符
|
||||||
|
row[i] = convertToCsvValue(value);
|
||||||
|
}
|
||||||
|
csvWriter.writeLine(row); // 使用 String[] 参数
|
||||||
|
}
|
||||||
|
|
||||||
|
return fullPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @Description: convert list<Map<>> to csv
|
||||||
|
* @Param: list<Map<>>,pathName
|
||||||
|
* @return:
|
||||||
|
*/
|
||||||
|
public static String saveOpenToDataDump(List<Map<String,Object>> list,String fileName) throws IOException {
|
||||||
|
|
||||||
|
// 1. 创建目标目录(不存在则创建)
|
||||||
|
File targetDir = FileUtil.mkdir("data-dump/dataFile");
|
||||||
|
|
||||||
|
// 2. 构建完整文件路径
|
||||||
|
String fullPath = targetDir.getAbsolutePath() + File.separator + fileName + ".csv";
|
||||||
|
|
||||||
|
List<String> headerList = new ArrayList<>();
|
||||||
|
for (String s : list.get(0).keySet()) {
|
||||||
|
headerList.add(s);
|
||||||
|
}
|
||||||
|
String[] csvHeader = headerList.toArray(new String[headerList.size()]);
|
||||||
|
|
||||||
|
FileWriter out = new FileWriter(fullPath);
|
||||||
|
|
||||||
|
try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT
|
||||||
|
.withHeader(csvHeader))) {
|
||||||
|
for(Map<String,Object> map:list) {
|
||||||
|
List<String> valueList = new ArrayList<>();
|
||||||
|
for(String s:headerList)
|
||||||
|
valueList.add(String.valueOf(map.get(s)));
|
||||||
|
String[] csvValue = valueList.toArray(new String[valueList.size()]);
|
||||||
|
printer.printRecord(csvValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
out.close();
|
||||||
|
|
||||||
|
return fullPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 保存 CSV 文件到 data-dump/dataFile 目录
|
||||||
|
* @param data 数据集合
|
||||||
|
* @param fileName 文件名(无需后缀,自动添加.csv)
|
||||||
|
* @return 完整的文件路径
|
||||||
|
*/
|
||||||
|
public static String saveToDataDump(List<Map<String, Object>> data, String fileName) {
|
||||||
|
// 1. 创建目标目录(不存在则创建)
|
||||||
|
File targetDir = FileUtil.mkdir("data-dump/dataFile");
|
||||||
|
|
||||||
|
// 2. 构建完整文件路径
|
||||||
|
String fullPath = targetDir.getAbsolutePath() + File.separator + fileName + ".csv";
|
||||||
|
|
||||||
|
// 3. 写入CSV文件
|
||||||
|
try (CsvWriter csvWriter = new CsvWriter(
|
||||||
|
new File(fullPath),
|
||||||
|
CharsetUtil.CHARSET_UTF_8, // UTF-8编码
|
||||||
|
false, // 非追加模式
|
||||||
|
                new CsvWriteConfig().setFieldSeparator(',') // comma separator
        )) {
            // 1. Collect the headers (preserving insertion order)
            Set<String> headers = new LinkedHashSet<>();
            for (Map<String, Object> map : data) {
                headers.addAll(map.keySet());
            }
            String[] headerArray = headers.toArray(new String[0]);

            // 2. Write the header row (must be passed as String[])
            csvWriter.writeLine(headerArray);

            // 3. Write the data rows (each Object converted to String)
            for (Map<String, Object> map : data) {
                // Build the row in header order
                String[] row = new String[headerArray.length];
                for (int i = 0; i < headerArray.length; i++) {
                    Object value = map.get(headerArray[i]);
                    // Handle nulls and special characters
                    row[i] = convertToCsvValue(value);
                }
                csvWriter.writeLine(row); // uses the String[] overload
            }
            // 4. Release resources and return the result
            IoUtil.close(csvWriter);
        }
        return fullPath;
    }

    public static InputStream convertListToCsv(List<Map<String, Object>> data) {
        if (data == null || data.isEmpty()) {
            return null;
        }

        // Use a StringWriter as the in-memory buffer
        StringWriter stringWriter = new StringWriter();
        CsvWriter csvWriter = CsvUtil.getWriter(stringWriter, CsvWriteConfig.defaultConfig());

        // 1. Collect the headers (preserving insertion order)
        Set<String> headers = new LinkedHashSet<>();
        for (Map<String, Object> map : data) {
            headers.addAll(map.keySet());
        }
        String[] headerArray = headers.toArray(new String[0]);

        // 2. Write the header row (must be passed as String[])
        csvWriter.writeLine(headerArray);

        // 3. Write the data rows (each Object converted to String)
        for (Map<String, Object> map : data) {
            // Build the row in header order
            String[] row = new String[headerArray.length];
            for (int i = 0; i < headerArray.length; i++) {
                Object value = map.get(headerArray[i]);
                // Handle nulls and special characters
                row[i] = convertToCsvValue(value);
            }
            csvWriter.writeLine(row); // uses the String[] overload
        }

        // 4. Release resources and return the result (read from the StringWriter buffer, not the CsvWriter object)
        IoUtil.close(csvWriter);
        return new ByteArrayInputStream(stringWriter.toString().getBytes(StandardCharsets.UTF_8));
    }

    // Handle special values and nulls
    private static String convertToCsvValue(Object value) {
        if (value == null) {
            return "";
        }
        // Hutool escapes commas, quotes and other special characters automatically
        return value.toString();
    }

    public static List<String> extractIdColumn(InputStream inputStream) throws IOException, CsvException {

        List<String> idList = new ArrayList<>();

        try (CSVReader reader = new CSVReader(new InputStreamReader(inputStream))) {
            // Read the CSV header row
            String[] headers = reader.readNext();
            int idIndex = -1;

            // Locate the index of the id column
            for (int i = 0; i < headers.length; i++) {
                if ("id".equalsIgnoreCase(headers[i].trim())) {
                    idIndex = i;
                    break;
                }
            }

            if (idIndex == -1) {
                throw new IllegalArgumentException("CSV文件中未找到id列");
            }

            // Read the data row by row
            String[] nextRecord;
            while ((nextRecord = reader.readNext()) != null) {
                if (idIndex < nextRecord.length) {
                    if (!"Id".equals(nextRecord[idIndex])) {
                        idList.add(nextRecord[idIndex]);
                    }
                }
            }
        }
        return idList;
    }

    public static InputStream convertToCsvStream(List<Map<String, Object>> data) throws IOException {
        if (data == null || data.isEmpty()) {
            return new ByteArrayInputStream(new byte[0]);
        }

        // Collect all column headers (preserving insertion order)
        Set<String> headers = new LinkedHashSet<>();
        for (Map<String, Object> map : data) {
            headers.addAll(map.keySet());
        }
        List<String> headerList = new ArrayList<>(headers);

        try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
             Writer writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)) {

            // Write the CSV header
            writeCsvLine(writer, headerList);

            // Write the data rows
            for (Map<String, Object> row : data) {
                List<String> values = new ArrayList<>();
                for (String header : headerList) {
                    Object value = row.get(header);
                    values.add(value != null ? escapeCsv(value.toString()) : "");
                }
                writeCsvLine(writer, values);
            }

            writer.flush();
            return new ByteArrayInputStream(outputStream.toByteArray());
        }
    }

    private static void writeCsvLine(Writer writer, List<String> values) throws IOException {
        if (values.isEmpty()) {
            writer.write("\n");
            return;
        }

        for (int i = 0; i < values.size(); i++) {
            writer.write(values.get(i));
            if (i < values.size() - 1) {
                writer.write(',');
            }
        }
        writer.write('\n');
    }

    private static String escapeCsv(String value) {
        if (value == null) {
            return "";
        }

        // Check whether escaping is needed (field contains special characters)
        boolean needsEscape = value.contains(",")
                || value.contains("\"")
                || value.contains("\n")
                || value.contains("\r");

        if (!needsEscape) {
            return value;
        }

        // Escape embedded double quotes and wrap the whole field in quotes
        return "\"" + value.replace("\"", "\"\"") + "\"";
    }
}
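For orientation, a minimal usage sketch of the CSV helpers above. The enclosing utility class is not named in this excerpt, so "DataDumpCsvUtil" is a placeholder, and the sample record values are invented for illustration.

import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class CsvUsageSketch {
    public static void main(String[] args) throws Exception {
        List<Map<String, Object>> rows = new ArrayList<>();
        Map<String, Object> row = new LinkedHashMap<>();
        row.put("Id", "0015g00000Sample1");   // hypothetical record id
        row.put("Name", "Acme, Inc.");        // embedded comma exercises the quoting path
        rows.add(row);

        // DataDumpCsvUtil stands in for the utility class shown in this diff
        InputStream csv = DataDumpCsvUtil.convertListToCsv(rows);
        List<String> ids = DataDumpCsvUtil.extractIdColumn(csv);
        System.out.println(ids);              // prints [0015g00000Sample1]
    }
}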
@ -14,6 +14,7 @@ import com.sforce.soap.partner.Field;
 import com.sforce.soap.partner.sobject.SObject;
 import com.sforce.ws.bind.XmlObject;
 import com.sforce.ws.types.Time;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.map.CaseInsensitiveMap;
 import org.apache.commons.lang3.BooleanUtils;
@ -37,6 +38,7 @@ import java.util.*;
  * @description Data processing utility
  * @date 2022/11/16
  */
+@Slf4j
 @Component
 public class DataUtil {

@ -83,10 +85,12 @@ public class DataUtil {
                break;
            case "date":
            case "datetime":
-           case "time":
            case "boolean":
                result = type;
                break;
+           case "time":
+               result = "time(3)";
+               break;
            case "long":
            case "int":
                result = "int(" + length + ")";
@ -388,6 +392,39 @@ public class DataUtil {
        }
    }

+   public static Object localBulkDataToSfData(String fieldType, String data) throws ParseException {
+       SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+       SimpleDateFormat sd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
+       Date date;
+       // convert Date to Calendar
+       Calendar calendar = Calendar.getInstance();
+       switch (fieldType) {
+           case "int":
+               return Integer.parseInt(data);
+           case "double":
+           case "currency":
+           case "percent":
+               return new BigDecimal(data);
+           case "boolean":
+               return Boolean.valueOf(data);
+           case "date":
+               return data+"T08:00:00Z";
+           case "datetime":
+               try {
+                   date = sd.parse(data);
+               }catch (ParseException e){
+                   // work around lost seconds precision when the seconds part is 0
+                   date = sd.parse(data+":00");
+               }
+               calendar.setTime(date);
+               return calendar;
+           case "time":
+               return adjustHour(data);
+           default:
+               return data;
+       }
+   }
+
    public static Object localDataToSfData(String fieldType, String data) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        SimpleDateFormat sd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
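As a rough illustration of how the new localBulkDataToSfData conversion behaves, assuming the DataUtil class from this diff is on the classpath (its package path, and the sample field types and values below, are assumptions for illustration only):

import com.celnet.datadump.util.DataUtil; // package path assumed
import java.text.ParseException;

public class LocalBulkDataSketch {
    public static void main(String[] args) throws ParseException {
        // "date" values get a fixed +08:00 offset suffix appended
        System.out.println(DataUtil.localBulkDataToSfData("date", "2025-01-01"));           // 2025-01-01T08:00:00Z
        // "datetime" values missing seconds are padded with ":00" and returned as a Calendar
        System.out.println(DataUtil.localBulkDataToSfData("datetime", "2025-01-01T09:30"));
        // "time" values are shifted back 8 hours via adjustHour (added later in this diff)
        System.out.println(DataUtil.localBulkDataToSfData("time", "16:30:00.000"));         // 08:30:00.000
    }
}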
@ -407,7 +444,7 @@ public class DataUtil {
                try {
                    return sdf.parse(data+"T08:00:00Z");
                } catch (ParseException e) {
-                   e.printStackTrace();
+                   log.error("exception message", e);
                }
            case "datetime":
                try {
@ -418,11 +455,30 @@ public class DataUtil {
                }
                calendar.setTime(date);
                return calendar;
+           case "time":
+               return adjustHour(data);
            default:
                return data;
        }
    }

+   public static String adjustHour(String timeStr) {
+       // Extract the hour part and parse it as an integer
+       int hour = Integer.parseInt(timeStr.substring(0, 2));
+
+       // Subtract 8 hours and handle day rollover
+       int adjustedHour = hour - 8;
+       if (adjustedHour < 0) {
+           adjustedHour += 24; // negative hour wraps to the previous day
+       }
+
+       // Format as a two-digit, zero-padded string
+       String newHour = String.format("%02d", adjustedHour);
+
+       // Append the rest of the original string (minutes + seconds + millis)
+       return newHour + timeStr.substring(2);
+   }
+
    public static boolean isUpdate(String field){
        switch (field) {
            case "LastModifiedDate":
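A self-contained sketch of the hour-shift logic added above, useful for checking the wrap-around behaviour; the helper is re-implemented here so the snippet runs on its own, and it assumes times are HH:mm:ss.SSS strings in UTC+8.

public class AdjustHourSketch {
    // mirrors adjustHour from the diff: shift the HH part back by 8 hours, wrapping at midnight
    static String adjustHour(String timeStr) {
        int hour = Integer.parseInt(timeStr.substring(0, 2));
        int adjustedHour = hour - 8;
        if (adjustedHour < 0) {
            adjustedHour += 24; // e.g. 03:00 local becomes 19:00 of the previous day in UTC
        }
        return String.format("%02d", adjustedHour) + timeStr.substring(2);
    }

    public static void main(String[] args) {
        System.out.println(adjustHour("16:30:00.000")); // 08:30:00.000
        System.out.println(adjustHour("03:15:00.000")); // 19:15:00.000
    }
}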
@ -1,5 +1,7 @@
 package com.celnet.datadump.util;

+import lombok.extern.slf4j.Slf4j;
+
 import javax.crypto.Cipher;
 import javax.crypto.SecretKeyFactory;
 import javax.crypto.spec.DESKeySpec;
@ -10,6 +12,7 @@ import java.security.NoSuchAlgorithmException;
 import java.security.spec.AlgorithmParameterSpec;
 import java.util.Base64;

+@Slf4j
 public class MD5Util {

    /** IV (decryption requires both the IV and the key); it must be exactly 8 bytes, anything else throws an error */
@ -58,7 +61,7 @@ public class MD5Util {
            System.out.println("解密后的字符:" + mt.decode(mt.encode(value)));
            System.out.println("字符串的MD5值:"+ getMD5Value(value));
        } catch (Exception e) {
-           e.printStackTrace();
+           log.error("exception message", e);
        }
    }

|
@ -2,12 +2,15 @@ spring:
|
|||||||
datasource:
|
datasource:
|
||||||
type: com.zaxxer.hikari.HikariDataSource
|
type: com.zaxxer.hikari.HikariDataSource
|
||||||
driver-class-name: com.mysql.cj.jdbc.Driver
|
driver-class-name: com.mysql.cj.jdbc.Driver
|
||||||
url: jdbc:mysql://127.0.0.1:3306/cook?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
|
# 携科
|
||||||
|
url: jdbc:mysql://127.0.0.1:3306/xieke?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
|
||||||
username: root
|
username: root
|
||||||
password: celnet@2025.bln
|
password: Celnet2025.QY
|
||||||
# url: jdbc:mysql://183.6.105.131:13306/zhonghe_test?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
|
# cook
|
||||||
|
# url: jdbc:mysql://127.0.0.1:3306/cook_1?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
|
||||||
# username: root
|
# username: root
|
||||||
# password: Celnet@2022
|
# password: celnet@2025.bln
|
||||||
|
|
||||||
mail:
|
mail:
|
||||||
host: smtp.163.com
|
host: smtp.163.com
|
||||||
port: 465
|
port: 465
|
||||||
@ -19,8 +22,8 @@ spring:
|
|||||||
#sf webservice配置
|
#sf webservice配置
|
||||||
sf:
|
sf:
|
||||||
# 附件下载url
|
# 附件下载url
|
||||||
file-download-url: https://cookmedicalasia.my.salesforce.com
|
file-download-url: https://d2000000079c7eaa.lightning.force.com
|
||||||
file-upload-url: https://cookchina--sandbox.sandbox.my.sfcrmproducts.cn
|
file-upload-url: https://steco-process.lightning.sfcrmapps.cn
|
||||||
# #线程数
|
# #线程数
|
||||||
# executor-size: 5
|
# executor-size: 5
|
||||||
# list:
|
# list:
|
||||||
|
@ -2,9 +2,14 @@ spring:
   datasource:
     type: com.zaxxer.hikari.HikariDataSource
     driver-class-name: com.mysql.cj.jdbc.Driver
-    url: jdbc:mysql://127.0.0.1:3306/cook?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+    # Xieke
+    url: jdbc:mysql://127.0.0.1:3306/xieke?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
     username: root
-    password: celnet@2025.bln
+    password: Celnet2025.QY
+    # cook
+#    url: jdbc:mysql://127.0.0.1:3306/cook_1?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
+#    username: root
+#    password: celnet@2025.bln
   mail:
     host: smtp.mxhichina.com
     port: 465
@ -16,8 +21,8 @@ spring:
 #sf webservice configuration
 sf:
   # attachment download url
-  file-download-url: https://cookmedicalasia.my.salesforce.com
+  file-download-url: https://d2000000079c7eaa.lightning.force.com
-  file-upload-url: https://cookchina--sandbox.sandbox.my.sfcrmproducts.cn
+  file-upload-url: https://steco-process.lightning.sfcrmapps.cn
   #thread count
   executor-size: 5
   list:
@ -2,9 +2,15 @@ spring:
   datasource:
     type: com.zaxxer.hikari.HikariDataSource
     driver-class-name: com.mysql.cj.jdbc.Driver
-    url: jdbc:mysql://127.0.0.1:3306/cook?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
+    # Xieke
+    url: jdbc:mysql://127.0.0.1:3306/xieke?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
     username: root
-    password: celnet@2025.bln
+    password: Celnet2025.QY
+    # cook
+#    url: jdbc:mysql://127.0.0.1:3306/cook_1?useUnicode=true&characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai
+#    username: root
+#    password: celnet@2025.bln
+
   mail:
     host: smtp.163.com
     port: 465
@ -16,8 +22,8 @@ spring:
 #sf webservice configuration
 sf:
   # attachment download url
-  file-download-url: https://cookmedicalasia.my.salesforce.com
+  file-download-url: https://d2000000079c7eaa.lightning.force.com
-  file-upload-url: https://cookchina--sandbox.sandbox.my.sfcrmproducts.cn
+  file-upload-url: https://steco-process.lightning.sfcrmapps.cn
   #thread count
   executor-size: 5
   list:
@ -135,6 +135,18 @@
        </where>
    </select>

+   <select id="listJsonObject" resultType="cn.hutool.json.JSONObject">
+       SELECT
+       ${select}
+       FROM
+       `${api}`
+       <where>
+           <if test="sql != null">
+               ${sql}
+           </if>
+       </where>
+   </select>
+
    <select id="listById" resultType="String">
        SELECT
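For context, a hypothetical MyBatis mapper declaration that the new listJsonObject statement could bind to; the method name follows the statement id, but the parameter names and the owning interface are assumptions, not code from this repository.

import cn.hutool.json.JSONObject;
import org.apache.ibatis.annotations.Param;
import java.util.List;

public interface CustomMapperSketch {
    // ${select}, ${api} and ${sql} are spliced into the SQL verbatim, so callers must pass trusted values
    List<JSONObject> listJsonObject(@Param("select") String select,
                                    @Param("api") String api,
                                    @Param("sql") String sql);
}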
@ -2,9 +2,11 @@ package com.celnet.datadump;

 import com.alibaba.fastjson.JSONObject;
 import com.celnet.datadump.mapper.CustomMapper;
+import com.sforce.async.BulkConnection;
 import com.sforce.soap.partner.PartnerConnection;
 import com.sforce.ws.ConnectionException;
 import com.sforce.ws.ConnectorConfig;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.http.HttpEntity;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
@ -24,10 +26,11 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+@Slf4j
 public class DataDumpConnetTests {

-//    @Resource
-//    private CustomMapper customerMapper;
+    @Resource
+    private CustomMapper customerMapper;
    /**
     * Test whether the target ORG can be connected to
     * @throws Exception
@ -35,7 +38,6 @@ public class DataDumpConnetTests {
    @Test
    public void createConnect() throws Exception {
        try {
-//            List<Map<String, Object>> poll = customerMapper.list("code,value", "org_config", null);
 //            // iterate over poll and pick out the values whose code is TARGET_ORG_URL, TARGET_ORG_USERNAME or TARGET_ORG_PASSWORD
            Map<String, String> map = new HashMap<>();
 //            Map<String, Object> map = new HashMap<>();
@ -54,9 +56,9 @@ public class DataDumpConnetTests {
 //            }
 //            }
            // iterate over poll and pick out the values whose code is TARGET_ORG_URL, TARGET_ORG_USERNAME or TARGET_ORG_PASSWORD
-           map.put("url", "https://momentum-platform-2421.my.salesforce.com/services/Soap/u/56.0");
+           map.put("url", "https://steco-process.my.sfcrmproducts.cn/services/Soap/u/56.0");
-           map.put("username", "jassi.w@procision.cn");
+           map.put("username", "binxu@steco-process.com");
-           map.put("password", "ncnoisicorp@888");
+           map.put("password", "AAM0902!");
            String username = map.get("username").toString();
            ConnectorConfig config = new ConnectorConfig();
            config.setUsername(username);
@ -68,7 +70,6 @@ public class DataDumpConnetTests {
            config.setReadTimeout(60 * 60 * 1000);
            PartnerConnection connection = new PartnerConnection(config);
            String orgId = connection.getUserInfo().getOrganizationId();
-
        } catch (ConnectionException e) {
            e.printStackTrace();
        }