
First commit

lsh committed 2 weeks ago
commit
c941138465
100 changed files with 10,535 additions and 0 deletions
  1. 20 0
      .gitignore
  2. 21 0
      LICENSE
  3. 281 0
      README.md
  4. 354 0
      bin/db/datax_web.sql
  5. 273 0
      bin/install.sh
  6. 31 0
      bin/start-all.sh
  7. 84 0
      bin/start.sh
  8. 33 0
      bin/stop-all.sh
  9. 83 0
      bin/stop.sh
  10. 843 0
      datax-admin/pom.xml
  11. 47 0
      datax-admin/src/main/assembly/deploy.xml
  12. 202 0
      datax-admin/src/main/bin/configure.sh
  13. 280 0
      datax-admin/src/main/bin/datax-admin.sh
  14. 21 0
      datax-admin/src/main/bin/env.properties
  15. 38 0
      datax-admin/src/main/java/com/wugui/datax/admin/DataXAdminApplication.java
  16. 44 0
      datax-admin/src/main/java/com/wugui/datax/admin/config/MybatisPlusConfig.java
  17. 73 0
      datax-admin/src/main/java/com/wugui/datax/admin/config/SecurityConfig.java
  18. 269 0
      datax-admin/src/main/java/com/wugui/datax/admin/config/ServiceModelToSwagger2MapperImpl.java
  19. 39 0
      datax-admin/src/main/java/com/wugui/datax/admin/config/SwaggerConfig.java
  20. 24 0
      datax-admin/src/main/java/com/wugui/datax/admin/config/WebConfig.java
  21. 22 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/BaseController.java
  22. 253 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/BaseForm.java
  23. 48 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/DataxJsonController.java
  24. 49 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/IndexController.java
  25. 144 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobApiController.java
  26. 75 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobCodeController.java
  27. 133 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobDatasourceController.java
  28. 172 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobGroupController.java
  29. 129 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobInfoController.java
  30. 173 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobLogController.java
  31. 110 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobProjectController.java
  32. 90 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobRegistryController.java
  33. 84 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/JobTemplateController.java
  34. 104 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/MetadataController.java
  35. 149 0
      datax-admin/src/main/java/com/wugui/datax/admin/controller/UserController.java
  36. 154 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/conf/JobAdminConfig.java
  37. 1657 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/cron/CronExpression.java
  38. 49 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/handler/AESEncryptHandler.java
  39. 36 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/handler/MybatisMetaObjectHandler.java
  40. 40 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/kill/KillJob.java
  41. 48 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/ExecutorRouteStrategyEnum.java
  42. 24 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/ExecutorRouter.java
  43. 47 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteBusyover.java
  44. 85 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteConsistentHash.java
  45. 48 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteFailover.java
  46. 19 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteFirst.java
  47. 79 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLFU.java
  48. 76 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLRU.java
  49. 19 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLast.java
  50. 23 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteRandom.java
  51. 39 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteRound.java
  52. 113 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/scheduler/JobScheduler.java
  53. 209 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobFailMonitorHelper.java
  54. 152 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobLogReportHelper.java
  55. 108 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobRegistryMonitorHelper.java
  56. 352 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobScheduleHelper.java
  57. 133 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobTriggerPoolHelper.java
  58. 256 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/trigger/JobTrigger.java
  59. 26 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/trigger/TriggerTypeEnum.java
  60. 80 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/util/I18nUtil.java
  61. 92 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/util/JacksonUtil.java
  62. 133 0
      datax-admin/src/main/java/com/wugui/datax/admin/core/util/LocalCacheUtil.java
  63. 32 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/DataXBatchJsonBuildDto.java
  64. 46 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/DataXJsonBuildDto.java
  65. 17 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/HbaseReaderDto.java
  66. 17 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/HbaseWriterDto.java
  67. 28 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/HiveReaderDto.java
  68. 29 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/HiveWriterDto.java
  69. 19 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/MongoDBReaderDto.java
  70. 20 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/MongoDBWriterDto.java
  71. 15 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/Range.java
  72. 24 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/RdbmsReaderDto.java
  73. 21 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/RdbmsWriterDto.java
  74. 86 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/TaskScheduleDto.java
  75. 21 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/TriggerJobDto.java
  76. 18 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/UpsertInfo.java
  77. 11 0
      datax-admin/src/main/java/com/wugui/datax/admin/dto/VersionColumn.java
  78. 143 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobDatasource.java
  79. 84 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobGroup.java
  80. 125 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobInfo.java
  81. 66 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLog.java
  82. 34 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLogGlue.java
  83. 17 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLogReport.java
  84. 57 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobPermission.java
  85. 44 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobProject.java
  86. 23 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobRegistry.java
  87. 30 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobRole.java
  88. 92 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobTemplate.java
  89. 78 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JobUser.java
  90. 84 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/JwtUser.java
  91. 15 0
      datax-admin/src/main/java/com/wugui/datax/admin/entity/LoginUser.java
  92. 23 0
      datax-admin/src/main/java/com/wugui/datax/admin/exception/GlobalExceptionHandler.java
  93. 27 0
      datax-admin/src/main/java/com/wugui/datax/admin/exception/TokenIsExpiredException.java
  94. 92 0
      datax-admin/src/main/java/com/wugui/datax/admin/filter/JWTAuthenticationFilter.java
  95. 73 0
      datax-admin/src/main/java/com/wugui/datax/admin/filter/JWTAuthorizationFilter.java
  96. 18 0
      datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobDatasourceMapper.java
  97. 29 0
      datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobGroupMapper.java
  98. 60 0
      datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobInfoMapper.java
  99. 25 0
      datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobLogGlueMapper.java
  100. 0 0
      datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobLogMapper.java

+ 20 - 0
.gitignore

@@ -0,0 +1,20 @@
+
+/datax-executor/target/
+/datax-executor/datax-executor.iml
+/datax-core/datax-core.iml
+/datax-core/target/
+/datax-admin/datax-admin.iml
+/datax-admin/target/
+/.idea/
+/datax-admin/.idea/
+/datax-registry/datax-registry.iml
+/datax-registry/target/
+/datax-rpc/datax-rpc.iml
+/datax-rpc/target/
+/datax-all.iml
+/logs/
+*-dev*
+/data/
+/build/
+/packages/
+/datax-assembly/target/

+ 21 - 0
LICENSE

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 WeiYe
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 281 - 0
README.md

@@ -0,0 +1,281 @@
+![GitHub contributors](https://img.shields.io/github/contributors/WeiYe-Jing/datax-web)
+![GitHub issues](https://img.shields.io/github/issues/WeiYe-Jing/datax-web)
+![GitHub](https://img.shields.io/github/license/WeiYe-Jing/datax-web)
+![GitHub code size in bytes](https://img.shields.io/github/languages/code-size/WeiYe-Jing/datax-web)
+![](https://img.shields.io/badge/qq%E7%BE%A4-776939467-green.svg)
+
+# DataX-Web
+
+[![Stargazers over time](https://starchart.cc/WeiYe-Jing/datax-web.svg)](https://starchart.cc/WeiYe-Jing/datax-web)
+
+DataX Web is a distributed data synchronization tool built on top of DataX. It provides a simple, easy-to-use web interface that lowers the learning curve of DataX, shortens task configuration time, and helps avoid configuration errors. Users create data synchronization tasks by selecting data sources in the UI; RDBMS, Hive, HBase, ClickHouse, MongoDB and other sources are supported, and RDBMS sources support batch creation of synchronization tasks. Synchronization progress and logs can be viewed in real time, and running jobs can be terminated. The scheduler integrates a customized fork of xxl-job, enabling incremental synchronization by time or by auto-increment primary key.
+
+Task "executors" support clustered deployment with selectable multi-node routing strategies, timeout control, failure retry, failure alerting, task dependencies, and monitoring of executor CPU, memory and load. Support for more data sources, data-transformation UDFs, table-structure synchronization, data lineage and other more complex scenarios is planned.
+
+# Architecture diagram:
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/DataX-Web.png)
+
+# System Requirements
+
+- Language: Java 8 (JDK 1.8.201 or later recommended)<br>
+  Python 2.7 (for Python 3 support, replace the three Python files under datax/bin with the versions provided in doc/datax-web/datax-python3)
+- Environment: macOS, Windows, Linux
+- Database: MySQL 5.7
+
+
+
+# Features
+
+- 1. Build the DataX JSON through the web UI;
+- 2. The DataX JSON is stored in the database, making task migration and management easy;
+- 3. View extraction logs on the web in real time, similar to the Jenkins console output;
+- 4. DataX run records are displayed, and DataX jobs can be stopped from the page;
+- 5. Scheduled DataX tasks; task status can be changed dynamically, tasks can be started/stopped and running tasks terminated, taking effect immediately;
+- 6. Centralized scheduler design with support for clustered deployment;
+- 7. Distributed task execution; task "executors" support clustered deployment;
+- 8. Executors register tasks periodically and automatically; the scheduling center discovers registered tasks and triggers execution;
+- 9. Routing strategies: rich routing strategies for executor clusters, including first, last, round-robin, random, consistent hash, least frequently used, least recently used, failover, busy-over, etc.;
+- 10. Blocking strategies: what to do when scheduling is too dense for executors to keep up, including serial execution on a single node (default), discard subsequent scheduling, and override earlier scheduling;
+- 11. Task timeout control: custom task timeouts; tasks that run past the timeout are actively interrupted;
+- 12. Task failure retry: custom retry counts; failed tasks are retried the configured number of times;
+- 13. Task failure alerting: email alerts are provided by default, with extension hooks for SMS, DingTalk and other channels;
+- 14. User management: online management of system users, with administrator and ordinary-user roles;
+- 15. Task dependencies: child tasks can be configured; when a parent task finishes successfully it triggers one run of its child tasks (multiple child tasks are comma-separated);
+- 16. Run reports: real-time run data and scheduling reports such as scheduling-date and scheduling-success distribution charts;
+- 17. Incremental fields can be specified; scheduled tasks obtain each run's data range automatically, with failure retry to keep data safe;
+- 18. DataX JVM parameters can be configured on the page;
+- 19. Manual connectivity test after a data source is configured;
+- 20. Templates can be configured for common tasks; after building the JSON, a template can be linked to create the task;
+- 21. Hive data sources via JDBC; column information can be generated from the data source on the JSON-building page to simplify configuration;
+- 22. The DataX directory is resolved from environment variables first, so JSON and log directories need not be specified in clustered deployments;
+- 23. Hive partitions can be specified through dynamic parameters, which can be combined with increments to insert incremental data into partitions dynamically;
+- 24. Task types extended from DataX tasks to Shell, Python and PowerShell tasks;
+- 25. HBase data source support; JSON building can obtain hbaseConfig and column from the HBase data source;
+- 26. MongoDB data source support; users only need to select a collectionName to build the JSON;
+- 27. Monitoring pages for executor CPU, memory and load;
+- 28. Sample DataX JSON configurations for 24 plugins;
+- 29. Common fields (create time, creator, update time, updater) are filled automatically on insert or update;
+- 30. Token validation for the Swagger API;
+- 31. Task timeouts; the DataX process of a timed-out task is killed, which, combined with the retry strategy, avoids DataX hanging on network problems;
+- 32. Project management module for categorizing tasks;
+- 33. Batch task creation for RDBMS data sources; selecting a data source and tables generates DataX synchronization tasks in batch from a template;
+- 34. ClickHouse data source support in JSON building;
+- 35. Graphical monitoring pages for executor CPU, memory and load;
+- 36. Auto-increment primary-key mode for RDBMS incremental extraction, with improved page parameter configuration;
+- 37. New MongoDB data source connection method; refactored HBase data source JSON-building module;
+- 38. Stop function for script-type tasks;
+- 39. postSql added to RDBMS JSON building, with support for multiple preSql and postSql statements;
+- 40. Revised data source encryption algorithm and code optimizations;
+- 41. DataX execution result statistics on the log page;
+
+# Quick Start:
+
+##### See: [Quick Start](https://github.com/WeiYe-Jing/datax-web/blob/master/userGuid.md)
+##### Linux: [One-click deployment](https://github.com/WeiYe-Jing/datax-web/blob/master/doc/datax-web/datax-web-deploy.md)
+
+
+# Introduction:
+
+### 1. Executor configuration (uses the open-source project xxl-job)
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/executor.png)
+
+- 1. The "调度中心OnLine:" panel on the right lists the online scheduling centers; when a task finishes, the executor calls back a scheduling center in failover mode with the result, avoiding a single point of failure in the callback;
+- 2. The "执行器列表" (executor list) shows the online executors; the cluster machines behind each executor can be viewed via "OnLine 机器";
+
+#### Executor properties
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/add_executor.png)
+
+```
+1. AppName: must match datax.job.executor.appname in the datax-executor application.yml.
+   AppName is the unique identifier of an executor cluster; executors periodically register themselves under this AppName, and registered executors are discovered automatically through it when tasks are scheduled;
+2. Name: a human-readable name for the executor; AppName is limited to letters and digits and is hard to read, so the name improves readability;
+3. Order: sort order of the executor; wherever an executor must be chosen (e.g. when adding a task), available executors are listed in this order;
+4. Registration method: how the scheduling center obtains executor addresses;
+    Automatic registration: executors register themselves, and the scheduling center discovers their addresses dynamically through the underlying registry;
+    Manual entry: executor addresses are entered by hand, comma-separated, for the scheduling center to use;
+5. Machine addresses: effective only when the registration method is "manual entry"; allows manual maintenance of executor addresses;
+```
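A minimal sketch of checking that AppName consistency from the shell; the config path is an assumption based on the module layout produced by bin/install.sh, and the expected value matches the default app_name seeded in bin/db/datax_web.sql:

```bash
#!/bin/bash
# Sketch only: confirm the executor's appname matches the AppName entered in the admin UI.
EXPECTED_APPNAME="datax-executor"                      # default value shipped in job_group
CONF="modules/datax-executor*/conf/application.yml"    # assumed location, adjust as needed
if grep -h "appname" ${CONF} | grep -q "${EXPECTED_APPNAME}"; then
  echo "AppName matches: ${EXPECTED_APPNAME}"
else
  echo "AppName mismatch - align datax.job.executor.appname with the admin UI" >&2
fi
```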
+
+### 2. Create a data source
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/add_datasource.png)
+
+Used in step 4.
+
+### 3. Create a task template
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/template_list.png)
+
+Used in step 4.
+
+
+
+### 4. Build the JSON script
+
+- 1. In steps one and two, select the data source created in step 2. JSON building currently supports the following data sources: hive, mysql, oracle, postgresql, sqlserver, hbase, mongodb and clickhouse. JSON building for other data sources is under development; for now their JSON must be written by hand (a sketch of a hand-written job follows at the end of this section).
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/build.png)
+
+- 2. Field mapping
+
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/mapping.png)
+
+- 3. Click Build to generate the JSON. You can copy the JSON and create the task manually (choose the DataX task type and paste the JSON into the text box), or click Select Template to generate the task directly.
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/select_template.png)
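For data sources not yet covered by the builder, the JSON has to be written by hand as noted above. The sketch below shows the general shape of a standard DataX job definition; all hosts, credentials, tables and columns are placeholders, not values taken from this project:

```bash
# Sketch of a hand-written DataX job JSON (placeholder values only).
cat > job.json <<'EOF'
{
  "job": {
    "setting": { "speed": { "channel": 1 } },
    "content": [{
      "reader": {
        "name": "mysqlreader",
        "parameter": {
          "username": "reader_user", "password": "******",
          "column": ["id", "name"],
          "connection": [{ "jdbcUrl": ["jdbc:mysql://source-host:3306/src_db"], "table": ["t_source"] }]
        }
      },
      "writer": {
        "name": "mysqlwriter",
        "parameter": {
          "username": "writer_user", "password": "******",
          "column": ["id", "name"],
          "connection": [{ "jdbcUrl": "jdbc:mysql://target-host:3306/dst_db", "table": ["t_target"] }]
        }
      }
    }]
  }
}
EOF
```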
+
+### 5. Create tasks in batch
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/batch_build_r.png)
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/batch_build_w.png)
+
+### 6. Task creation (creating a task from a template is not covered again here; see 4. Build the JSON script)
+
+#### DataX, Shell, Python and PowerShell tasks are supported
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/datax.png)
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/shell.png)
+
+- Blocking strategy: what to do when scheduling is too dense for the executor to keep up;
+    - Serial on a single node: scheduling requests entering a single executor are placed in a FIFO queue and run serially;
+    - Discard subsequent scheduling: if the executor already has a running task, the new request is discarded and marked as failed;
+    - Override earlier scheduling: if the executor already has a running task, it is terminated and the queue cleared, then the new request runs;
+- For incremental synchronization, setting the blocking strategy to "discard subsequent scheduling" or "serial on a single node" is recommended
+    - When using serial on a single node, set the retry count carefully (failure retry count * time per run < scheduling period of the task; see the sketch below). Too many retries duplicate data: for example, with a task scheduled every 30 seconds, 20 seconds per run and 3 retries, if the task fails, the first retry covers the range 1577755680-1577756680; if that retry has not finished when the next run starts, the new run covers 1577755680-1577758680
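The rule of thumb above can be sanity-checked mechanically; a throwaway sketch using the numbers from the example:

```bash
# If retries cannot finish inside one scheduling period, a retry may still be
# running when the next schedule fires, and overlapping ranges duplicate data.
PERIOD=30      # task scheduled every 30 seconds
RUN_TIME=20    # each run takes about 20 seconds
RETRIES=3      # configured failure retries
if (( RETRIES * RUN_TIME < PERIOD )); then
  echo "safe: retries fit inside one scheduling period"
else
  echo "risky: lower the retry count or lengthen the scheduling period"
fi
```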
+
+- [Incremental parameter settings](https://github.com/WeiYe-Jing/datax-web/blob/master/doc/datax-web/increment-desc.md)
+- [Partition parameter settings](https://github.com/WeiYe-Jing/datax-web/blob/master/doc/datax-web/partition-dynamic-param.md)
+
+### 7. Task list
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/job.png)
+
+### 8. Click to view logs, fetch log information in real time, and terminate the running DataX process
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/log_stat.png)
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/img/log_detail.png)
+
+### 9. Task resource monitoring
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/img/monitor.png)
+
+### 10. The admin user can create users and edit user information
+
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/user.png)
+
+
+# UI
+
+[Front-end GitHub repository](https://github.com/WeiYe-Jing/datax-web-ui)
+
+# Project members
+
+- water
+
+```
+It is a great honor to be a committer of datax-web. From the early days of hand-writing DataX tasks and configuration, to creating tasks, configuration and scheduling by point-and-click in the datax-web UI, datax-web has improved the efficiency of data synchronization work considerably. I believe it will become an indispensable part of ETL productivity...
+```
+
+- Alecor
+
+```
+It is a great honor to be a committer of datax-web. datax-web aims to free users from DataX configuration work and provide web-based management of DataX. I hope datax-web can serve more people in need and bring them a simpler, easier experience!
+```
+
+- zhouhongfa
+
+- liukunyuan
+
+Thanks for the contributions!
+
+# Contributing
+
+Contributions are welcome! Open a pull request to fix a bug, or open an Issue to discuss a new feature or change.
+
+
+# Copyright and License
+
+MIT License
+
+Copyright (c) 2020 WeiYe
+
+The product is open source and free of charge, with ongoing free community technical support. Individuals and companies are free to adopt and use it internally.
+
+> You are welcome to register at the [registration page](https://github.com/WeiYe-Jing/datax-web/issues/93); registration is only used to promote the product and motivate community development.
+
+# v-2.1.2
+
+### Added
+
+1. Project management module for categorizing tasks;
+2. Batch task creation for RDBMS data sources; selecting a data source and tables generates DataX synchronization tasks in batch from a template;
+3. ClickHouse data source support in JSON building;
+4. Graphical monitoring pages for executor CPU, memory and load;
+5. Auto-increment primary-key mode for RDBMS incremental extraction, with improved page parameter configuration;
+6. New MongoDB data source connection method; refactored HBase data source JSON-building module;
+7. Stop function for script-type tasks;
+8. postSql added to RDBMS JSON building, with support for multiple preSql and postSql statements;
+9. Merged the datax-registry module into datax-rpc;
+10. Revised data source encryption algorithm and code optimizations;
+11. More time formats supported for time-based incremental synchronization;
+12. DataX execution result statistics on the log page;
+
+### Improvements:
+
+1. Schema name selection added to JSON building for PostgreSQL, SQLServer and Oracle data sources;
+2. Fixed handling of field names in the DataX JSON that collide with data source keywords;
+3. Improved button layout on the task management page;
+4. Task description added to the log management page;
+5. Fixed the JSON-building front-end form not caching data;
+6. Header/footer options added to Hive JSON building;
+
+### Notes:
+Upgrading from 2.1.1 is not recommended: the data source encryption scheme changed, so previously encrypted data sources can no longer be decrypted and their tasks will fail.
+If you need to upgrade, recreate your data sources and tasks.
+
+# v-2.1.1
+
+### Added
+
+1. HBase data source support; JSON building can obtain hbaseConfig and column from the HBase data source;
+2. MongoDB data source support; users only need to select a collectionName to build the JSON;
+3. Monitoring pages for executor CPU, memory and load;
+4. Sample DataX JSON configurations for 24 plugins;
+5. Common fields (create time, creator, update time, updater) are filled automatically on insert or update;
+6. Token validation for the Swagger API;
+7. Task timeouts; the DataX process of a timed-out task is killed, which, combined with the retry strategy, avoids DataX hanging on network problems.
+
+### Improvements:
+
+1. Usernames and passwords in data source management are encrypted for better security;
+2. Usernames and passwords in JSON files are encrypted, and decrypted when the DataX task runs;
+3. Reorganized page menus, upgraded icons and improved prompts and other interactions;
+4. Removed class names and other irrelevant information from log output to reduce file size; optimized large-file output and page display;
+5. logback now reads the log path from the yml configuration
+
+### Fixes:
+
+1. Error and request timeout when viewing a task log that is too large;
+
+# Contributing code
+[How to contribute](https://github.com/WeiYe-Jing/datax-web/issues/190)
+
+
+# Contact us
+
+### Personal WeChat
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/WechatIMG2.jpeg?x-oss-process=image/resize,w_230,h_230)
+
+### QQ group
+![](https://datax-web.oss-cn-hangzhou.aliyuncs.com/doc/qrcode3.jpeg?x-oss-process=image/resize,w_250,h_300)
+
+
+
+

+ 354 - 0
bin/db/datax_web.sql

@@ -0,0 +1,354 @@
+/*
+ Navicat Premium Data Transfer
+
+ Source Server         : localhost
+ Source Server Type    : MySQL
+ Source Server Version : 50725
+ Source Host           : localhost:3306
+ Source Schema         : datax_web
+
+ Target Server Type    : MySQL
+ Target Server Version : 50725
+ File Encoding         : 65001
+
+ Date: 15/12/2019 22:27:10
+*/
+
+SET NAMES utf8mb4;
+SET FOREIGN_KEY_CHECKS = 0;
+
+-- ----------------------------
+-- Table structure for job_group
+-- ----------------------------
+DROP TABLE IF EXISTS `job_group`;
+CREATE TABLE `job_group`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `app_name` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '执行器AppName',
+  `title` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '执行器名称',
+  `order` int(11) NOT NULL DEFAULT 0 COMMENT '排序',
+  `address_type` tinyint(4) NOT NULL DEFAULT 0 COMMENT '执行器地址类型:0=自动注册、1=手动录入',
+  `address_list` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器地址列表,多地址逗号分隔',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of job_group
+-- ----------------------------
+INSERT INTO `job_group` VALUES (1, 'datax-executor', 'datax执行器', 1, 0, NULL);
+
+-- ----------------------------
+-- Table structure for job_info
+-- ----------------------------
+DROP TABLE IF EXISTS `job_info`;
+CREATE TABLE `job_info`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `job_group` int(11) NOT NULL COMMENT '执行器主键ID',
+  `job_cron` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '任务执行CRON',
+  `job_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `add_time` datetime(0) NULL DEFAULT NULL,
+  `update_time` datetime(0) NULL DEFAULT NULL,
+  `author` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '作者',
+  `alarm_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '报警邮件',
+  `executor_route_strategy` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器路由策略',
+  `executor_handler` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务handler',
+  `executor_param` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务参数',
+  `executor_block_strategy` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '阻塞处理策略',
+  `executor_timeout` int(11) NOT NULL DEFAULT 0 COMMENT '任务执行超时时间,单位秒',
+  `executor_fail_retry_count` int(11) NOT NULL DEFAULT 0 COMMENT '失败重试次数',
+  `glue_type` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'GLUE类型',
+  `glue_source` mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL COMMENT 'GLUE源代码',
+  `glue_remark` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'GLUE备注',
+  `glue_updatetime` datetime(0) NULL DEFAULT NULL COMMENT 'GLUE更新时间',
+  `child_jobid` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '子任务ID,多个逗号分隔',
+  `trigger_status` tinyint(4) NOT NULL DEFAULT 0 COMMENT '调度状态:0-停止,1-运行',
+  `trigger_last_time` bigint(13) NOT NULL DEFAULT 0 COMMENT '上次调度时间',
+  `trigger_next_time` bigint(13) NOT NULL DEFAULT 0 COMMENT '下次调度时间',
+  `job_json` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT 'datax运行脚本',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 7 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+
+-- ----------------------------
+-- Table structure for job_jdbc_datasource
+-- ----------------------------
+DROP TABLE IF EXISTS `job_jdbc_datasource`;
+CREATE TABLE `job_jdbc_datasource`  (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '自增主键',
+  `datasource_name` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '数据源名称',
+  `datasource_group` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT 'Default' COMMENT '数据源分组',
+  `jdbc_username` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '用户名',
+  `jdbc_password` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '密码',
+  `jdbc_url` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'jdbc url',
+  `jdbc_driver_class` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'jdbc驱动类',
+  `status` tinyint(1) NOT NULL DEFAULT 1 COMMENT '状态:0删除 1启用 2禁用',
+  `create_by` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '创建人',
+  `create_date` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间',
+  `update_by` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '更新人',
+  `update_date` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
+  `comments` varchar(1000) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '备注',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 6 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci COMMENT = 'jdbc数据源配置' ROW_FORMAT = Dynamic;
+
+
+-- ----------------------------
+-- Table structure for job_lock
+-- ----------------------------
+DROP TABLE IF EXISTS `job_lock`;
+CREATE TABLE `job_lock`  (
+  `lock_name` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '锁名称',
+  PRIMARY KEY (`lock_name`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of job_lock
+-- ----------------------------
+INSERT INTO `job_lock` VALUES ('schedule_lock');
+
+-- ----------------------------
+-- Table structure for job_log
+-- ----------------------------
+DROP TABLE IF EXISTS `job_log`;
+CREATE TABLE `job_log`  (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `job_group` int(11) NOT NULL COMMENT '执行器主键ID',
+  `job_id` int(11) NOT NULL COMMENT '任务,主键ID',
+  `job_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,
+  `executor_address` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器地址,本次执行的地址',
+  `executor_handler` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务handler',
+  `executor_param` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务参数',
+  `executor_sharding_param` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务分片参数,格式如 1/2',
+  `executor_fail_retry_count` int(11) NULL DEFAULT 0 COMMENT '失败重试次数',
+  `trigger_time` datetime(0) NULL DEFAULT NULL COMMENT '调度-时间',
+  `trigger_code` int(11) NOT NULL COMMENT '调度-结果',
+  `trigger_msg` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL COMMENT '调度-日志',
+  `handle_time` datetime(0) NULL DEFAULT NULL COMMENT '执行-时间',
+  `handle_code` int(11) NOT NULL COMMENT '执行-状态',
+  `handle_msg` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL COMMENT '执行-日志',
+  `alarm_status` tinyint(4) NOT NULL DEFAULT 0 COMMENT '告警状态:0-默认、1-无需告警、2-告警成功、3-告警失败',
+  `process_id` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'datax进程Id',
+  `max_id` bigint(20) NULL DEFAULT NULL COMMENT '增量表max id',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `I_trigger_time`(`trigger_time`) USING BTREE,
+  INDEX `I_handle_code`(`handle_code`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 0 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for job_log_report
+-- ----------------------------
+DROP TABLE IF EXISTS `job_log_report`;
+CREATE TABLE `job_log_report`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `trigger_day` datetime(0) NULL DEFAULT NULL COMMENT '调度-时间',
+  `running_count` int(11) NOT NULL DEFAULT 0 COMMENT '运行中-日志数量',
+  `suc_count` int(11) NOT NULL DEFAULT 0 COMMENT '执行成功-日志数量',
+  `fail_count` int(11) NOT NULL DEFAULT 0 COMMENT '执行失败-日志数量',
+  PRIMARY KEY (`id`) USING BTREE,
+  UNIQUE INDEX `i_trigger_day`(`trigger_day`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 28 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of job_log_report
+-- ----------------------------
+INSERT INTO `job_log_report` VALUES (20, '2019-12-07 00:00:00', 0, 0, 0);
+INSERT INTO `job_log_report` VALUES (21, '2019-12-10 00:00:00', 77, 52, 23);
+INSERT INTO `job_log_report` VALUES (22, '2019-12-11 00:00:00', 9, 2, 11);
+INSERT INTO `job_log_report` VALUES (23, '2019-12-13 00:00:00', 9, 48, 74);
+INSERT INTO `job_log_report` VALUES (24, '2019-12-12 00:00:00', 10, 8, 30);
+INSERT INTO `job_log_report` VALUES (25, '2019-12-14 00:00:00', 78, 45, 66);
+INSERT INTO `job_log_report` VALUES (26, '2019-12-15 00:00:00', 24, 76, 9);
+INSERT INTO `job_log_report` VALUES (27, '2019-12-16 00:00:00', 23, 85, 10);
+
+-- ----------------------------
+-- Table structure for job_logglue
+-- ----------------------------
+DROP TABLE IF EXISTS `job_logglue`;
+CREATE TABLE `job_logglue`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `job_id` int(11) NOT NULL COMMENT '任务,主键ID',
+  `glue_type` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'GLUE类型',
+  `glue_source` mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL COMMENT 'GLUE源代码',
+  `glue_remark` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'GLUE备注',
+  `add_time` datetime(0) NULL DEFAULT NULL,
+  `update_time` datetime(0) NULL DEFAULT NULL,
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Table structure for job_registry
+-- ----------------------------
+DROP TABLE IF EXISTS `job_registry`;
+CREATE TABLE `job_registry`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `registry_group` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `registry_key` varchar(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `registry_value` varchar(191) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `update_time` datetime(0) NULL DEFAULT NULL,
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `i_g_k_v`(`registry_group`, `registry_key`, `registry_value`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 26 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+
+
+-- ----------------------------
+-- Table structure for job_user
+-- ----------------------------
+DROP TABLE IF EXISTS `job_user`;
+CREATE TABLE `job_user`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `username` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '账号',
+  `password` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '密码',
+  `role` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '角色:0-普通用户、1-管理员',
+  `permission` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '权限:执行器ID列表,多个逗号分割',
+  PRIMARY KEY (`id`) USING BTREE,
+  UNIQUE INDEX `i_username`(`username`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 10 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+-- ----------------------------
+-- Records of job_user
+-- ----------------------------
+INSERT INTO `job_user` VALUES (1, 'admin', '$2a$10$2KCqRbra0Yn2TwvkZxtfLuWuUP5KyCWsljO/ci5pLD27pqR3TV1vy', 'ROLE_ADMIN', NULL);
+
+
+
+/**
+v2.1.1脚本更新
+*/
+ALTER TABLE `job_info`
+ADD COLUMN `replace_param` VARCHAR(100) NULL DEFAULT NULL COMMENT '动态参数' AFTER `job_json`,
+ADD COLUMN `jvm_param` VARCHAR(200) NULL DEFAULT NULL COMMENT 'jvm参数' AFTER `replace_param`,
+ADD COLUMN `time_offset` INT(11) NULL DEFAULT '0'COMMENT '时间偏移量'  AFTER `jvm_param`;
+/**
+增量改版脚本更新
+ */
+ALTER TABLE `job_info` DROP COLUMN `time_offset`;
+ALTER TABLE `job_info`
+ADD COLUMN `inc_start_time` DATETIME NULL DEFAULT NULL COMMENT '增量初始时间' AFTER `jvm_param`;
+
+-- ----------------------------
+-- Table structure for job_template
+-- ----------------------------
+DROP TABLE IF EXISTS `job_template`;
+CREATE TABLE `job_template`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `job_group` int(11) NOT NULL COMMENT '执行器主键ID',
+  `job_cron` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '任务执行CRON',
+  `job_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `add_time` datetime(0) NULL DEFAULT NULL,
+  `update_time` datetime(0) NULL DEFAULT NULL,
+  `user_id` int(11) NOT NULL COMMENT '修改用户',
+  `alarm_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '报警邮件',
+  `executor_route_strategy` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器路由策略',
+  `executor_handler` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器任务handler',
+  `executor_param` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '执行器参数',
+  `executor_block_strategy` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '阻塞处理策略',
+  `executor_timeout` int(11) NOT NULL DEFAULT 0 COMMENT '任务执行超时时间,单位秒',
+  `executor_fail_retry_count` int(11) NOT NULL DEFAULT 0 COMMENT '失败重试次数',
+  `glue_type` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'GLUE类型',
+  `glue_source` mediumtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL COMMENT 'GLUE源代码',
+  `glue_remark` varchar(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'GLUE备注',
+  `glue_updatetime` datetime(0) NULL DEFAULT NULL COMMENT 'GLUE更新时间',
+  `child_jobid` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '子任务ID,多个逗号分隔',
+  `trigger_last_time` bigint(13) NOT NULL DEFAULT 0 COMMENT '上次调度时间',
+  `trigger_next_time` bigint(13) NOT NULL DEFAULT 0 COMMENT '下次调度时间',
+  `job_json` text CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT 'datax运行脚本',
+  `jvm_param` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'jvm参数',
+  `project_id` int(11) NULL DEFAULT NULL COMMENT '所属项目Id',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 22 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+/**
+添加数据源字段
+ */
+ALTER TABLE `job_jdbc_datasource`
+ADD COLUMN `datasource` VARCHAR(45) NOT NULL COMMENT '数据源' AFTER `datasource_name`;
+
+/**
+添加分区字段
+ */
+ALTER TABLE `job_info`
+ADD COLUMN `partition_info` VARCHAR(100) NULL DEFAULT NULL COMMENT '分区信息' AFTER `inc_start_time`;
+
+/**
+2.1.1版本新增----------------------------------------------------------------------------------------------
+ */
+
+/**
+最近一次执行状态
+ */
+ALTER TABLE `job_info`
+ADD COLUMN `last_handle_code` INT(11) NULL DEFAULT '0' COMMENT '最近一次执行状态' AFTER `partition_info`;
+
+/**
+zookeeper地址
+ */
+ALTER TABLE `job_jdbc_datasource`
+ADD COLUMN `zk_adress` VARCHAR(200) NULL DEFAULT NULL AFTER `jdbc_driver_class`;
+
+ALTER TABLE `job_info`
+CHANGE COLUMN `executor_timeout` `executor_timeout` INT(11) NOT NULL DEFAULT '0' COMMENT '任务执行超时时间,单位分钟' ;
+
+/**
+用户名密码改为非必填
+ */
+ALTER TABLE `job_jdbc_datasource`
+CHANGE COLUMN `jdbc_username` `jdbc_username` VARCHAR(100) CHARACTER SET 'utf8mb4' NULL DEFAULT NULL COMMENT '用户名' ,
+CHANGE COLUMN `jdbc_password` `jdbc_password` VARCHAR(200) CHARACTER SET 'utf8mb4' NULL DEFAULT NULL COMMENT '密码' ;
+/**
+添加mongodb数据库名字段
+ */
+ALTER TABLE `job_jdbc_datasource`
+ADD COLUMN `database_name` VARCHAR(45) NULL DEFAULT NULL COMMENT '数据库名' AFTER `datasource_group`;
+/**
+添加执行器资源字段
+ */
+ALTER TABLE `job_registry`
+ADD COLUMN `cpu_usage` DOUBLE NULL AFTER `registry_value`,
+ADD COLUMN `memory_usage` DOUBLE NULL AFTER `cpu_usage`,
+ADD COLUMN `load_average` DOUBLE NULL AFTER `memory_usage`;
+
+-- ----------------------------
+-- Table structure for job_permission
+-- ----------------------------
+DROP TABLE IF EXISTS `job_permission`;
+CREATE TABLE `job_permission`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键',
+  `name` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT '权限名',
+  `description` varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT '权限描述',
+  `url` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,
+  `pid` int(11) NULL DEFAULT NULL,
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+
+
+ALTER TABLE `job_info`
+ADD COLUMN `replace_param_type` varchar(255) NULL COMMENT '增量时间格式' AFTER `last_handle_code`;
+
+
+ALTER TABLE `job_info`
+ADD COLUMN `project_id` int(11) NULL COMMENT '所属项目id' AFTER `job_desc`;
+
+ALTER TABLE `job_info`
+ADD COLUMN `reader_table` VARCHAR(255) NULL COMMENT 'reader表名称' AFTER `replace_param_type`,
+ADD COLUMN `primary_key` VARCHAR(50) NULL COMMENT '增量表主键' AFTER `reader_table`,
+ADD COLUMN `inc_start_id` VARCHAR(20) NULL COMMENT '增量初始id' AFTER `primary_key`,
+ADD COLUMN `increment_type` TINYINT(4) NULL COMMENT '增量类型' AFTER `inc_start_id`,
+ADD COLUMN `datasource_id` BIGINT(11) NULL COMMENT '数据源id' AFTER `increment_type`;
+
+CREATE TABLE `job_project`  (
+  `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key',
+  `name` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL COMMENT 'project name',
+  `description` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NULL DEFAULT NULL,
+  `user_id` int(11) NULL DEFAULT NULL COMMENT 'creator id',
+  `flag` tinyint(4) NULL DEFAULT 1 COMMENT '0 not available, 1 available',
+  `create_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time',
+  `update_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'update time',
+  PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
+
+
+ALTER TABLE `job_info`
+CHANGE COLUMN `author` `user_id` INT(11) NOT NULL COMMENT '修改用户' ;
+
+ALTER TABLE `job_info`
+CHANGE COLUMN `increment_type` `increment_type` TINYINT(4) NULL DEFAULT 0 COMMENT '增量类型' ;
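The dump above targets MySQL 5.7 and the datax_web schema. A typical way to load it, assuming a local MySQL instance, might be:

```bash
# Create the schema with the character set used throughout the dump, then import it.
mysql -uroot -p -e "CREATE DATABASE IF NOT EXISTS datax_web DEFAULT CHARACTER SET utf8mb4;"
mysql -uroot -p datax_web < bin/db/datax_web.sql
```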

+ 273 - 0
bin/install.sh

@@ -0,0 +1,273 @@
+#!/bin/bash
+
+
+export BASE_LOG_DIR=""
+export BASE_CONF_DIR=""
+export BASE_DATA_DIR=""
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+SHELL_LOG="${DIR}/console.out"
+
+export SQL_SOURCE_PATH="${DIR}/db/datax_web.sql"
+
+MODULES_DIR="${DIR}/../modules"
+PACKAGE_DIR="${DIR}/../packages"
+
+MODULE_LIST=()
+CONF_FILE_PATH="bin/configure.sh"
+FORCE_INSTALL=false
+SKIP_PACKAGE=false
+SAFE_MODE=true
+UNSAFE_COMMAND=""
+USER=`whoami`
+SUDO_USER=false
+
+usage(){
+   printf "\033[1m Install usage:\n\033[0m"
+   printf "\t%-15s  %-15s  %-2s \n" "-m|--modules" "modules to install" "Define the modules to install"
+   printf "\t%-15s  %-15s  %-2s \n" "-f|--force" "force install" "Force program to install modules"
+   printf "\t%-15s  %-15s  %-2s \n" "--skip-pack" "do not decompress" "Skip the phase of decompressing packages"
+   printf "\t%-15s  %-15s  %-2s \n" "--unsafe" "unsafe mode" "Will clean the existing module directory"
+   printf "\t%-15s  %-15s  %-2s \n" "--safe" "safe mode" "Will not modify the existing module directory (Default)"
+   printf "\t%-15s  %-15s  %-2s \n" "-h|--help" "usage" "View command list"
+}
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+is_sudo_user(){
+  sudo -v >/dev/null 2>&1
+}
+
+clean_modules(){
+  if [ ${#MODULE_LIST[@]} -gt 0 ]; then
+    for server in ${MODULE_LIST[@]}
+    do
+      rm -rf ${MODULES_DIR}/${server}
+    done
+  else
+    rm -rf ${MODULES_DIR}/*
+  fi
+}
+
+uncompress_packages(){
+  local list=`ls ${PACKAGE_DIR}`
+  for pack in ${list}
+  do
+    local uncompress=true
+    if [ ${#PACKAGE_NAMES[@]} -gt 0 ]; then
+      uncompress=false
+      for server in ${PACKAGE_NAMES[@]}
+      do
+        if [ ${server} == ${pack%%.tar.gz*} ] || [ ${server} == ${pack%%.zip*} ]; then
+          uncompress=true
+          break
+        fi
+      done
+    fi
+    if [ ${uncompress} == true ]; then
+      if [[ ${pack} =~ tar\.gz$ ]]; then
+        local do_uncompress=0
+        if [ ${FORCE_INSTALL} == false ]; then
+          interact_echo "Do you want to decompress this package: [${pack}]?"
+          do_uncompress=$?
+        fi
+        if [ ${do_uncompress} == 0 ]; then
+          LOG INFO "\033[1m Uncompress package: [${pack}] to modules directory\033[0m"
+          tar --skip-old-files -zxf ${PACKAGE_DIR}/${pack} -C ${MODULES_DIR}
+        fi
+      elif [[ ${pack} =~ zip$ ]]; then
+        local do_uncompress=0
+        if [ ${FORCE_INSTALL} == false ]; then
+          interact_echo "Do you want to decompress this package: [${pack}]?"
+          do_uncompress=$?
+        fi
+        if [ ${do_uncompress} == 0 ]; then
+          LOG INFO "\033[1m Uncompress package: [${pack}] to modules directory\033[0m"
+          unzip -nq ${PACKAGE_DIR}/${pack} -d ${MODULES_DIR}
+        fi
+      fi
+      # skip other packages
+    fi
+  done
+}
+
+interact_echo(){
+  while [ 1 ]; do
+    read -p "$1 (Y/N)" yn
+    if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+      return 0
+    elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+      return 1
+    else
+      echo "Unknown choise: [$yn], please choose again."
+    fi
+  done
+}
+
+install_modules(){
+  LOG INFO "\033[1m ####### Start To Install Modules ######\033[0m"
+  LOG INFO "Module servers could be installed:"
+  for server in ${MODULE_LIST[@]}
+  do
+    printf "\\033[1m [${server}] \033[0m"
+  done
+  echo ""
+  for server in ${MODULE_LIST[@]}
+  do
+    if [ ${FORCE_INSTALL} == false ]; then
+      interact_echo "Do you want to confiugre and install [${server}]?"
+      if [ $? == 0 ]; then
+        LOG INFO "\033[1m Install module server: [${server}]\033[0m"
+        # Call configure.sh
+        ${MODULES_DIR}/${server}/${CONF_FILE_PATH} ${UNSAFE_COMMAND}
+      fi
+    else
+      LOG INFO "\033[1m Install module server: [${server}]\033[0m"
+      # Call configure.sh
+      ${MODULES_DIR}/${server}/${CONF_FILE_PATH} ${UNSAFE_COMMAND}
+    fi
+  done
+  LOG INFO "\033[1m ####### Finish To Install Modules ######\033[0m"
+}
+
+scan_to_install_modules(){
+  echo "Scan modules directory: [$1] to find server under dataxweb"
+  let c=0
+  ls_out=`ls $1`
+  for dir in ${ls_out}
+  do
+    if test -e "$1/${dir}/${CONF_FILE_PATH}"; then
+      MODULE_LIST[$c]=${dir}
+      ((c++))
+    fi
+  done
+  install_modules
+}
+
+while [ 1 ]; do
+  case ${!OPTIND} in
+  -h|--help)
+    usage
+    exit 0
+  ;;
+  -m|--modules)
+    i=1
+    if [ -z $2 ]; then
+      echo "Empty modules"
+      exit 1
+    fi
+    while [ 1 ]; do
+     split=`echo $2|cut -d "," -f${i}`
+      if [ "$split" != "" ];
+      then
+        c=$(($i - 1))
+        MODULE_LIST[$c]=${split}
+        i=$(($i + 1))
+      else
+        break
+      fi
+      if [ "`echo $2 |grep ","`" == "" ]; then
+        break
+      fi
+    done
+    shift 2
+  ;;
+  -f|--force)
+    FORCE_INSTALL=true
+    shift 1
+  ;;
+  --skip-pack)
+    SKIP_PACKAGE=true
+    shift 1
+  ;;
+  --safe)
+    SAFE_MODE=true
+    UNSAFE_COMMAND=""
+    shift 1
+  ;;
+  --unsafe)
+    SAFE_MODE=false
+    UNSAFE_COMMAND="--unsafe"
+    shift 1
+  ;;
+  "")
+    break
+  ;;
+  *)
+    echo "Argument error! " 1>&2
+    exit 1
+  ;;
+  esac
+done
+
+is_sudo_user
+if [ $? == 0 ]; then
+  SUDO_USER=true
+fi
+MODULE_LIST_RESOLVED=()
+if [ ${#MODULE_LIST[@]} -gt 0 ]; then
+  c=0
+  RESOLVED_DIR=${PACKAGE_DIR}
+  if [ ${SKIP_PACKAGE} == true ]; then
+    RESOLVED_DIR=${MODULES_DIR}
+  fi
+  for server in ${MODULE_LIST[@]}
+  do
+    server_list=`ls ${RESOLVED_DIR} | grep -E "^(${server}|${server}_[0-9]+\\.[0-9]+\\.[0-9]+\\.RELEASE_[0-9]+)(\\.tar\\.gz|\\.zip|)$"`
+    for _server in ${server_list}
+    do
+      # Is there a better way to cut the string?
+      _server=${_server%%.tar.gz*}
+      _server=${_server%%zip*}
+      MODULE_LIST_RESOLVED[$c]=${_server}
+      c=$(($c + 1))
+    done
+  done
+  if [ ${SKIP_PACKAGE} == true ]; then
+    MODULE_LIST=${MODULE_LIST_RESOLVED}
+  else
+    PACKAGE_NAMES=${MODULE_LIST_RESOLVED}
+  fi
+fi
+
+if [ ! -d ${MODULES_DIR} ]; then
+  LOG INFO  "Creating directory: ["${MODULES_DIR}"]."
+  mkdir -p ${MODULES_DIR}
+fi
+
+if [ ${SAFE_MODE} == false ]; then
+  LOG INFO  "\033[1m ####### Start To Clean Modules Directory ######\033[0m"
+  LOG INFO  "Cleanning...."
+  if [ ${MODULES_DIR} == "" ] || [ ${MODULES_DIR} == "/" ]; then
+    LOG INFO  "Illegal modules directory: ${MODULES_DIR}" 1>&2
+    exit 1
+  fi
+  clean_modules
+  LOG INFO "\033[1m ####### Finish To Clean Modules Directory ######\033[0m"
+fi
+
+if [ ${SKIP_PACKAGE} == false ]; then
+  LOG INFO  "\033[1m ####### Start To Uncompress Packages ######\033[0m"
+  LOG INFO  "Uncompressing...."
+  uncompress_packages
+  LOG INFO  "\033[1m ####### Finish To Umcompress Packages ######\033[0m"
+fi
+
+if [ ${#MODULE_LIST[@]} -gt 0 ]; then
+  for server in ${MODULE_LIST}
+  do
+    if [ ! -f ${MODULES_DIR}/${server}/${CONF_FILE_PATH} ]; then
+      LOG INFO  "Module [${server}] defined doesn't have configure.sh shell" 1>&2
+      exit 1
+    fi
+  done
+  install_modules
+else
+  # Scan modules directory
+  scan_to_install_modules ${MODULES_DIR}
+fi
+
+exit 0
+
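Going by the usage() text above, typical invocations might look like the following (module names are the two standard ones listed in start-all.sh; all flags are defined in the script):

```bash
# Unpack and install only the two standard modules, answering prompts interactively:
./bin/install.sh -m datax-admin,datax-executor

# Re-run configuration without decompressing packages again, skipping all prompts:
./bin/install.sh --skip-pack --force

# Clean the existing modules directory before installing (use with care):
./bin/install.sh --unsafe -f
```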

+ 31 - 0
bin/start-all.sh

@@ -0,0 +1,31 @@
+#!/bin/bash
+
+START_MODULES=("datax-admin"  "datax-executor" )
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+SHELL_LOG="${BIN}/console.out"
+
+LOG INFO "\033[1m Try To Start Modules In Order \033[0m"
+for module in ${START_MODULES[@]}
+do
+  ${BIN}/start.sh -m ${module}
+  if [ $? != 0 ]; then
+    LOG ERROR "\033[1m Start Modules [${module}] Failed! \033[0m"
+    exit 1
+  fi
+done

+ 84 - 0
bin/start.sh

@@ -0,0 +1,84 @@
+#!/bin/bash
+
+
+MODULE_NAME=""
+MODULE_DEFAULT_PREFIX="datax-"
+
+usage(){
+  echo "Usage is [-m module will be started]"
+}
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+MODULE_DIR=${BIN}/../modules
+SHELL_LOG="${BIN}/console.out"
+
+interact_echo(){
+  while [ 1 ]; do
+    read -p "$1 (Y/N)" yn
+    if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+      return 0
+    elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+      return 1
+    else
+      echo "Unknown choise: [$yn], please choose again."
+    fi
+  done
+}
+
+start_single_module(){
+  LOG INFO "\033[1m ####### Begin To Start Module: [$1] ######\033[0m"
+  if [ -f "${MODULE_DIR}/$1/bin/$1.sh" ]; then
+    ${MODULE_DIR}/$1/bin/$1.sh start
+  elif [[ $1 != ${MODULE_DEFAULT_PREFIX}* ]] && [ -f "${MODULE_DIR}/${MODULE_DEFAULT_PREFIX}$1/bin/${MODULE_DEFAULT_PREFIX}$1.sh" ]; then
+    interact_echo "Do you mean [${MODULE_DEFAULT_PREFIX}$1] ?"
+    if [ $? == 0 ]; then
+      ${MODULE_DIR}/${MODULE_DEFAULT_PREFIX}$1/bin/${MODULE_DEFAULT_PREFIX}$1.sh start
+    fi
+  else
+    LOG ERROR "Cannot find the startup script for module: [$1], please check your installation"
+    exit 1
+  fi
+}
+
+while [ 1 ]; do
+  case ${!OPTIND} in
+  -m|--modules)
+    if [ -z $2 ]; then
+      LOG ERROR "No module provided"
+      exit 1
+    fi
+    MODULE_NAME=$2
+    shift 2
+  ;;
+  "")
+    break
+  ;;
+  *)
+    usage
+    exit 1
+  ;;
+  esac
+done
+
+if [ "x${MODULE_NAME}" == "x" ]; then
+  usage
+  exit 1
+fi
+
+start_single_module ${MODULE_NAME}
+exit $?
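start.sh (and the stop.sh counterpart further down) operates on a single module passed with -m; the datax- prefix may be omitted, in which case the script asks whether the prefixed module name was meant. For example:

```bash
./bin/start.sh -m datax-admin   # runs modules/datax-admin/bin/datax-admin.sh start
./bin/start.sh -m executor      # prompts: Do you mean [datax-executor] ?
./bin/start-all.sh              # starts datax-admin and datax-executor in order
```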

+ 33 - 0
bin/stop-all.sh

@@ -0,0 +1,33 @@
+#!/bin/bash
+
+
+STOP_MODULES=("datax-admin"  "datax-executor" )
+
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+SHELL_LOG="${BIN}/console.out"
+
+LOG INFO "\033[1m Try to Stop Modules In Order \033[0m"
+for module in ${STOP_MODULES[@]}
+do
+  ${BIN}/stop.sh -m ${module}
+  if [ $? != 0 ]; then
+    LOG ERROR "\033[1m Stop Modules [${module}] Failed! \033[0m"
+    exit 1
+  fi
+done

+ 83 - 0
bin/stop.sh

@@ -0,0 +1,83 @@
+#!/bin/bash
+
+
+MODULE_NAME=""
+MODULE_DEFAULT_PREFIX="datax-"
+usage(){
+  echo "Usage is [-m module will be stoped]"
+}
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+MODULE_DIR=${BIN}/../modules
+SHELL_LOG="${BIN}/console.out"
+
+interact_echo(){
+  while [ 1 ]; do
+    read -p "$1 (Y/N)" yn
+    if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+      return 0
+    elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+      return 1
+    else
+      echo "Unknown choise: [$yn], please choose again."
+    fi
+  done
+}
+
+stop_single_module(){
+  LOG INFO "\033[1m ####### Begin To Stop Module: [$1] ######\033[0m"
+  if [ -f "${MODULE_DIR}/$1/bin/$1.sh" ]; then
+    ${MODULE_DIR}/$1/bin/$1.sh stop
+  elif [[ $1 != ${MODULE_DEFAULT_PREFIX}* ]] && [ -f "${MODULE_DIR}/${MODULE_DEFAULT_PREFIX}$1/bin/${MODULE_DEFAULT_PREFIX}$1.sh" ]; then
+    interact_echo "Do you mean [${MODULE_DEFAULT_PREFIX}$1] ?"
+    if [ $? == 0 ]; then
+      ${MODULE_DIR}/${MODULE_DEFAULT_PREFIX}$1/bin/${MODULE_DEFAULT_PREFIX}$1.sh stop
+    fi
+  else
+    LOG ERROR  "Cannot find the stop script for module: [$1], please check your installation"
+    exit 1
+  fi
+}
+
+while [ 1 ]; do
+  case ${!OPTIND} in
+  -m|--modules)
+    if [ -z $2 ]; then
+      LOG ERROR "No module provided"
+      exit 1
+    fi
+    MODULE_NAME=$2
+    shift 2
+  ;;
+  "")
+    break
+  ;;
+  *)
+    usage
+    exit 1
+  ;;
+  esac
+done
+
+if [ "x${MODULE_NAME}" == "x" ]; then
+  usage
+  exit 1
+fi
+
+stop_single_module ${MODULE_NAME}
+exit $?

+ 843 - 0
datax-admin/pom.xml

@@ -0,0 +1,843 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.wugui</groupId>
+        <artifactId>datax-web</artifactId>
+        <version>2.1.2</version>
+    </parent>
+    <artifactId>datax-admin</artifactId>
+    <packaging>jar</packaging>
+    <properties>
+        <java.version>1.8</java.version>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-starter-parent</artifactId>
+                <version>${spring-boot.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+            <exclusions>
+                <exclusion>
+                    <artifactId>logback-classic</artifactId>
+                    <groupId>ch.qos.logback</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <!-- 引入该 spring-cloud-context.jar 使 bootstrap.properties 配置文件即可生效 -->
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-context</artifactId>
+            <version>${spring-boot.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-security</artifactId>
+        </dependency>
+
+        <!-- starter-actuator -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-actuator</artifactId>
+        </dependency>
+
+        <!-- Mybatis Plus -->
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus-boot-starter</artifactId>
+            <version>${mybatisplus.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.baomidou</groupId>
+                    <artifactId>mybatis-plus-generator</artifactId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus</artifactId>
+            <version>${mybatisplus.version}</version>
+        </dependency>
+        <!-- 接口管理 -->
+        <dependency>
+            <groupId>io.springfox</groupId>
+            <artifactId>springfox-swagger2</artifactId>
+            <version>${swagger.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>io.swagger</groupId>
+                    <artifactId>swagger-models</artifactId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>io.swagger</groupId>
+            <artifactId>swagger-models</artifactId>
+            <version>${swagger-models.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>swagger-annotations</artifactId>
+                    <groupId>io.swagger</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>com.github.xiaoymin</groupId>
+            <artifactId>swagger-bootstrap-ui</artifactId>
+            <version>${swagger-bootstrap-ui.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <optional>true</optional>
+        </dependency>
+
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>${fastjson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>${postgresql.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.oracle</groupId>
+            <artifactId>ojdbc6</artifactId>
+            <version>11.2.0.3</version>
+            <scope>system</scope>
+            <systemPath>${basedir}/src/main/lib/ojdbc6-11.2.0.3.jar</systemPath>
+        </dependency>
+
+        <dependency>
+            <groupId>com.microsoft.sqlserver</groupId>
+            <artifactId>sqljdbc4</artifactId>
+            <version>4.0</version>
+            <scope>system</scope>
+            <systemPath>${basedir}/src/main/lib/sqljdbc4-4.0.jar</systemPath>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j-api.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback-classic.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>${mysql-connector.version}</version>
+        </dependency>
+
+        <!-- datax-core -->
+        <dependency>
+            <groupId>com.wugui</groupId>
+            <artifactId>datax-core</artifactId>
+            <version>${project.parent.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+        </dependency>
+        <!-- mail-starter -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-mail</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.jsonwebtoken</groupId>
+            <artifactId>jjwt</artifactId>
+            <version>${jjwt.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>29.0-jre</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>com.sun.jersey</artifactId>
+                    <groupId>jersey-json</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jsr305</artifactId>
+                    <groupId>com.google.code.findbugs</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jettison</artifactId>
+                    <groupId>org.codehaus.jettison</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-core-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-mapper-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-cli</artifactId>
+                    <groupId>commons-cli</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-logging</artifactId>
+                    <groupId>commons-logging</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-collections</artifactId>
+                    <groupId>commons-collections</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-lang</artifactId>
+                    <groupId>commons-lang</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>curator-framework</artifactId>
+                    <groupId>org.apache.curator</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j</artifactId>
+                    <groupId>log4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>netty</artifactId>
+                    <groupId>io.netty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jsp-api</artifactId>
+                    <groupId>javax.servlet.jsp</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-util</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <version>${hive.jdbc.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>jsr305</artifactId>
+                    <groupId>com.google.code.findbugs</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jettison</artifactId>
+                    <groupId>org.codehaus.jettison</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-cli</artifactId>
+                    <groupId>commons-cli</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>curator-client</artifactId>
+                    <groupId>org.apache.curator</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-compress</artifactId>
+                    <groupId>org.apache.commons</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-hdfs</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>snappy</artifactId>
+                    <groupId>org.iq80.snappy</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>antlr-runtime</artifactId>
+                    <groupId>org.antlr</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-client</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>libthrift</artifactId>
+                    <groupId>org.apache.thrift</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>twill-common</artifactId>
+                    <groupId>org.apache.twill</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>twill-core</artifactId>
+                    <groupId>org.apache.twill</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>twill-discovery-api</artifactId>
+                    <groupId>org.apache.twill</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>twill-discovery-core</artifactId>
+                    <groupId>org.apache.twill</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>twill-zookeeper</artifactId>
+                    <groupId>org.apache.twill</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>avro</artifactId>
+                    <groupId>org.apache.avro</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>curator-recipes</artifactId>
+                    <groupId>org.apache.curator</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-common</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-hadoop-compat</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-hadoop2-compat</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-server</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>curator-framework</artifactId>
+                    <groupId>org.apache.curator</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guice-servlet</artifactId>
+                    <groupId>com.google.inject.extensions</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-client</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-api</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-core-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-mapper-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-jaxrs</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-xc</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jersey-client</artifactId>
+                    <groupId>com.sun.jersey</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jamon-runtime</artifactId>
+                    <groupId>org.jamon</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-logging</artifactId>
+                    <groupId>commons-logging</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-annotations</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-collections</artifactId>
+                    <groupId>commons-collections</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jersey-guice</artifactId>
+                    <groupId>com.sun.jersey.contribs</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-slf4j-impl</artifactId>
+                    <groupId>org.apache.logging.log4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hive-shims-common</artifactId>
+                    <groupId>org.apache.hive.shims</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>javax.servlet</artifactId>
+                    <groupId>org.eclipse.jetty.orbit</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jsp-api</artifactId>
+                    <groupId>javax.servlet.jsp</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jasper-compiler</artifactId>
+                    <groupId>tomcat</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-all</artifactId>
+                    <groupId>org.eclipse.jetty.aggregate</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-util</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-cli</artifactId>
+                    <groupId>commons-cli</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-core-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-mapper-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-util</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${hbase.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-logging</artifactId>
+                    <groupId>commons-logging</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-auth</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-mapreduce-client-core</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-annotations</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-protocol</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jackson-mapper-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-util</artifactId>
+                    <groupId>org.mortbay.jetty</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.phoenix</groupId>
+            <artifactId>phoenix-core</artifactId>
+            <version>${phoenix.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-cli</artifactId>
+                    <groupId>commons-cli</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>avro</artifactId>
+                    <groupId>org.apache.avro</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guice</artifactId>
+                    <groupId>com.google.inject</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-api</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-auth</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jersey-core</artifactId>
+                    <groupId>com.sun.jersey</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>guice-servlet</artifactId>
+                    <groupId>com.google.inject.extensions</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jersey-server</artifactId>
+                    <groupId>com.sun.jersey</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jersey-json</artifactId>
+                    <groupId>com.sun.jersey</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>netty</artifactId>
+                    <groupId>io.netty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-client</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-io</artifactId>
+                    <groupId>commons-io</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-mapreduce-client-core</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>zookeeper</artifactId>
+                    <groupId>org.apache.zookeeper</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-math3</artifactId>
+                    <groupId>org.apache.commons</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-annotations</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-hdfs</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-client</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-yarn-server-common</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>javax.ws.rs-api</artifactId>
+                    <groupId>javax.ws.rs</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>htrace-core</artifactId>
+                    <groupId>org.apache.htrace</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jline</artifactId>
+                    <groupId>jline</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>fastutil</artifactId>
+                    <groupId>it.unimi.dsi</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>commons-lang</artifactId>
+                    <groupId>commons-lang</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jsr305</artifactId>
+                    <groupId>com.google.code.findbugs</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-common</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>javax.servlet.jsp-api</artifactId>
+                    <groupId>javax.servlet.jsp</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hbase-server</artifactId>
+                    <groupId>org.apache.hbase</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>javax.servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-io</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-http</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-security</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-server</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-servlet</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jetty-webapp</artifactId>
+                    <groupId>org.eclipse.jetty</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.mongodb</groupId>
+            <artifactId>mongo-java-driver</artifactId>
+            <version>${mongo-java-driver.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>ru.yandex.clickhouse</groupId>
+            <artifactId>clickhouse-jdbc</artifactId>
+            <version>0.2.4</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>guava</artifactId>
+                    <groupId>com.google.guava</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <excludes>
+                        <exclude>**/*.yml</exclude>
+                        <exclude>**/*.properties</exclude>
+                        <exclude>**/*.sh</exclude>
+                        <exclude>**/*.xml</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>exec-maven-plugin</artifactId>
+                <version>1.4.0</version>
+                <executions>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.2.1</version>
+                <executions>
+                    <execution>
+                        <id>assemble</id>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <!-- install -->
+                        <phase>install</phase>
+                    </execution>
+                </executions>
+                <configuration>
+                    <appendAssemblyId>false</appendAssemblyId>
+                    <attach>false</attach>
+                    <descriptors>
+                        <descriptor>${basedir}/src/main/assembly/deploy.xml</descriptor>
+                    </descriptors>
+                    <finalName>${project.artifactId}_${project.version}_1</finalName>
+                    <outputDirectory>${project.parent.basedir}/packages</outputDirectory>
+                </configuration>
+            </plugin>
+        </plugins>
+
+    </build>
+
+
+</project>
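Note: the maven-assembly-plugin above is bound to the install phase and, with appendAssemblyId disabled, writes a tar.gz named after the artifact into the parent project's packages/ directory (the deploy.xml descriptor follows in the next file). A minimal build sketch, assuming the command is run from the repository root and datax-admin is the module directory name:

    # build only this module plus the modules it depends on, skipping tests
    mvn clean install -pl datax-admin -am -DskipTests
    # with appendAssemblyId=false the distributable lands as
    # packages/datax-admin_<version>_1.tar.gz under the parent directory
    ls packages/datax-admin_*_1.tar.gz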

+ 47 - 0
datax-admin/src/main/assembly/deploy.xml

@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+    <id>dist</id>
+    <formats>
+        <format>tar.gz</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <dependencySets>
+        <dependencySet>
+            <useProjectArtifact>true</useProjectArtifact>
+            <outputDirectory>datax-admin/lib</outputDirectory>
+            <scope>runtime</scope>
+        </dependencySet>
+    </dependencySets>
+    <fileSets>
+        <fileSet>
+            <lineEnding>unix</lineEnding>
+            <directory>./src/main/bin</directory>
+            <outputDirectory>datax-admin/bin</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+            <fileMode>0755</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/logs</directory>
+            <outputDirectory>datax-admin/logs</outputDirectory>
+        </fileSet>
+        <fileSet>
+            <directory>./src/main/resources</directory>
+            <includes>
+                <include>*.properties</include>
+                <include>logback.xml</include>
+                <include>application.yml</include>
+                <include>mybatis-mapper/**</include>
+                <include>i18n/**</include>
+                <include>static/**</include>
+            </includes>
+            <outputDirectory>datax-admin/conf</outputDirectory>
+        </fileSet>
+        <fileSet>
+            <directory>src/main/lib</directory>
+            <outputDirectory>datax-admin/lib</outputDirectory>
+        </fileSet>
+    </fileSets>
+</assembly>
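A rough sketch of what the generated tar.gz should contain, based only on the dependencySet and fileSets declared above:

    tar -tzf packages/datax-admin_*_1.tar.gz | head
    # datax-admin/bin/   startup scripts from src/main/bin (unix line endings, mode 0755)
    # datax-admin/conf/  *.properties, logback.xml, application.yml, mybatis-mapper/, i18n/, static/
    # datax-admin/lib/   runtime dependencies plus the jars kept in src/main/lib
    # datax-admin/logs/  log directory copied from src/main/logs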

+ 202 - 0
datax-admin/src/main/bin/configure.sh

@@ -0,0 +1,202 @@
+#!/bin/bash
+
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+SHELL_LOG="${DIR}/console.out"
+SERVER_NAME="datax-admin"
+USER=`whoami`
+SAFE_MODE=true
+SUDO_USER=false
+ENV_FILE_PATH="${DIR}/env.properties"
+
+usage(){
+  printf "Configure usage:\n"
+  printf "\t%-10s  %-10s  %-2s \n" --server "server-name" "Name of datax-admin server"
+  printf "\t%-10s  %-10s  %-2s \n" --unsafe "unsafe mode" "Will clean the directory existed"
+  printf "\t%-10s  %-10s  %-2s \n" --safe "safe mode" "Will not modify the directory existed (Default)"
+  printf "\t%-10s  %-10s  %-2s \n" "-h|--help" "usage" "List help document"
+}
+
+LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+interact_echo(){
+  while [ 1 ]; do
+    read -p "$1 (Y/N)" yn
+    if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+      return 0
+    elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+      return 1
+    else
+      echo "Unknown choise: [$yn], please choose again."
+    fi
+  done
+}
+
+is_sudo_user(){
+  sudo -v >/dev/null 2>&1
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+check_exist(){
+    if test -e "$1"; then
+        LOG INFO "Directory or file: [$1] has been exist"
+        if [ $2 == true ]; then
+           LOG INFO "Configure program will shutdown..."
+           exit 0
+        fi
+    fi
+}
+
+copy_replace(){
+    file_name=$1
+     if test -e "${CONF_PATH}/${file_name}";then
+        if [ ${SAFE_MODE} == true ]; then
+            check_exist "${CONF_PATH}/${file_name}" true
+        fi
+        LOG INFO "Delete file or directory: [${CONF_PATH}/${file_name}]"
+        rm -rf ${CONF_PATH}/${file_name}
+    fi
+    if test -e "${DIR}/../conf/${file_name}";then
+        LOG INFO "Copy from ${DIR}/../conf/${file_name}"
+        cp -R ${DIR}/../conf/${file_name} ${CONF_PATH}/
+    fi
+}
+
+mkdir_p(){
+    if [ ${SAFE_MODE} == true ]; then
+      check_exist $1 false
+    fi
+    if [ ! -d $1 ]; then
+        LOG INFO "Creating directory: ["$1"]."
+        #mkdir -p $1
+        if [ ${SUDO_USER} == true ]; then
+          sudo mkdir -p $1 && sudo chown -R ${USER} $1
+        else
+          mkdir -p $1
+        fi
+    fi
+}
+
+while [ 1 ]; do
+  case ${!OPTIND} in
+  --server)
+    SERVER_NAME=$2
+    shift 2
+  ;;
+  --unsafe)
+    SAFE_MODE=false
+    shift 1
+  ;;
+  --safe)
+    SAFE_MODE=true
+    shift 1
+  ;;
+  --help|-h)
+    usage
+    exit 0
+  ;;
+  *)
+    break
+  ;;
+  esac
+done
+
+is_sudo_user
+if [ $? == 0 ]; then
+  SUDO_USER=true
+fi
+
+BIN=`abs_path`
+SERVER_NAME_SIMPLE=${SERVER_NAME/datax-/}
+
+LOG_PATH=${BIN}/../logs
+if [ "x${BASE_LOG_DIR}" != "x" ]; then
+    LOG_PATH=${BASE_LOG_DIR}/${SERVER_NAME_SIMPLE}
+    sed -ri "s![#]?(WEB_LOG_PATH=)\S*!\1${LOG_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+CONF_PATH=${BIN}/../conf
+if [ "x${BASE_CONF_DIR}" != "x" ]; then
+  CONF_PATH=${BASE_CONF_DIR}/${SERVER_NAME_SIMPLE}
+  sed -ri "s![#]?(WEB_CONF_PATH=)\S*!\1${CONF_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+DATA_PATH=${BIN}/../data
+if [ "x${BASE_DATA_DIR}" != "x" ]; then
+  DATA_PATH=${BASE_DATA_DIR}/${SERVER_NAME_SIMPLE}
+  sed -ri "s![#]?(DATA_PATH=)\S*!\1${DATA_PATH}!g" ${ENV_FILE_PATH}
+fi
+
+echo "Start to make directory"
+# Start to make directory
+LOG INFO "\033[1m Start to build directory\033[0m"
+mkdir_p ${LOG_PATH}
+mkdir_p ${CONF_PATH}
+mkdir_p ${DATA_PATH}
+if [ "x${BASE_CONF_DIR}" != "x" ]; then
+  LOG INFO "\033[1m Start to copy configuration file/directory\033[0m"
+  # Copy the configuration file
+  copy_replace bootstrap.properties
+  copy_replace application.yml
+  copy_replace logback.xml
+  copy_replace i18n
+  copy_replace mybatis-mapper
+  copy_replace static
+fi
+echo "end to make directory"
+
+
+BOOTSTRAP_PROP_FILE="${CONF_PATH}/bootstrap.properties"
+# Start to initialize the database
+echo "Start to initialize the database"
+if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
+   mysql --version >/dev/null 2>&1
+   if [ $? == 0 ]; then
+      LOG INFO "\033[1m Found the mysql command, so begin to initialize the database\033[0m"
+      interact_echo "Do you want to initialize the database with sql: [${SQL_SOURCE_PATH}]?"
+      if [ $? == 0 ]; then
+        read -p "Please input the db host(default: 127.0.0.1): " HOST
+        if [ "x${HOST}" == "x" ]; then
+          HOST="127.0.0.1"
+        fi
+        while [ 1 ]; do
+          read -p "Please input the db port(default: 3306): " PORT
+          if [ "x${PORT}" == "x" ]; then
+            PORT=3306
+            break
+          elif [ ${PORT} -gt 0 ] 2>/dev/null; then
+            break
+          else
+            echo "${PORT} is not a number, please input again"
+          fi
+        done
+        read -p "Please input the db username(default: root): " USERNAME
+        if [ "x${USERNAME}" == "x" ]; then
+          USERNAME="root"
+        fi
+        read -p "Please input the db password(default: ""): " PASSWORD
+        read -p "Please input the db name(default: dataxweb)" DATABASE
+        if [ "x${DATABASE}" == "x" ]; then
+          DATABASE="dataxweb"
+        fi
+        mysql -h ${HOST} -P ${PORT} -u ${USERNAME} -p${PASSWORD}  --default-character-set=utf8 -e \
+        "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
+        sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(DB_USERNAME=)\S*!\1${USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(DB_PASSWORD=)\S*!\1${PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(DB_DATABASE=)\S*!\1${DATABASE}!g" ${BOOTSTRAP_PROP_FILE}
+      fi
+   fi
+fi
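A hedged usage sketch for the script above. The BASE_* and SQL_SOURCE_PATH variables are optional and only take effect when set; the paths shown are placeholders, not values shipped in this commit:

    # optional: relocate logs/conf/data and point at an init SQL script (example paths)
    export BASE_LOG_DIR=/data/datax/logs
    export BASE_CONF_DIR=/data/datax/conf
    export BASE_DATA_DIR=/data/datax/data
    export SQL_SOURCE_PATH=/path/to/datax_web.sql
    # --safe keeps existing directories; use --unsafe to let the script delete and recreate them
    ./configure.sh --server datax-admin --safe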

+ 280 - 0
datax-admin/src/main/bin/datax-admin.sh

@@ -0,0 +1,280 @@
+#!/bin/bash
+#
+
+FRIEND_NAME=DATAX-ADMIN
+MAIN_CLASS=com.wugui.datax.admin.DataXAdminApplication
+if [ ! ${ENV_FILE} ]; then
+    ENV_FILE="env.properties"
+fi
+SLEEP_TIMEREVAL_S=2
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+function LOG(){
+  currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+  echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+verify_java_env(){
+  if [ "x${JAVA_HOME}" != "x" ]; then
+    ${JAVA_HOME}/bin/java -version >/dev/null 2>&1
+  else
+    java -version >/dev/null 2>&1
+  fi
+  if [ $? -ne 0 ]; then
+    cat 1>&2 <<EOF
++========================================================================+
+| Error: Java environment is not available, please check your JAVA_HOME  |
++------------------------------------------------------------------------+
+EOF
+  return 1
+  fi
+  return 0
+}
+
+load_env(){
+    LOG INFO "load environment variables"
+    while read line
+    do
+        if [[ ! -z $(echo "${line}" | grep "=") ]]; then
+                key=${line%%=*}
+                value=${line#*=}
+                key1=$(echo ${key} | tr '.' '_')
+                if [ -z $(echo "${key1}" | grep -P '\s*#+.*') ]; then
+                        eval "${key1}=${value}"
+                fi
+        fi
+    done < "${BIN}/${ENV_FILE}"
+}
+
+BIN=`abs_path`
+SHELL_LOG="${BIN}/console.out"
+load_env
+
+#verify environment
+verify_java_env
+if [ $? -ne 0 ]; then
+  # $? has already been reset by the [ ] test above, so exit with an explicit failure code
+  exit 1
+fi
+
+if [[ ! ${SERVICE_LOG_PATH} ]]; then
+    SERVICE_LOG_PATH=${BIN}/../logs
+fi
+
+if [[ ! ${SERVICE_CONF_PATH} ]]; then
+    SERVICE_CONF_PATH=${BIN}/../conf
+fi
+
+if [[ ! ${DATA_PATH} ]]; then
+   DATA_PATH=${BIN}/../data
+fi
+
+if [[ ! ${MAIL_USERNAME} ]]; then
+    MAIL_USERNAME="datax"
+fi
+
+if [[ ! ${MAIL_PASSWORD} ]]; then
+    MAIL_PASSWORD="123456"
+fi
+
+if [[ ! ${SERVER_PORT} ]]; then
+   SERVER_PORT=9004
+fi
+
+if [[ ! ${JAVA_OPTS} ]]; then
+    JAVA_OPTS=" -Xms2g -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8"
+fi
+
+if [[ ! ${REMOTE_DEBUG_SWITCH} ]]; then
+    REMOTE_DEBUG_SWITCH=false
+fi
+
+if [[ ! ${REMOTE_DEBUG_PORT} ]]; then
+    REMOTE_DEBUG_PORT="8089"
+fi
+
+
+LIB_PATH=${BIN}/../lib
+USER_DIR=${BIN}/../
+CLASSPATH=${LIB_PATH}"/*:"${SERVICE_CONF_PATH}":."
+if [ ${REMOTE_DEBUG_SWITCH} == true ]; then
+    JAVA_OPTS=${JAVA_OPTS}" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${REMOTE_DEBUG_PORT}"
+fi
+JAVA_OPTS=${JAVA_OPTS}" -XX:HeapDumpPath="${SERVICE_LOG_PATH}" -Dlog.path="${SERVICE_LOG_PATH}
+JAVA_OPTS=${JAVA_OPTS}" -Duser.dir="${USER_DIR}
+JAVA_OPTS=${JAVA_OPTS}" -Dserver.port="${SERVER_PORT}" -Ddata.path="${DATA_PATH}"  -Dmail.username="${MAIL_USERNAME}" -Dmail.password="${MAIL_PASSWORD}
+if [ "x"${PID_FILE_PATH} != "x" ]; then
+  JAVA_OPTS=${JAVA_OPTS}" -Dpid.file="${PID_FILE_PATH}
+fi
+JAVA_OPTS=${JAVA_OPTS}" -Dlogging.config="${SERVICE_CONF_PATH}"/logback.xml"
+JAVA_OPTS=${JAVA_OPTS}" -classpath "${CLASSPATH}
+
+if [ "x${JAVA_HOME}" != "x" ]; then
+  EXE_JAVA=${JAVA_HOME}"/bin/java "${JAVA_OPTS}" "${MAIN_CLASS}
+  JPS=${JAVA_HOME}/bin/jps
+else
+  EXE_JAVA="java "${JAVA_OPTS}" "${MAIN_CLASS}
+  JPS="jps"
+fi
+
+usage(){
+    echo " usage is [start|stop|shutdown|restart]"
+}
+
+# check if the process still in jvm
+status_class(){
+    local p=""
+    if [ "x"${PID_FILE_PATH} != "x" ]; then
+      if [ -f ${PID_FILE_PATH} ]; then
+        local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
+        if [ "x"${pid_in_file} !=  "x" ]; then
+          p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+        fi
+      fi
+    else
+      p=`${JPS} -l | grep "$2" | awk '{print $1}'`
+    fi
+    if [ -n "$p" ]; then
+        # echo "$1 ($2) is still running with pid $p"
+        return 0
+    else
+        # echo "$1 ($2) does not appear in the java process table"
+        return 1
+    fi
+}
+
+wait_for_startup(){
+    local now_s=`date '+%s'`
+    local stop_s=$((${now_s} + $1))
+    while [ ${now_s} -le ${stop_s} ];do
+        status_class ${FRIEND_NAME} ${MAIN_CLASS}
+        if [ $? -eq 0 ]; then
+            return 0
+        fi
+        sleep ${SLEEP_TIMEREVAL_S}
+        now_s=`date '+%s'`
+    done
+    exit 1
+}
+
+wait_for_stop(){
+    local now_s=`date '+%s'`
+    local stop_s=$((${now_s} + $1))
+    while [ ${now_s} -le ${stop_s} ];do
+        status_class ${FRIEND_NAME} ${MAIN_CLASS}
+        if [ $? -eq 1 ]; then
+            return 0
+        fi
+        sleep ${SLEEP_TIMEREVAL_S}
+        now_s=`date '+%s'`
+    done
+    return 1
+}
+
+start_m(){
+    status_class ${FRIEND_NAME} ${MAIN_CLASS}
+    if [ $? -eq 0 ]; then
+        LOG INFO "${FRIEND_NAME} has been started in process"
+        exit 0
+    fi
+    LOG INFO ${EXE_JAVA}
+    nohup ${EXE_JAVA} >${SHELL_LOG} 2>&1 &
+    LOG INFO "Waiting ${FRIEND_NAME} to start complete ..."
+    wait_for_startup 20
+    if [ $? -eq 0 ]; then
+        LOG INFO "${FRIEND_NAME} start success"
+        return 0
+    else
+        LOG ERROR "${FRIEND_NAME} start exceeded over 20s" >&2
+        return 1
+    fi
+}
+
+stop_m(){
+    local p=""
+    if [ "x"${PID_FILE_PATH} != "x" ]; then
+      if [ -f ${PID_FILE_PATH} ]; then
+        local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
+        if [ "x"${pid_in_file} !=  "x" ]; then
+          p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+        fi
+      fi
+    else
+      p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
+    fi
+    if [ -z "${p}" ]; then
+        LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
+        return 0
+    fi
+    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
+    case "`uname`" in
+        CYGWIN*) taskkill /PID "${p}" ;;
+        *) kill -SIGTERM "${p}" ;;
+    esac
+    LOG INFO "Waiting ${FRIEND_NAME} to stop complete ..."
+    wait_for_stop 20
+    if [ $? -eq 0 ]; then
+        LOG INFO "${FRIEND_NAME} stop success"
+        return 0
+    else
+        LOG ERROR "${FRIEND_NAME} stop exceeded over 20s" >&2
+        return 1
+    fi
+}
+
+shutdown_m(){
+    local p=""
+    if [ "x"${PID_FILE_PATH} != "x" ]; then
+      if [ -f ${PID_FILE_PATH} ]; then
+        local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
+        if [ "x"${pid_in_file} !=  "x" ]; then
+          p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+        fi
+      fi
+    else
+      p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
+    fi
+    if [ -z "${p}" ]; then
+         LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
+        return 0
+    fi
+    LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
+    case "`uname`" in
+        CYGWIN*) taskkill /F /PID "${p}" ;;
+        *) kill -9 "${p}" ;;
+    esac
+}
+
+restart_m(){
+    stop_m
+    if [ $? -eq 0 ]; then
+        start_m
+        exit $?
+    else
+        LOG ERROR "${FRIEND_NAME} restart fail" >&2
+        exit 1
+    fi
+}
+if [ ! $1 ]; then
+    usage
+    exit 1;
+fi
+case $1 in
+    start) start_m;;
+    stop) stop_m;;
+    shutdown) shutdown_m;;
+    restart) restart_m;;
+    *)
+       usage
+       exit 1
+     ;;
+esac
+exit $?

+ 21 - 0
datax-admin/src/main/bin/env.properties

@@ -0,0 +1,21 @@
+# environment variables
+
+#JAVA_HOME=""
+
+WEB_LOG_PATH=${BIN}/../logs
+WEB_CONF_PATH=${BIN}/../conf
+
+DATA_PATH=${BIN}/../data
+SERVER_PORT=9527
+
+#PID_FILE_PATH=${BIN}/dataxadmin.pid
+
+
+# mail account
+MAIL_USERNAME=""
+MAIL_PASSWORD=""
+
+
+#debug
+#REMOTE_DEBUG_SWITCH=true
+#REMOTE_DEBUG_PORT=7003
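To attach a remote debugger, the two commented keys above can simply be switched on; a sketch (the port is just the example value already present in the file):

    sed -i 's/^#REMOTE_DEBUG_SWITCH=true/REMOTE_DEBUG_SWITCH=true/' env.properties
    sed -i 's/^#REMOTE_DEBUG_PORT=7003/REMOTE_DEBUG_PORT=7003/' env.properties
    ./datax-admin.sh restart    # the JVM then listens for a JDWP debugger on port 7003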

+ 38 - 0
datax-admin/src/main/java/com/wugui/datax/admin/DataXAdminApplication.java

@@ -0,0 +1,38 @@
+package com.wugui.datax.admin;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
+import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
+import org.springframework.core.env.Environment;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+@EnableSwagger2
+@SpringBootApplication(exclude = {MongoAutoConfiguration.class, MongoDataAutoConfiguration.class})
+public class DataXAdminApplication {
+
+    private static Logger logger = LoggerFactory.getLogger(DataXAdminApplication.class);
+
+    public static void main(String[] args) throws UnknownHostException {
+        Environment env = new SpringApplication(DataXAdminApplication.class).run(args).getEnvironment();
+        String envPort = env.getProperty("server.port");
+        String envContext = env.getProperty("server.contextPath");
+        String port = envPort == null ? "8080" : envPort;
+        String context = envContext == null ? "" : envContext;
+        String path = port + "" + context + "/doc.html";
+        String externalAPI = InetAddress.getLocalHost().getHostAddress();
+        logger.info(
+                "Access URLs:\n----------------------------------------------------------\n\t"
+                        + "Local-API: \t\thttp://127.0.0.1:{}\n\t"
+                        + "External-API: \thttp://{}:{}\n\t"
+                        + "web-URL: \t\thttp://127.0.0.1:{}/index.html\n\t----------------------------------------------------------",
+                path, externalAPI, path, port);
+    }
+
+
+}

+ 44 - 0
datax-admin/src/main/java/com/wugui/datax/admin/config/MybatisPlusConfig.java

@@ -0,0 +1,44 @@
+package com.wugui.datax.admin.config;
+
+import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
+import com.baomidou.mybatisplus.core.injector.ISqlInjector;
+import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+
+/**
+ * MybatisPlus configuration class (Spring Boot style)
+ *
+ * @author huzekang
+ */
+@EnableTransactionManagement
+@Configuration
+@MapperScan("com.wugui.datax.admin.mapper")
+public class MybatisPlusConfig {
+
+    /**
+     * Pagination plugin
+     */
+    @Bean
+    public PaginationInterceptor paginationInterceptor() {
+
+        PaginationInterceptor paginationInterceptor = new PaginationInterceptor();
+        return paginationInterceptor.setOverflow(true);
+    }
+
+    /**
+     * MyBatisPlus logical delete; it must be enabled in the yml configuration.
+     * In version 3.0.7.1 LogicSqlInjector does nothing except extend DefaultSqlInjector,
+     * and later versions removed LogicSqlInjector altogether.
+     *
+     * @return
+     */
+    @Bean
+    public ISqlInjector sqlInjector() {
+        return new DefaultSqlInjector();
+    }
+
+
+}

+ 73 - 0
datax-admin/src/main/java/com/wugui/datax/admin/config/SecurityConfig.java

@@ -0,0 +1,73 @@
+package com.wugui.datax.admin.config;
+
+
+import com.wugui.datatx.core.util.Constants;
+import com.wugui.datax.admin.filter.JWTAuthenticationFilter;
+import com.wugui.datax.admin.filter.JWTAuthorizationFilter;
+import com.wugui.datax.admin.service.impl.UserDetailsServiceImpl;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
+import org.springframework.security.config.http.SessionCreationPolicy;
+import org.springframework.security.core.userdetails.UserDetailsService;
+import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+import org.springframework.web.cors.CorsConfiguration;
+import org.springframework.web.cors.CorsConfigurationSource;
+import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@EnableWebSecurity
+@EnableGlobalMethodSecurity(prePostEnabled = true)
+public class SecurityConfig extends WebSecurityConfigurerAdapter {
+
+    @Autowired
+    private UserDetailsService userDetailsService;
+
+    @Bean
+    UserDetailsService customUserService(){ // register the UserDetailsService bean
+        return new UserDetailsServiceImpl();
+    }
+
+
+
+    @Bean
+    public BCryptPasswordEncoder bCryptPasswordEncoder() {
+        return new BCryptPasswordEncoder();
+    }
+
+    @Override
+    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
+        auth.userDetailsService(userDetailsService).passwordEncoder(bCryptPasswordEncoder());
+    }
+
+    @Override
+    protected void configure(HttpSecurity http) throws Exception {
+        http.cors().and().csrf().disable()
+                .authorizeRequests()
+                .antMatchers("/static/**","/index.html","/favicon.ico","/avatar.jpg").permitAll()
+                .antMatchers("/api/callback","/api/processCallback","/api/registry","/api/registryRemove").permitAll()
+                .antMatchers("/doc.html","/swagger-resources/**","/webjars/**","/*/api-docs").anonymous()
+                .anyRequest().authenticated()
+                .and()
+                .addFilter(new JWTAuthenticationFilter(authenticationManager()))
+                .addFilter(new JWTAuthorizationFilter(authenticationManager()))
+                .sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS);
+    }
+
+    @Bean
+    CorsConfigurationSource corsConfigurationSource() {
+        final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
+        CorsConfiguration config = new CorsConfiguration();
+        config.addAllowedMethod(Constants.SPLIT_STAR);
+        config.applyPermitDefaultValues();
+        source.registerCorsConfiguration("/**", config);
+        return source;
+    }
+
+}
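Given the stateless filter chain above, only the whitelisted paths respond without a token; everything else expects a JWT issued by JWTAuthenticationFilter. A hedged sketch of the resulting behaviour (port 9527 is the SERVER_PORT from env.properties earlier in this commit; /api/job/pageList is a hypothetical protected path used only for illustration, and the token itself comes from whatever login URL the filter registers):

    # the Swagger UI path is explicitly whitelisted, so this should answer without a token
    curl -s -o /dev/null -w '%{http_code}\n' http://127.0.0.1:9527/doc.html
    # any non-whitelisted path should be rejected until a JWT is supplied
    curl -s -o /dev/null -w '%{http_code}\n' http://127.0.0.1:9527/api/job/pageList
    # hypothetical authenticated call once a token has been obtained from the login filter
    curl -s http://127.0.0.1:9527/api/job/pageList -H "Authorization: Bearer <jwt>"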

+ 269 - 0
datax-admin/src/main/java/com/wugui/datax/admin/config/ServiceModelToSwagger2MapperImpl.java

@@ -0,0 +1,269 @@
+package com.wugui.datax.admin.config;
+
+import com.google.common.collect.Multimap;
+import io.swagger.models.*;
+import io.swagger.models.parameters.Parameter;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
+import org.springframework.context.MessageSource;
+import org.springframework.context.annotation.Primary;
+import org.springframework.context.i18n.LocaleContextHolder;
+import org.springframework.stereotype.Component;
+import springfox.documentation.service.ApiInfo;
+import springfox.documentation.service.ApiListing;
+import springfox.documentation.service.Documentation;
+import springfox.documentation.service.ResourceListing;
+import springfox.documentation.swagger2.mappers.*;
+
+import java.util.*;
+
+import static com.google.common.collect.Maps.newTreeMap;
+
+/**
+ * Custom ServiceModelToSwagger2Mapper that resolves operation notes, tags and
+ * parameter descriptions through the i18n MessageSource.
+ */
+@Component(value = "ServiceModelToSwagger2Mapper")
+@Primary
+@ConditionalOnWebApplication
+public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapper {
+
+
+    @Autowired
+    private ModelMapper modelMapper;
+    @Autowired
+    private ParameterMapper parameterMapper;
+    @Autowired
+    private SecurityMapper securityMapper;
+    @Autowired
+    private LicenseMapper licenseMapper;
+    @Autowired
+    private VendorExtensionsMapper vendorExtensionsMapper;
+
+    @Autowired
+    private MessageSource messageSource;
+
+    @Override
+    public Swagger mapDocumentation(Documentation from) {
+
+        if (from == null) {
+            return null;
+        }
+
+        Swagger swagger = new Swagger();
+
+        swagger.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
+        swagger.setSchemes(mapSchemes(from.getSchemes()));
+        swagger.setPaths(mapApiListings(from.getApiListings()));
+        swagger.setHost(from.getHost());
+        swagger.setDefinitions(modelsFromApiListings(from.getApiListings()));
+        swagger.setSecurityDefinitions(securityMapper.toSecuritySchemeDefinitions(from.getResourceListing()));
+        ApiInfo info = fromResourceListingInfo(from);
+        if (info != null) {
+            swagger.setInfo(mapApiInfo(info));
+        }
+        swagger.setBasePath(from.getBasePath());
+        swagger.setTags(tagSetToTagList(from.getTags()));
+        List<String> list2 = from.getConsumes();
+        if (list2 != null) {
+            swagger.setConsumes(new ArrayList<String>(list2));
+        } else {
+            swagger.setConsumes(null);
+        }
+        List<String> list3 = from.getProduces();
+        if (list3 != null) {
+            swagger.setProduces(new ArrayList<String>(list3));
+        } else {
+            swagger.setProduces(null);
+        }
+
+        return swagger;
+    }
+
+
+    @Override
+    protected Info mapApiInfo(ApiInfo from) {
+
+        if (from == null) {
+            return null;
+        }
+
+        Info info = new Info();
+
+        info.setLicense(licenseMapper.apiInfoToLicense(from));
+        info.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
+        info.setTermsOfService(from.getTermsOfServiceUrl());
+        info.setContact(map(from.getContact()));
+        info.setDescription(from.getDescription());
+        info.setVersion(from.getVersion());
+        info.setTitle(from.getTitle());
+
+        return info;
+    }
+
+    @Override
+    protected Contact map(springfox.documentation.service.Contact from) {
+
+        if (from == null) {
+            return null;
+        }
+
+        Contact contact = new Contact();
+
+        contact.setName(from.getName());
+        contact.setUrl(from.getUrl());
+        contact.setEmail(from.getEmail());
+
+        return contact;
+    }
+
+    @Override
+    protected Operation mapOperation(springfox.documentation.service.Operation from) {
+
+        if (from == null) {
+            return null;
+        }
+
+        Locale locale = LocaleContextHolder.getLocale();
+
+        Operation operation = new Operation();
+
+        operation.setSecurity(mapAuthorizations(from.getSecurityReferences()));
+        operation.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
+        operation.setDescription(messageSource.getMessage(from.getNotes(), null, from.getNotes(), locale));
+        operation.setOperationId(from.getUniqueId());
+        operation.setResponses(mapResponseMessages(from.getResponseMessages()));
+        operation.setSchemes(stringSetToSchemeList(from.getProtocol()));
+        Set<String> tagsSet = new HashSet<>(1);
+
+        if(from.getTags() != null && from.getTags().size() > 0){
+
+            List<String> list = new ArrayList<String>(tagsSet.size());
+
+            Iterator<String> it = from.getTags().iterator();
+            while(it.hasNext()){
+               String tag = it.next();
+               list.add(
+                   StringUtils.isNotBlank(tag) ? messageSource.getMessage(tag, null, tag, locale) : " ");
+            }
+
+            operation.setTags(list);
+        }else {
+            operation.setTags(null);
+        }
+
+        operation.setSummary(from.getSummary());
+        Set<String> set1 = from.getConsumes();
+        if (set1 != null) {
+            operation.setConsumes(new ArrayList<String>(set1));
+        } else {
+            operation.setConsumes(null);
+        }
+
+        Set<String> set2 = from.getProduces();
+        if (set2 != null) {
+            operation.setProduces(new ArrayList<String>(set2));
+        } else {
+            operation.setProduces(null);
+        }
+
+
+        operation.setParameters(parameterListToParameterList(from.getParameters()));
+        if (from.getDeprecated() != null) {
+            operation.setDeprecated(Boolean.parseBoolean(from.getDeprecated()));
+        }
+
+        return operation;
+    }
+
+    @Override
+    protected Tag mapTag(springfox.documentation.service.Tag from) {
+
+        if (from == null) {
+            return null;
+        }
+
+        Locale locale = LocaleContextHolder.getLocale();
+
+        Tag tag = new Tag();
+
+        tag.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
+        tag.setName(messageSource.getMessage(from.getName(), null, from.getName(), locale));
+        tag.setDescription(from.getDescription());
+
+        return tag;
+    }
+
+
+    private ApiInfo fromResourceListingInfo(Documentation documentation) {
+
+        if (documentation == null) {
+            return null;
+        }
+        ResourceListing resourceListing = documentation.getResourceListing();
+        if (resourceListing == null) {
+            return null;
+        }
+        ApiInfo info = resourceListing.getInfo();
+        if (info == null) {
+            return null;
+        }
+        return info;
+    }
+
+    protected List<Tag> tagSetToTagList(Set<springfox.documentation.service.Tag> set) {
+
+        if (set == null) {
+            return null;
+        }
+
+        List<Tag> list = new ArrayList<Tag>(set.size());
+        for (springfox.documentation.service.Tag tag : set) {
+            list.add(mapTag(tag));
+        }
+
+        return list;
+    }
+
+    protected List<Scheme> stringSetToSchemeList(Set<String> set) {
+        if (set == null) {
+            return null;
+        }
+
+        List<Scheme> list = new ArrayList<Scheme>(set.size());
+        for (String string : set) {
+            list.add(Enum.valueOf(Scheme.class, string));
+        }
+
+        return list;
+    }
+
+    protected List<Parameter> parameterListToParameterList(List<springfox.documentation.service.Parameter> list) {
+        if (list == null) {
+            return null;
+        }
+
+        List<Parameter> list1 = new ArrayList<Parameter>(list.size());
+
+        Locale locale = LocaleContextHolder.getLocale();
+
+        for (springfox.documentation.service.Parameter param : list) {
+            String description = messageSource.getMessage(param.getDescription(), null, param.getDescription(), locale);
+
+            springfox.documentation.service.Parameter parameter = new springfox.documentation.service.Parameter(
+                    param.getName(), description, param.getDefaultValue(), param.isRequired(),
+                    param.isAllowMultiple(), param.isAllowEmptyValue(), param.getModelRef(), param.getType(),
+                    param.getAllowableValues(), param.getParamType(), param.getParamAccess(), param.isHidden(),
+                    param.getPattern(), param.getCollectionFormat(), param.getOrder(), param.getScalarExample(),
+                    param.getExamples(), param.getVendorExtentions());
+            list1.add(parameterMapper.mapParameter(parameter));
+        }
+
+        return list1;
+    }
+
+
+    Map<String, Model> modelsFromApiListings(Multimap<String, ApiListing> apiListings) {
+        Map<String, springfox.documentation.schema.Model> definitions = newTreeMap();
+        for (ApiListing each : apiListings.values()) {
+            definitions.putAll(each.getModels());
+        }
+        return modelMapper.mapModels(definitions);
+    }
+
+}

+ 39 - 0
datax-admin/src/main/java/com/wugui/datax/admin/config/SwaggerConfig.java

@@ -0,0 +1,39 @@
+package com.wugui.datax.admin.config;
+
+import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+import springfox.documentation.builders.ApiInfoBuilder;
+import springfox.documentation.builders.PathSelectors;
+import springfox.documentation.builders.RequestHandlerSelectors;
+import springfox.documentation.service.ApiInfo;
+import springfox.documentation.spi.DocumentationType;
+import springfox.documentation.spring.web.plugins.Docket;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ * Configuration for the Swagger2 API documentation
+ * Created by jwk on 2019/07/05.
+ */
+@Configuration
+@EnableSwagger2
+@EnableSwaggerBootstrapUI
+@ConditionalOnWebApplication
+public class SwaggerConfig implements WebMvcConfigurer {
+
+    @Bean
+    public Docket createRestApi() {
+        return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select()
+                .apis(RequestHandlerSelectors.basePackage("com.wugui.datax.admin.controller")).paths(PathSelectors.any())
+                .build();
+    }
+
+    private ApiInfo apiInfo() {
+        return new ApiInfoBuilder().title("DataX Web Api Docs").description("DataX Web Api Docs")
+                .build();
+    }
+
+
+}

+ 24 - 0
datax-admin/src/main/java/com/wugui/datax/admin/config/WebConfig.java

@@ -0,0 +1,24 @@
+package com.wugui.datax.admin.config;
+
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+
+/**
+ * Serves the front-end static resources
+ *
+ * @program: com.wugui.datax-all
+ * @author: huzekang
+ * @create: 2019-06-17 10:40
+ **/
+@Configuration
+public class WebConfig implements WebMvcConfigurer {
+
+
+    @Override
+    public void addResourceHandlers(ResourceHandlerRegistry registry) {
+        registry.addResourceHandler("/index.html").addResourceLocations("classpath:/static/index.html");
+        registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/static/");
+    }
+}

+ 22 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/BaseController.java

@@ -0,0 +1,22 @@
+package com.wugui.datax.admin.controller;
+
+
+import com.baomidou.mybatisplus.extension.api.ApiController;
+import com.wugui.datax.admin.util.JwtTokenUtils;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.Enumeration;
+
+import static com.wugui.datatx.core.util.Constants.STRING_BLANK;
+
+/**
+ * base controller
+ */
+public class BaseController extends ApiController {
+
+    public Integer getCurrentUserId(HttpServletRequest request) {
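+        // take the first value of the token header, strip the token prefix, and resolve the user id from the JWT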
+        Enumeration<String> auth = request.getHeaders(JwtTokenUtils.TOKEN_HEADER);
+        String token = auth.nextElement().replace(JwtTokenUtils.TOKEN_PREFIX, STRING_BLANK);
+        return JwtTokenUtils.getUserId(token);
+    }
+}

+ 253 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/BaseForm.java

@@ -0,0 +1,253 @@
+package com.wugui.datax.admin.controller;
+
+import cn.hutool.core.util.BooleanUtil;
+import cn.hutool.core.util.NumberUtil;
+import cn.hutool.core.util.ObjectUtil;
+import cn.hutool.core.util.StrUtil;
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+import com.wugui.datax.admin.util.PageUtils;
+import com.wugui.datax.admin.util.ServletUtils;
+import lombok.extern.slf4j.Slf4j;
+
+import javax.servlet.http.HttpServletRequest;
+import java.net.URLDecoder;
+import java.util.Enumeration;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Helper that collects common request parameters (paging, sorting and filters)
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version 1.0
+ * @since 2019/5/15
+ */
+@Slf4j
+public class BaseForm {
+    /**
+     * Raw query parameters keyed by name
+     */
+    protected Map<String, Object> values = new LinkedHashMap<>();
+
+    /**
+     * Current page number
+     */
+    private Long current = 1L;
+
+    /**
+     * Page size
+     */
+    private Long size = 10L;
+
+    /**
+     * Builds the form from the parameters of the current HTTP request
+     */
+    public BaseForm() {
+        try {
+            HttpServletRequest request = ServletUtils.getRequest();
+            Enumeration<String> params = request.getParameterNames();
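+            // copy every request parameter into the values map, trimming and URL-decoding each value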
+            while (params.hasMoreElements()) {
+                String name = params.nextElement();
+                String value = StrUtil.trim(request.getParameter(name));
+                this.set(name, URLDecoder.decode(value, "UTF-8"));
+            }
+            this.parsePagingQueryParams();
+        } catch (Exception e) {
+            log.error("BaseForm initialize parameters setting error", e);
+        }
+    }
+
+    /**
+     * Returns the requested page number (defaults to 1)
+     *
+     * @return
+     */
+    public Long getPageNo() {
+        String pageNum = StrUtil.toString(this.get("current"));
+        if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) {
+            this.current = Long.parseLong(pageNum);
+        }
+        return this.current;
+    }
+
+    /**
+     * Returns the requested page size (defaults to 10)
+     *
+     * @return
+     */
+    public Long getPageSize() {
+        String pageSize = StrUtil.toString(this.get("size"));
+
+        if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) {
+            this.size = Long.parseLong(pageSize);
+        }
+        return this.size;
+    }
+
+    /**
+     * Returns the collected parameter map
+     *
+     * @return
+     */
+    public Map<String, Object> getParameters() {
+        return values;
+    }
+
+    /**
+     * Returns the value stored under the given name
+     *
+     * @param name
+     * @return
+     */
+    public Object get(String name) {
+        if (values == null) {
+            values = new LinkedHashMap<>();
+            return null;
+        }
+        return this.values.get(name);
+    }
+
+    /**
+     * Returns the value stored under the given key as a String
+     *
+     * @param key
+     * @return String
+     */
+    public String getString(String key) {
+        return StrUtil.toString(get(key));
+    }
+
+    /**
+     * Returns the sort field
+     *
+     * @return
+     */
+    public String getSort() {
+        return StrUtil.toString(this.values.get("sort"));
+    }
+
+    /**
+     * Returns the sort direction
+     *
+     * @return
+     */
+    public String getOrder() {
+        return StrUtil.toString(this.values.get("order"));
+    }
+
+    /**
+     * Returns the combined "orderby" expression
+     *
+     * @return
+     */
+    public String getOrderby() {
+        return StrUtil.toString(this.values.get("orderby"));
+    }
+
+    /**
+     * Builds the MyBatis-Plus paging object from the request parameters
+     */
+    public Page getPlusPagingQueryEntity() {
+        Page page = new Page();
+        // getPageNo()/getPageSize() fall back to page 1 / size 10 when the parameters are absent
+        page.setCurrent(this.getPageNo());
+        page.setSize(this.getPageSize());
+        if (ObjectUtil.isNotNull(this.get("ifCount"))) {
+            page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount")));
+        } else {
+            // count query enabled by default
+            page.setSearchCount(true);
+        }
+        return page;
+    }
+
+    /**
+     * Parses the PageHelper-style sorting parameters into an "orderby" expression
+     */
+    public void parsePagingQueryParams() {
+        // resolve the sort field and direction
+        String orderBy = StrUtil.toString(this.get("orderby")).trim();
+        String sortName = StrUtil.toString(this.get("sort")).trim();
+        String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase();
+
+        if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) {
+            if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) {
+                sortOrder = "asc";
+            }
+            this.set("orderby", sortName + " " + sortOrder);
+        }
+    }
+
+
+    /**
+     * Stores a parameter
+     *
+     * @param name  parameter name
+     * @param value parameter value
+     */
+    public void set(String name, Object value) {
+        if (ObjectUtil.isNotNull(value)) {
+            this.values.put(name, value);
+        }
+    }
+
+    /**
+     * Removes a parameter
+     *
+     * @param name
+     */
+    public void remove(String name) {
+        this.values.remove(name);
+    }
+
+    /**
+     * Clears all parameters
+     */
+    public void clear() {
+        if (values != null) {
+            values.clear();
+        }
+    }
+
+
+    /**
+     * Assembles a custom query wrapper from the request parameters
+     *
+     * @param map
+     * @return
+     */
+    protected QueryWrapper<?> pageQueryWrapperCustom(Map<String, Object> map, QueryWrapper<?> queryWrapper) {
+        // MyBatis-Plus paging/sorting related parameters
+        Map<String, Object> pageParams = PageUtils.filterPageParams(map);
+        // column filter parameters, with empty values and paging keys removed
+        Map<String, Object> colQueryMap = PageUtils.filterColumnQueryParams(map);
+        // apply sorting
+        pageParams.forEach((k, v) -> {
+            switch (k) {
+                case "ascs":
+                    queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
+                    break;
+                case "descs":
+                    queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
+                    break;
+            }
+        });
+
+        // build a query condition for every remaining column filter
+        colQueryMap.forEach((k, v) -> {
+            switch (k) {
+                case "pluginName":
+                case "datasourceName":
+                    queryWrapper.like(StrUtil.toUnderlineCase(k), v);
+                    break;
+                default:
+                    queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
+            }
+        });
+
+        return queryWrapper;
+    }
+
+}

+ 48 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/DataxJsonController.java

@@ -0,0 +1,48 @@
+package com.wugui.datax.admin.controller;
+
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.dto.DataXJsonBuildDto;
+import com.wugui.datax.admin.service.DataxJsonService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.util.CollectionUtils;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Created by jingwk on 2020/05/05
+ */
+
+@RestController
+@RequestMapping("api/dataxJson")
+@Api(tags = "组装datax  json的控制器")
+public class DataxJsonController extends BaseController {
+
+    @Autowired
+    private DataxJsonService dataxJsonService;
+
+
+    @PostMapping("/buildJson")
+    @ApiOperation("JSON构建")
+    public R<String> buildJobJson(@RequestBody DataXJsonBuildDto dto) {
+        String key = "system_please_choose";
+        if (dto.getReaderDatasourceId() == null) {
+            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerDataSource"));
+        }
+        if (dto.getWriterDatasourceId() == null) {
+            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerDataSource"));
+        }
+        if (CollectionUtils.isEmpty(dto.getReaderColumns())) {
+            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_readerColumns"));
+        }
+        if (CollectionUtils.isEmpty(dto.getWriterColumns())) {
+            return failed(I18nUtil.getString(key) + I18nUtil.getString("jobinfo_field_writerColumns"));
+        }
+        return success(dataxJsonService.buildJobJson(dto));
+    }
+
+}

+ 49 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/IndexController.java

@@ -0,0 +1,49 @@
+package com.wugui.datax.admin.controller;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datax.admin.service.JobService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.propertyeditors.CustomDateEditor;
+import org.springframework.web.bind.WebDataBinder;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+
+/**
+ * index controller
+ *
+ * @author jingwk 2019-12-22 16:13:16
+ */
+@RestController
+@Api(tags = "首页接口")
+@RequestMapping("/api")
+public class IndexController {
+
+    @Resource
+    private JobService jobService;
+
+
+    @GetMapping("/index")
+    @ApiOperation("监控图")
+    public ReturnT<Map<String, Object>> index() {
+        return new ReturnT<>(jobService.dashboardInfo());
+    }
+
+    @PostMapping("/chartInfo")
+    @ApiOperation("图表信息")
+    public ReturnT<Map<String, Object>> chartInfo() {
+        return jobService.chartInfo();
+    }
+
+    @InitBinder
+    public void initBinder(WebDataBinder binder) {
+        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        dateFormat.setLenient(false);
+        binder.registerCustomEditor(Date.class, new CustomDateEditor(dateFormat, true));
+    }
+
+}

+ 144 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobApiController.java

@@ -0,0 +1,144 @@
+package com.wugui.datax.admin.controller;
+
+import com.wugui.datatx.core.biz.AdminBiz;
+import com.wugui.datatx.core.biz.model.HandleCallbackParam;
+import com.wugui.datatx.core.biz.model.HandleProcessCallbackParam;
+import com.wugui.datatx.core.biz.model.RegistryParam;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.util.JobRemotingUtil;
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.util.JacksonUtil;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/5/10.
+ */
+@RestController
+@RequestMapping("/api")
+public class JobApiController {
+
+    @Resource
+    private AdminBiz adminBiz;
+
+    /**
+     * callback
+     *
+     * @param data
+     * @return
+     */
+    @RequestMapping("/callback")
+    public ReturnT<String> callback(HttpServletRequest request, @RequestBody(required = false) String data) {
+        // valid
+        if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
+                && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
+                && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
+        }
+
+        // param
+        List<HandleCallbackParam> callbackParamList = null;
+        try {
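+            // tolerate a malformed payload here; a null/empty result is rejected just below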
+            callbackParamList = JacksonUtil.readValue(data, List.class, HandleCallbackParam.class);
+        } catch (Exception e) { }
+        if (callbackParamList==null || callbackParamList.size()==0) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
+        }
+
+        // invoke
+        return adminBiz.callback(callbackParamList);
+    }
+
+    /**
+     * process callback
+     *
+     * @param data
+     * @return
+     */
+    @RequestMapping("/processCallback")
+    public ReturnT<String> processCallback(HttpServletRequest request, @RequestBody(required = false) String data) {
+        // valid
+        if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
+                && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
+                && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
+        }
+
+        // param
+        List<HandleProcessCallbackParam> callbackParamList = null;
+        try {
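+            // a parse failure leaves the list null and is reported as invalid request data below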
+            callbackParamList = JacksonUtil.readValue(data, List.class, HandleProcessCallbackParam.class);
+        } catch (Exception e) { }
+        if (callbackParamList==null || callbackParamList.size()==0) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
+        }
+
+        // invoke
+        return adminBiz.processCallback(callbackParamList);
+    }
+
+
+
+    /**
+     * registry
+     *
+     * @param data
+     * @return
+     */
+    @RequestMapping("/registry")
+    public ReturnT<String> registry(HttpServletRequest request, @RequestBody(required = false) String data) {
+        // valid
+        if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
+                && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
+                && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
+            return new ReturnT<String>(ReturnT.FAIL_CODE, "The access token is wrong.");
+        }
+
+        // param
+        RegistryParam registryParam = null;
+        try {
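+            // a parse failure leaves registryParam null and is reported as invalid request data below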
+            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
+        } catch (Exception e) {}
+        if (registryParam == null) {
+            return new ReturnT<String>(ReturnT.FAIL_CODE, "The request data invalid.");
+        }
+
+        // invoke
+        return adminBiz.registry(registryParam);
+    }
+
+    /**
+     * registry remove
+     *
+     * @param data
+     * @return
+     */
+    @RequestMapping("/registryRemove")
+    public ReturnT<String> registryRemove(HttpServletRequest request, @RequestBody(required = false) String data) {
+        // valid
+        if (JobAdminConfig.getAdminConfig().getAccessToken()!=null
+                && JobAdminConfig.getAdminConfig().getAccessToken().trim().length()>0
+                && !JobAdminConfig.getAdminConfig().getAccessToken().equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN))) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
+        }
+
+        // param
+        RegistryParam registryParam = null;
+        try {
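+            // a parse failure leaves registryParam null and is reported as invalid request data below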
+            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
+        } catch (Exception e) {}
+        if (registryParam == null) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
+        }
+
+        // invoke
+        return adminBiz.registryRemove(registryParam);
+    }
+
+
+}

+ 75 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobCodeController.java

@@ -0,0 +1,75 @@
+package com.wugui.datax.admin.controller;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobInfo;
+import com.wugui.datax.admin.entity.JobLogGlue;
+import com.wugui.datax.admin.mapper.JobInfoMapper;
+import com.wugui.datax.admin.mapper.JobLogGlueMapper;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.ui.Model;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.annotation.Resource;
+import java.util.Date;
+
+import static com.wugui.datatx.core.biz.model.ReturnT.FAIL_CODE;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/jobcode")
+@Api(tags = "任务状态接口")
+public class JobCodeController {
+
+    @Resource
+    private JobInfoMapper jobInfoMapper;
+    @Resource
+    private JobLogGlueMapper jobLogGlueMapper;
+
+
+    @RequestMapping(value = "/save", method = RequestMethod.POST)
+    @ApiOperation("保存任务状态")
+    public ReturnT<String> save(Model model, int id, String glueSource, String glueRemark) {
+        // valid
+        if (glueRemark == null) {
+            return new ReturnT<>(FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")));
+        }
+        if (glueRemark.length() < 4 || glueRemark.length() > 100) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_remark_limit"));
+        }
+        JobInfo existsJobInfo = jobInfoMapper.loadById(id);
+        if (existsJobInfo == null) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
+        }
+
+        // update new code
+        existsJobInfo.setGlueSource(glueSource);
+        existsJobInfo.setGlueRemark(glueRemark);
+        existsJobInfo.setGlueUpdatetime(new Date());
+
+        existsJobInfo.setUpdateTime(new Date());
+        jobInfoMapper.update(existsJobInfo);
+
+        // log old code
+        JobLogGlue jobLogGlue = new JobLogGlue();
+        jobLogGlue.setJobId(existsJobInfo.getId());
+        jobLogGlue.setGlueType(existsJobInfo.getGlueType());
+        jobLogGlue.setGlueSource(glueSource);
+        jobLogGlue.setGlueRemark(glueRemark);
+
+        jobLogGlue.setAddTime(new Date());
+        jobLogGlue.setUpdateTime(new Date());
+        jobLogGlueMapper.save(jobLogGlue);
+
+        // remove code backup more than 30
+        jobLogGlueMapper.removeOld(existsJobInfo.getId(), 30);
+
+        return ReturnT.SUCCESS;
+    }
+
+}

+ 133 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobDatasourceController.java

@@ -0,0 +1,133 @@
+package com.wugui.datax.admin.controller;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.core.util.LocalCacheUtil;
+import com.wugui.datax.admin.entity.JobDatasource;
+import com.wugui.datax.admin.service.JobDatasourceService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Controller for JDBC datasource configuration
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version v1.0
+ * @since 2019-07-30
+ */
+@RestController
+@RequestMapping("/api/jobJdbcDatasource")
+@Api(tags = "jdbc数据源配置接口")
+public class JobDatasourceController extends BaseController {
+    /**
+     * Service instance
+     */
+    @Autowired
+    private JobDatasourceService jobJdbcDatasourceService;
+
+    /**
+     * Queries all records with paging
+     *
+     * @return paged records
+     */
+    @GetMapping
+    @ApiOperation("分页查询所有数据")
+    @ApiImplicitParams(
+            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
+                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
+                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
+                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
+                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
+            })
+    public R<IPage<JobDatasource>> selectAll() {
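+        // BaseForm collects paging, sorting and filter parameters from the current HTTP request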
+        BaseForm form = new BaseForm();
+        QueryWrapper<JobDatasource> query = (QueryWrapper<JobDatasource>) form.pageQueryWrapperCustom(form.getParameters(), new QueryWrapper<JobDatasource>());
+        return success(jobJdbcDatasourceService.page(form.getPlusPagingQueryEntity(), query));
+    }
+
+    /**
+     * Returns all datasources
+     * @return
+     */
+    @ApiOperation("获取所有数据源")
+    @GetMapping("/all")
+    public R<List<JobDatasource>> selectAllDatasource() {
+        return success(this.jobJdbcDatasourceService.selectAllDatasource());
+    }
+
+    /**
+     * Queries a single record by primary key
+     *
+     * @param id primary key
+     * @return the matching record
+     */
+    @ApiOperation("通过主键查询单条数据")
+    @GetMapping("{id}")
+    public R<JobDatasource> selectOne(@PathVariable Serializable id) {
+        return success(this.jobJdbcDatasourceService.getById(id));
+    }
+
+    /**
+     * Inserts a record
+     *
+     * @param entity entity to save
+     * @return whether the insert succeeded
+     */
+    @ApiOperation("新增数据")
+    @PostMapping
+    public R<Boolean> insert(@RequestBody JobDatasource entity) {
+        return success(this.jobJdbcDatasourceService.save(entity));
+    }
+
+    /**
+     * Updates a record
+     *
+     * @param entity entity to update
+     * @return whether the update succeeded
+     */
+    @PutMapping
+    @ApiOperation("修改数据")
+    public R<Boolean> update(@RequestBody JobDatasource entity) {
+        LocalCacheUtil.remove(entity.getDatasourceName());
+        JobDatasource d = jobJdbcDatasourceService.getById(entity.getId());
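+        // if the submitted username/password equals the stored value, clear it so the unchanged credential is not re-saved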
+        if (entity.getJdbcUsername() != null && entity.getJdbcUsername().equals(d.getJdbcUsername())) {
+            entity.setJdbcUsername(null);
+        }
+        if (null != entity.getJdbcPassword() && entity.getJdbcPassword().equals(d.getJdbcPassword())) {
+            entity.setJdbcPassword(null);
+        }
+        return success(this.jobJdbcDatasourceService.updateById(entity));
+    }
+
+    /**
+     * Deletes records
+     *
+     * @param idList list of primary keys
+     * @return whether the delete succeeded
+     */
+    @DeleteMapping
+    @ApiOperation("删除数据")
+    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
+        return success(this.jobJdbcDatasourceService.removeByIds(idList));
+    }
+
+    /**
+     * Tests a datasource connection
+     * @param jobJdbcDatasource
+     * @return
+     */
+    @PostMapping("/test")
+    @ApiOperation("测试数据")
+    public R<Boolean> dataSourceTest (@RequestBody JobDatasource jobJdbcDatasource) throws IOException {
+        return success(jobJdbcDatasourceService.dataSourceTest(jobJdbcDatasource));
+    }
+}

+ 172 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobGroupController.java

@@ -0,0 +1,172 @@
+package com.wugui.datax.admin.controller;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.enums.RegistryConfig;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobGroup;
+import com.wugui.datax.admin.entity.JobRegistry;
+import com.wugui.datax.admin.mapper.JobGroupMapper;
+import com.wugui.datax.admin.mapper.JobInfoMapper;
+import com.wugui.datax.admin.mapper.JobRegistryMapper;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import java.util.*;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/api/jobGroup")
+@Api(tags = "执行器管理接口")
+public class JobGroupController {
+
+    @Resource
+    public JobInfoMapper jobInfoMapper;
+    @Resource
+    public JobGroupMapper jobGroupMapper;
+    @Resource
+    private JobRegistryMapper jobRegistryMapper;
+
+    @GetMapping("/list")
+    @ApiOperation("执行器列表")
+    public ReturnT<List<JobGroup>> getExecutorList() {
+        return new ReturnT<>(jobGroupMapper.findAll());
+    }
+
+    @PostMapping("/save")
+    @ApiOperation("新建执行器")
+    public ReturnT<String> save(@RequestBody JobGroup jobGroup) {
+
+        // valid
+        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
+            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + "AppName"));
+        }
+        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
+            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appName_length"));
+        }
+        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
+            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
+        }
+        if (jobGroup.getAddressType() != 0) {
+            if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) {
+                return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
+            }
+            String[] addresses = jobGroup.getAddressList().split(",");
+            for (String item : addresses) {
+                if (item == null || item.trim().length() == 0) {
+                    return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
+                }
+            }
+        }
+
+        int ret = jobGroupMapper.save(jobGroup);
+        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
+    }
+
+    @PostMapping("/update")
+    @ApiOperation("更新执行器")
+    public ReturnT<String> update(@RequestBody JobGroup jobGroup) {
+        // valid
+        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
+            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + "AppName"));
+        }
+        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
+            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appName_length"));
+        }
+        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
+            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
+        }
+        if (jobGroup.getAddressType() == 0) {
+            // 0 = auto-registration: rebuild the address list from live registrations
+            List<String> registryList = findRegistryByAppName(jobGroup.getAppName());
+            String addressListStr = null;
+            if (registryList != null && !registryList.isEmpty()) {
+                Collections.sort(registryList);
+                addressListStr = "";
+                for (String item : registryList) {
+                    addressListStr += item + ",";
+                }
+                addressListStr = addressListStr.substring(0, addressListStr.length() - 1);
+            }
+            jobGroup.setAddressList(addressListStr);
+        } else {
+            // 1 = manually entered address list
+            if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) {
+                return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
+            }
+            String[] addresses = jobGroup.getAddressList().split(",");
+            for (String item : addresses) {
+                if (item == null || item.trim().length() == 0) {
+                    return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
+                }
+            }
+        }
+
+        int ret = jobGroupMapper.update(jobGroup);
+        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
+    }
+
+    private List<String> findRegistryByAppName(String appNameParam) {
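+        // group the addresses of live EXECUTOR registrations by appName, then return the list registered under appNameParam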
+        HashMap<String, List<String>> appAddressMap = new HashMap<>();
+        List<JobRegistry> list = jobRegistryMapper.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
+        if (list != null) {
+            for (JobRegistry item : list) {
+                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
+                    String appName = item.getRegistryKey();
+                    List<String> registryList = appAddressMap.get(appName);
+                    if (registryList == null) {
+                        registryList = new ArrayList<>();
+                    }
+
+                    if (!registryList.contains(item.getRegistryValue())) {
+                        registryList.add(item.getRegistryValue());
+                    }
+                    appAddressMap.put(appName, registryList);
+                }
+            }
+        }
+        return appAddressMap.get(appNameParam);
+    }
+
+    @PostMapping("/remove")
+    @ApiOperation("移除执行器")
+    public ReturnT<String> remove(int id) {
+
+        // valid
+        int count = jobInfoMapper.pageListCount(0, 10, id, -1, null, null, 0,null);
+        if (count > 0) {
+            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_0"));
+        }
+
+        List<JobGroup> allList = jobGroupMapper.findAll();
+        if (allList.size() == 1) {
+            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_1"));
+        }
+
+        int ret = jobGroupMapper.remove(id);
+        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
+    }
+
+    @RequestMapping(value = "/loadById", method = RequestMethod.POST)
+    @ApiOperation("根据id获取执行器")
+    public ReturnT<JobGroup> loadById(int id) {
+        JobGroup jobGroup = jobGroupMapper.load(id);
+        return jobGroup != null ? new ReturnT<>(jobGroup) : new ReturnT<>(ReturnT.FAIL_CODE, null);
+    }
+
+    @GetMapping("/query")
+    @ApiOperation("查询执行器")
+    public ReturnT<List<JobGroup>> get(@ApiParam(value = "执行器AppName")
+                                       @RequestParam(value = "appName", required = false) String appName,
+                                       @ApiParam(value = "执行器名称")
+                                       @RequestParam(value = "title", required = false) String title,
+                                       @ApiParam(value = "执行器地址列表")
+                                       @RequestParam(value = "addressList", required = false) String addressList) {
+        return new ReturnT<>(jobGroupMapper.find(appName, title, addressList));
+    }
+
+}

+ 129 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobInfoController.java

@@ -0,0 +1,129 @@
+package com.wugui.datax.admin.controller;
+
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.util.DateUtil;
+import com.wugui.datax.admin.core.cron.CronExpression;
+import com.wugui.datax.admin.core.thread.JobTriggerPoolHelper;
+import com.wugui.datax.admin.core.trigger.TriggerTypeEnum;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.dto.DataXBatchJsonBuildDto;
+import com.wugui.datax.admin.dto.TriggerJobDto;
+import com.wugui.datax.admin.entity.JobInfo;
+import com.wugui.datax.admin.service.JobService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * job info controller
+ *
+ * @author xuxueli 2015-12-19 16:13:16
+ */
+@Api(tags = "任务配置接口")
+@RestController
+@RequestMapping("/api/job")
+public class JobInfoController extends BaseController{
+
+    @Resource
+    private JobService jobService;
+
+
+    @GetMapping("/pageList")
+    @ApiOperation("任务列表")
+    public ReturnT<Map<String, Object>> pageList(@RequestParam(required = false, defaultValue = "0") int current,
+                                        @RequestParam(required = false, defaultValue = "10") int size,
+                                        int jobGroup, int triggerStatus, String jobDesc, String glueType, Integer[] projectIds) {
+
+        return new ReturnT<>(jobService.pageList((current-1)*size, size, jobGroup, triggerStatus, jobDesc, glueType, 0, projectIds));
+    }
+
+    @GetMapping("/list")
+    @ApiOperation("全部任务列表")
+    public ReturnT<List<JobInfo>> list(){
+        return new ReturnT<>(jobService.list());
+    }
+
+    @PostMapping("/add")
+    @ApiOperation("添加任务")
+    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobInfo jobInfo) {
+        jobInfo.setUserId(getCurrentUserId(request));
+        return jobService.add(jobInfo);
+    }
+
+    @PostMapping("/update")
+    @ApiOperation("更新任务")
+    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobInfo jobInfo) {
+        jobInfo.setUserId(getCurrentUserId(request));
+        return jobService.update(jobInfo);
+    }
+
+    @PostMapping(value = "/remove/{id}")
+    @ApiOperation("移除任务")
+    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
+        return jobService.remove(id);
+    }
+
+    @RequestMapping(value = "/stop",method = RequestMethod.POST)
+    @ApiOperation("停止任务")
+    public ReturnT<String> pause(int id) {
+        return jobService.stop(id);
+    }
+
+    @RequestMapping(value = "/start",method = RequestMethod.POST)
+    @ApiOperation("开启任务")
+    public ReturnT<String> start(int id) {
+        return jobService.start(id);
+    }
+
+    @PostMapping(value = "/trigger")
+    @ApiOperation("触发任务")
+    public ReturnT<String> triggerJob(@RequestBody TriggerJobDto dto) {
+        // force cover job param
+        String executorParam=dto.getExecutorParam();
+        if (executorParam == null) {
+            executorParam = "";
+        }
+        JobTriggerPoolHelper.trigger(dto.getJobId(), TriggerTypeEnum.MANUAL, -1, null, executorParam);
+        return ReturnT.SUCCESS;
+    }
+
+    @GetMapping("/nextTriggerTime")
+    @ApiOperation("获取近5次触发时间")
+    public ReturnT<List<String>> nextTriggerTime(String cron) {
+        List<String> result = new ArrayList<>();
+        try {
+            CronExpression cronExpression = new CronExpression(cron);
+            Date lastTime = new Date();
+            for (int i = 0; i < 5; i++) {
+                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
+                if (lastTime != null) {
+                    result.add(DateUtil.formatDateTime(lastTime));
+                } else {
+                    break;
+                }
+            }
+        } catch (ParseException e) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
+        }
+        return new ReturnT<>(result);
+    }
+
+    @PostMapping("/batchAdd")
+    @ApiOperation("批量创建任务")
+    public ReturnT<String> batchAdd(@RequestBody DataXBatchJsonBuildDto dto) throws IOException {
+        if (dto.getTemplateId() ==0) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_temp")));
+        }
+        return jobService.batchAdd(dto);
+    }
+}

+ 173 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobLogController.java

@@ -0,0 +1,173 @@
+package com.wugui.datax.admin.controller;
+
+import com.wugui.datatx.core.biz.ExecutorBiz;
+import com.wugui.datatx.core.biz.model.LogResult;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.util.DateUtil;
+import com.wugui.datax.admin.core.kill.KillJob;
+import com.wugui.datax.admin.core.scheduler.JobScheduler;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobInfo;
+import com.wugui.datax.admin.entity.JobLog;
+import com.wugui.datax.admin.mapper.JobInfoMapper;
+import com.wugui.datax.admin.mapper.JobLogMapper;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/api/log")
+@Api(tags = "任务运行日志接口")
+public class JobLogController {
+    private static Logger logger = LoggerFactory.getLogger(JobLogController.class);
+
+    @Resource
+    public JobInfoMapper jobInfoMapper;
+    @Resource
+    public JobLogMapper jobLogMapper;
+
+    @GetMapping("/pageList")
+    @ApiOperation("运行日志列表")
+    public ReturnT<Map<String, Object>> pageList(
+            @RequestParam(required = false, defaultValue = "0") int current,
+            @RequestParam(required = false, defaultValue = "10") int size,
+            int jobGroup, int jobId, int logStatus, String filterTime) {
+
+        // valid permission
+        //JobInfoController.validPermission(request, jobGroup);	// only admins may query all jobGroups; regular users are limited to authorized ones
+
+        // parse param
+        Date triggerTimeStart = null;
+        Date triggerTimeEnd = null;
+        if (filterTime != null && filterTime.trim().length() > 0) {
+            String[] temp = filterTime.split(" - ");
+            if (temp.length == 2) {
+                triggerTimeStart = DateUtil.parseDateTime(temp[0]);
+                triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
+            }
+        }
+
+        // page query
+        List<JobLog> data = jobLogMapper.pageList((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
+        int cnt = jobLogMapper.pageListCount((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
+
+        // package result
+        Map<String, Object> maps = new HashMap<>();
+        maps.put("recordsTotal", cnt);        // 总记录数
+        maps.put("recordsFiltered", cnt);    // 过滤后的总记录数
+        maps.put("data", data);                    // 分页列表
+        return new ReturnT<>(maps);
+    }
+
+    @RequestMapping(value = "/logDetailCat", method = RequestMethod.GET)
+    @ApiOperation("运行日志详情")
+    public ReturnT<LogResult> logDetailCat(String executorAddress, long triggerTime, long logId, int fromLineNum) {
+        try {
+            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(executorAddress);
+            ReturnT<LogResult> logResult = executorBiz.log(triggerTime, logId, fromLineNum);
+
+            // is end
+            if (logResult.getContent() != null && fromLineNum > logResult.getContent().getToLineNum()) {
+                JobLog jobLog = jobLogMapper.load(logId);
+                if (jobLog.getHandleCode() > 0) {
+                    logResult.getContent().setEnd(true);
+                }
+            }
+
+            return logResult;
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
+        }
+    }
+
+    @RequestMapping(value = "/logKill", method = RequestMethod.POST)
+    @ApiOperation("kill任务")
+    public ReturnT<String> logKill(int id) {
+        // base check
+        JobLog log = jobLogMapper.load(id);
+        JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId());
+        if (jobInfo == null) {
+            return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
+        }
+        if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
+            return new ReturnT<>(500, I18nUtil.getString("joblog_kill_log_limit"));
+        }
+
+        // request of kill
+        ReturnT<String> runResult;
+        try {
+            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(log.getExecutorAddress());
+            runResult = executorBiz.kill(jobInfo.getId());
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            runResult = new ReturnT<>(500, e.getMessage());
+        }
+
+        if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
+            log.setHandleCode(ReturnT.FAIL_CODE);
+            log.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? runResult.getMsg() : ""));
+            log.setHandleTime(new Date());
+            jobLogMapper.updateHandleInfo(log);
+            return new ReturnT<>(runResult.getMsg());
+        } else {
+            return new ReturnT<>(500, runResult.getMsg());
+        }
+    }
+
+    @PostMapping("/clearLog")
+    @ApiOperation("清理日志")
+    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {
+
+        Date clearBeforeTime = null;
+        int clearBeforeNum = 0;
+        if (type == 1) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -1);    // clear logs older than one month
+        } else if (type == 2) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -3);    // clear logs older than three months
+        } else if (type == 3) {
+            clearBeforeTime = DateUtil.addMonths(new Date(), -6);    // clear logs older than six months
+        } else if (type == 4) {
+            clearBeforeTime = DateUtil.addYears(new Date(), -1);    // clear logs older than one year
+        } else if (type == 5) {
+            clearBeforeNum = 1000;        // keep only the latest 1,000 log entries
+        } else if (type == 6) {
+            clearBeforeNum = 10000;        // keep only the latest 10,000 log entries
+        } else if (type == 7) {
+            clearBeforeNum = 30000;        // keep only the latest 30,000 log entries
+        } else if (type == 8) {
+            clearBeforeNum = 100000;    // keep only the latest 100,000 log entries
+        } else if (type == 9) {
+            clearBeforeNum = 0;            // clear all log entries
+        } else {
+            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_invalid"));
+        }
+
+        List<Long> logIds;
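+        // delete matching log ids in batches of up to 1000 until none remain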
+        do {
+            logIds = jobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
+            if (logIds != null && logIds.size() > 0) {
+                jobLogMapper.clearLog(logIds);
+            }
+        } while (logIds != null && logIds.size() > 0);
+
+        return ReturnT.SUCCESS;
+    }
+
+    @ApiOperation("停止该job作业")
+    @PostMapping("/killJob")
+    public ReturnT<String> killJob(@RequestBody JobLog log) {
+        return KillJob.trigger(log.getId(), log.getTriggerTime(), log.getExecutorAddress(), log.getProcessId());
+    }
+}

+ 110 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobProjectController.java

@@ -0,0 +1,110 @@
+package com.wugui.datax.admin.controller;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.entity.JobProject;
+import com.wugui.datax.admin.service.JobProjectService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * project manage controller
+ *
+ * @author jingwk 2020-05-24 16:13:16
+ */
+@RestController
+@RequestMapping("/api/jobProject")
+@Api(tags = "项目管理模块")
+public class JobProjectController extends BaseController {
+
+    @Autowired
+    private JobProjectService jobProjectService;
+
+
+    /**
+     * Queries all projects with paging
+     *
+     * @return paged projects
+     */
+    @GetMapping
+    @ApiOperation("分页查询所有数据")
+    public R<IPage<JobProject>> selectAll(@RequestParam(value = "searchVal", required = false) String searchVal,
+                                          @RequestParam("pageSize") Integer pageSize,
+                                          @RequestParam("pageNo") Integer pageNo) {
+
+        return success(jobProjectService.getProjectListPaging(pageSize, pageNo, searchVal));
+    }
+
+    /**
+     * Get all projects
+     *
+     * @return
+     */
+    @ApiOperation("获取所有数据")
+    @GetMapping("/list")
+    public R<List<JobProject>> selectList() {
+        QueryWrapper<JobProject> query = new QueryWrapper<>();
+        query.eq("flag", true);
+        return success(jobProjectService.list(query));
+    }
+
+    /**
+     * Queries a single project by primary key
+     *
+     * @param id primary key
+     * @return the matching project
+     */
+    @ApiOperation("通过主键查询单条数据")
+    @GetMapping("{id}")
+    public R<JobProject> selectOne(@PathVariable Serializable id) {
+        return success(this.jobProjectService.getById(id));
+    }
+
+    /**
+     * Inserts a project
+     *
+     * @param entity entity to save
+     * @return whether the insert succeeded
+     */
+    @ApiOperation("新增数据")
+    @PostMapping
+    public R<Boolean> insert(HttpServletRequest request, @RequestBody JobProject entity) {
+        entity.setUserId(getCurrentUserId(request));
+        return success(this.jobProjectService.save(entity));
+    }
+
+
+    /**
+     * Updates a project
+     *
+     * @param entity entity to update
+     * @return whether the update succeeded
+     */
+    @PutMapping
+    @ApiOperation("修改数据")
+    public R<Boolean> update(@RequestBody JobProject entity) {
+        JobProject project = jobProjectService.getById(entity.getId());
+        project.setName(entity.getName());
+        project.setDescription(entity.getDescription());
+        return success(this.jobProjectService.updateById(project));
+    }
+
+    /**
+     * Deletes projects
+     *
+     * @param idList list of primary keys
+     * @return whether the delete succeeded
+     */
+    @DeleteMapping
+    @ApiOperation("删除数据")
+    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
+        return success(this.jobProjectService.removeByIds(idList));
+    }
+}

+ 90 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobRegistryController.java

@@ -0,0 +1,90 @@
+package com.wugui.datax.admin.controller;
+
+import cn.hutool.core.util.StrUtil;
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.entity.JobRegistry;
+import com.wugui.datax.admin.service.JobRegistryService;
+import com.wugui.datax.admin.util.PageUtils;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.Map;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/api/jobRegistry")
+@Api(tags = "执行器资源监控")
+public class JobRegistryController extends BaseController {
+
+	@Autowired
+	private JobRegistryService jobRegistryService;
+
+	/**
+	 * Queries all records with paging
+	 *
+	 * @return paged records
+	 */
+	@GetMapping
+	@ApiOperation("分页查询所有数据")
+	@ApiImplicitParams(
+			{@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
+					@ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
+					@ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
+					@ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
+					@ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
+			})
+	public R<IPage<JobRegistry>> selectAll() {
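+		// BaseForm collects paging, sorting and filter parameters from the current HTTP request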
+		BaseForm baseForm = new BaseForm();
+		return success(this.jobRegistryService.page(baseForm.getPlusPagingQueryEntity(), pageQueryWrapperCustom(baseForm.getParameters())));
+	}
+
+	/**
+	 * Assembles a custom query wrapper from the request parameters
+	 *
+	 * @param map
+	 * @return
+	 */
+	protected QueryWrapper<JobRegistry> pageQueryWrapperCustom(Map<String, Object> map) {
+		// MyBatis-Plus paging/sorting related parameters
+		Map<String, Object> pageHelperParams = PageUtils.filterPageParams(map);
+		// column filter parameters, with empty values and paging keys removed
+		Map<String, Object> columnQueryMap = PageUtils.filterColumnQueryParams(map);
+
+		QueryWrapper<JobRegistry> queryWrapper = new QueryWrapper<>();
+
+		// apply sorting
+		pageHelperParams.forEach((k, v) -> {
+			switch (k) {
+				case "ascs":
+					queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
+					break;
+				case "descs":
+					queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
+					break;
+			}
+		});
+
+		// build a query condition for every remaining column filter
+		columnQueryMap.forEach((k, v) -> {
+			switch (k) {
+				case "datasourceName":
+					queryWrapper.like(StrUtil.toUnderlineCase(k), v);
+					break;
+				default:
+					queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
+			}
+		});
+
+		return queryWrapper;
+	}
+}

+ 84 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/JobTemplateController.java

@@ -0,0 +1,84 @@
+package com.wugui.datax.admin.controller;
+
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.util.DateUtil;
+import com.wugui.datax.admin.core.cron.CronExpression;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobTemplate;
+import com.wugui.datax.admin.service.JobTemplateService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * template controller
+ *
+ * @author jingwk 2019-12-22 16:13:16
+ */
+@Api(tags = "任务配置接口")
+@RestController
+@RequestMapping("/api/jobTemplate")
+public class JobTemplateController extends BaseController{
+
+    @Resource
+    private JobTemplateService jobTemplateService;
+
+    @GetMapping("/pageList")
+    @ApiOperation("任务模板列表")
+    public ReturnT<Map<String, Object>> pageList(@RequestParam(required = false, defaultValue = "0") int current,
+                                        @RequestParam(required = false, defaultValue = "10") int size,
+                                        int jobGroup, String jobDesc, String executorHandler, int userId,Integer[] projectIds) {
+
+        return new ReturnT<>(jobTemplateService.pageList((current-1)*size, size, jobGroup, jobDesc, executorHandler, userId, projectIds));
+    }
+
+    @PostMapping("/add")
+    @ApiOperation("添加任务模板")
+    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
+        jobTemplate.setUserId(getCurrentUserId(request));
+        return jobTemplateService.add(jobTemplate);
+    }
+
+    @PostMapping("/update")
+    @ApiOperation("更新任务")
+    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobTemplate jobTemplate) {
+        jobTemplate.setUserId(getCurrentUserId(request));
+        return jobTemplateService.update(jobTemplate);
+    }
+
+    @PostMapping(value = "/remove/{id}")
+    @ApiOperation("移除任务模板")
+    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
+        return jobTemplateService.remove(id);
+    }
+
+    @GetMapping("/nextTriggerTime")
+    @ApiOperation("获取近5次触发时间")
+    public ReturnT<List<String>> nextTriggerTime(String cron) {
+        List<String> result = new ArrayList<>();
+        try {
+            CronExpression cronExpression = new CronExpression(cron);
+            Date lastTime = new Date();
+            for (int i = 0; i < 5; i++) {
+                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
+                if (lastTime != null) {
+                    result.add(DateUtil.formatDateTime(lastTime));
+                } else {
+                    break;
+                }
+            }
+        } catch (ParseException e) {
+            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
+        }
+        return new ReturnT<>(result);
+    }
+}

+ 104 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/MetadataController.java

@@ -0,0 +1,104 @@
+package com.wugui.datax.admin.controller;
+
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.service.DatasourceQueryService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.List;
+
+/**
+ * Controller for querying database table names and columns
+ *
+ * @author jingwk
+ * @ClassName MetadataController
+ * @Version 2.1.2
+ * @since 2020/05/31 20:48
+ */
+@RestController
+@RequestMapping("api/metadata")
+@Api(tags = "jdbc数据库查询控制器")
+public class MetadataController extends BaseController {
+
+    @Autowired
+    private DatasourceQueryService datasourceQueryService;
+
+    /**
+     * Get the mongo database names for the given datasource id
+     *
+     * @param datasourceId
+     * @return
+     */
+    @GetMapping("/getDBs")
+    @ApiOperation("根据数据源id获取mongo库名")
+    public R<List<String>> getDBs(Long datasourceId) throws IOException {
+        return success(datasourceQueryService.getDBs(datasourceId));
+    }
+
+
+    /**
+     * Get the collection names for the given datasource id and db name
+     *
+     * @param datasourceId
+     * @return
+     */
+    @GetMapping("/collectionNames")
+    @ApiOperation("根据数据源id,dbname获取CollectionNames")
+    public R<List<String>> getCollectionNames(Long datasourceId,String dbName) throws IOException {
+        return success(datasourceQueryService.getCollectionNames(datasourceId,dbName));
+    }
+
+    /**
+     * Get the table schemas (PG) for the given datasource id
+     *
+     * @param datasourceId
+     * @return
+     */
+    @GetMapping("/getDBSchema")
+    @ApiOperation("根据数据源id获取 db schema")
+    public R<List<String>> getTableSchema(Long datasourceId) {
+        return success(datasourceQueryService.getTableSchema(datasourceId));
+    }
+
+    /**
+     * Get the available table names for the given datasource id
+     *
+     * @param datasourceId
+     * @return
+     */
+    @GetMapping("/getTables")
+    @ApiOperation("根据数据源id获取可用表名")
+    public R<List<String>> getTableNames(Long datasourceId,String tableSchema) throws IOException {
+        return success(datasourceQueryService.getTables(datasourceId,tableSchema));
+    }
+
+    /**
+     * Get all columns for the given datasource id and table name
+     *
+     * @param datasourceId datasource id
+     * @param tableName    table name
+     * @return
+     */
+    @GetMapping("/getColumns")
+    @ApiOperation("根据数据源id和表名获取所有字段")
+    public R<List<String>> getColumns(Long datasourceId, String tableName) throws IOException {
+        return success(datasourceQueryService.getColumns(datasourceId, tableName));
+    }
+
+    /**
+     * Get all columns returned by a query SQL for the given datasource id
+     *
+     * @param datasourceId datasource id
+     * @param querySql     query SQL statement
+     * @return
+     */
+    @GetMapping("/getColumnsByQuerySql")
+    @ApiOperation("根据数据源id和sql语句获取所有字段")
+    public R<List<String>> getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException {
+        return success(datasourceQueryService.getColumnsByQuerySql(datasourceId, querySql));
+    }
+}

+ 149 - 0
datax-admin/src/main/java/com/wugui/datax/admin/controller/UserController.java

@@ -0,0 +1,149 @@
+package com.wugui.datax.admin.controller;
+
+import cn.hutool.core.util.StrUtil;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobUser;
+import com.wugui.datax.admin.mapper.JobUserMapper;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+import org.springframework.util.StringUtils;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static com.wugui.datatx.core.biz.model.ReturnT.FAIL_CODE;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@RestController
+@RequestMapping("/api/user")
+@Api(tags = "用户信息接口")
+public class UserController {
+
+    @Resource
+    private JobUserMapper jobUserMapper;
+
+    @Resource
+    private BCryptPasswordEncoder bCryptPasswordEncoder;
+
+
+    @GetMapping("/pageList")
+    @ApiOperation("用户列表")
+    public ReturnT<Map<String, Object>> pageList(@RequestParam(required = false, defaultValue = "1") int current,
+                                                 @RequestParam(required = false, defaultValue = "10") int size,
+                                                 String username) {
+
+        // page list
+        List<JobUser> list = jobUserMapper.pageList((current - 1) * size, size, username);
+        int recordsTotal = jobUserMapper.pageListCount((current - 1) * size, size, username);
+
+        // package result
+        Map<String, Object> maps = new HashMap<>();
+        maps.put("recordsTotal", recordsTotal);        // total record count
+        maps.put("recordsFiltered", recordsTotal);    // record count after filtering
+        maps.put("data", list);                    // page data
+        return new ReturnT<>(maps);
+    }
+
+    @GetMapping("/list")
+    @ApiOperation("用户列表")
+    public ReturnT<List<JobUser>> list(String username) {
+
+        // page list
+        List<JobUser> list = jobUserMapper.findAll(username);
+        return new ReturnT<>(list);
+    }
+
+    @GetMapping("/getUserById")
+    @ApiOperation(value = "根据id获取用户")
+    public ReturnT<JobUser> selectById(@RequestParam("userId") Integer userId) {
+        return new ReturnT<>(jobUserMapper.getUserById(userId));
+    }
+
+    @PostMapping("/add")
+    @ApiOperation("添加用户")
+    public ReturnT<String> add(@RequestBody JobUser jobUser) {
+
+        // valid username
+        if (!StringUtils.hasText(jobUser.getUsername())) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_username"));
+        }
+        jobUser.setUsername(jobUser.getUsername().trim());
+        if (!(jobUser.getUsername().length() >= 4 && jobUser.getUsername().length() <= 20)) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
+        }
+        // valid password
+        if (!StringUtils.hasText(jobUser.getPassword())) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_password"));
+        }
+        jobUser.setPassword(jobUser.getPassword().trim());
+        if (!(jobUser.getPassword().length() >= 4 && jobUser.getPassword().length() <= 20)) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
+        }
+        jobUser.setPassword(bCryptPasswordEncoder.encode(jobUser.getPassword()));
+
+
+        // check repeat
+        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
+        if (existUser != null) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("user_username_repeat"));
+        }
+
+        // write
+        jobUserMapper.save(jobUser);
+        return ReturnT.SUCCESS;
+    }
+
+    @PostMapping(value = "/update")
+    @ApiOperation("更新用户信息")
+    public ReturnT<String> update(@RequestBody JobUser jobUser) {
+        if (StringUtils.hasText(jobUser.getPassword())) {
+            String pwd = jobUser.getPassword().trim();
+            if (StrUtil.isBlank(pwd)) {
+                return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_no_blank") + "密码");
+            }
+
+            if (!(pwd.length() >= 4 && pwd.length() <= 20)) {
+                return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
+            }
+            jobUser.setPassword(bCryptPasswordEncoder.encode(pwd));
+        } else {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_no_blank") + "密码");
+        }
+        // write
+        jobUserMapper.update(jobUser);
+        return ReturnT.SUCCESS;
+    }
+
+    @RequestMapping(value = "/remove", method = RequestMethod.POST)
+    @ApiOperation("删除用户")
+    public ReturnT<String> remove(int id) {
+        int result = jobUserMapper.delete(id);
+        return result != 1 ? ReturnT.FAIL : ReturnT.SUCCESS;
+    }
+
+    @PostMapping(value = "/updatePwd")
+    @ApiOperation("修改密码")
+    public ReturnT<String> updatePwd(@RequestBody JobUser jobUser) {
+        String password = jobUser.getPassword();
+        if (password == null || password.trim().length() == 0) {
+            return new ReturnT<>(ReturnT.FAIL.getCode(), "密码不可为空");
+        }
+        password = password.trim();
+        if (!(password.length() >= 4 && password.length() <= 20)) {
+            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
+        }
+        // do write
+        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
+        existUser.setPassword(bCryptPasswordEncoder.encode(password));
+        jobUserMapper.update(existUser);
+        return ReturnT.SUCCESS;
+    }
+
+}

+ 154 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/conf/JobAdminConfig.java

@@ -0,0 +1,154 @@
+package com.wugui.datax.admin.core.conf;
+
+import com.wugui.datax.admin.core.scheduler.JobScheduler;
+import com.wugui.datax.admin.mapper.*;
+import org.springframework.beans.factory.DisposableBean;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.mail.javamail.JavaMailSender;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import javax.sql.DataSource;
+
+/**
+ * xxl-job config
+ *
+ * @author xuxueli 2017-04-28
+ */
+
+@Component
+public class JobAdminConfig implements InitializingBean, DisposableBean {
+
+    private static JobAdminConfig adminConfig = null;
+
+    public static JobAdminConfig getAdminConfig() {
+        return adminConfig;
+    }
+
+
+    // ---------------------- XxlJobScheduler ----------------------
+
+    private JobScheduler xxlJobScheduler;
+
+    @Override
+    public void afterPropertiesSet() throws Exception {
+        adminConfig = this;
+
+        xxlJobScheduler = new JobScheduler();
+        xxlJobScheduler.init();
+    }
+
+    @Override
+    public void destroy() throws Exception {
+        xxlJobScheduler.destroy();
+    }
+
+
+    // ---------------------- XxlJobScheduler ----------------------
+
+    // conf
+    @Value("${datax.job.i18n}")
+    private String i18n;
+
+    @Value("${datax.job.accessToken}")
+    private String accessToken;
+
+    @Value("${spring.mail.username}")
+    private String emailUserName;
+
+    @Value("${datax.job.triggerpool.fast.max}")
+    private int triggerPoolFastMax;
+
+    @Value("${datax.job.triggerpool.slow.max}")
+    private int triggerPoolSlowMax;
+
+    @Value("${datax.job.logretentiondays}")
+    private int logretentiondays;
+
+    @Value("${datasource.aes.key}")
+    private String dataSourceAESKey;
+
+    // dao, service
+
+    @Resource
+    private JobLogMapper jobLogMapper;
+    @Resource
+    private JobInfoMapper jobInfoMapper;
+    @Resource
+    private JobRegistryMapper jobRegistryMapper;
+    @Resource
+    private JobGroupMapper jobGroupMapper;
+    @Resource
+    private JobLogReportMapper jobLogReportMapper;
+    @Resource
+    private JavaMailSender mailSender;
+    @Resource
+    private DataSource dataSource;
+    @Resource
+    private JobDatasourceMapper jobDatasourceMapper;
+
+    public String getI18n() {
+        return i18n;
+    }
+
+    public String getAccessToken() {
+        return accessToken;
+    }
+
+    public String getEmailUserName() {
+        return emailUserName;
+    }
+
+    public int getTriggerPoolFastMax() {
+        return triggerPoolFastMax < 200 ? 200 : triggerPoolFastMax;
+    }
+
+    public int getTriggerPoolSlowMax() {
+        return triggerPoolSlowMax < 100 ? 100 : triggerPoolSlowMax;
+    }
+
+    public int getLogretentiondays() {
+        return logretentiondays < 7 ? -1 : logretentiondays;
+    }
+
+    public JobLogMapper getJobLogMapper() {
+        return jobLogMapper;
+    }
+
+    public JobInfoMapper getJobInfoMapper() {
+        return jobInfoMapper;
+    }
+
+    public JobRegistryMapper getJobRegistryMapper() {
+        return jobRegistryMapper;
+    }
+
+    public JobGroupMapper getJobGroupMapper() {
+        return jobGroupMapper;
+    }
+
+    public JobLogReportMapper getJobLogReportMapper() {
+        return jobLogReportMapper;
+    }
+
+    public JavaMailSender getMailSender() {
+        return mailSender;
+    }
+
+    public DataSource getDataSource() {
+        return dataSource;
+    }
+
+    public JobDatasourceMapper getJobDatasourceMapper() {
+        return jobDatasourceMapper;
+    }
+
+    public String getDataSourceAESKey() {
+        return dataSourceAESKey;
+    }
+
+    public void setDataSourceAESKey(String dataSourceAESKey) {
+        this.dataSourceAESKey = dataSourceAESKey;
+    }
+}

File diff suppressed because it is too large
+ 1657 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/cron/CronExpression.java


+ 49 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/handler/AESEncryptHandler.java

@@ -0,0 +1,49 @@
+package com.wugui.datax.admin.core.handler;
+
+import com.wugui.datax.admin.util.AESUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ibatis.type.BaseTypeHandler;
+import org.apache.ibatis.type.JdbcType;
+import org.apache.ibatis.type.MappedTypes;
+
+import java.sql.CallableStatement;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+/**
+ * @author water
+ * @date 20-03-17  下午5:38
+ */
+@MappedTypes({String.class})
+public class AESEncryptHandler extends BaseTypeHandler<String> {
+
+
+    @Override
+    public void setNonNullParameter(PreparedStatement ps, int i, String parameter, JdbcType jdbcType) throws SQLException {
+        if(StringUtils.isNotBlank(parameter)){
+            ps.setString(i, AESUtil.encrypt(parameter));
+        }else{
+            ps.setString(i, null);
+        }
+    }
+
+    @Override
+    public String getNullableResult(ResultSet rs, String columnName) throws SQLException {
+        String columnValue = rs.getString(columnName);
+        return AESUtil.decrypt(columnValue);
+    }
+
+    @Override
+    public String getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
+        String columnValue = rs.getString(columnIndex);
+        return AESUtil.decrypt(columnValue);
+    }
+
+    @Override
+    public String getNullableResult(CallableStatement cs, int columnIndex)
+            throws SQLException {
+        String columnValue = cs.getString(columnIndex);
+        return AESUtil.decrypt(columnValue);
+    }
+}
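For the handler to take effect it has to be bound to a column mapping. A minimal sketch, assuming MyBatis-Plus annotation-driven mapping; the entity, table and field names below are illustrative, and autoResultMap = true is what lets the handler also run when reading query results in recent MyBatis-Plus versions.

    import com.baomidou.mybatisplus.annotation.TableField;
    import com.baomidou.mybatisplus.annotation.TableName;

    // Illustrative entity: the password column is transparently AES-encrypted on
    // write and decrypted on read via the type handler declared above.
    @TableName(value = "job_jdbc_datasource", autoResultMap = true)
    public class DatasourceEntity {
        @TableField(typeHandler = AESEncryptHandler.class)
        private String jdbcPassword;
        // other fields, getters and setters omitted
    }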

+ 36 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/handler/MybatisMetaObjectHandler.java

@@ -0,0 +1,36 @@
+package com.wugui.datax.admin.core.handler;
+
+import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.ibatis.reflection.MetaObject;
+import org.springframework.security.core.context.SecurityContext;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.stereotype.Component;
+
+import java.util.Date;
+
+/**
+ * Common automatic field filling, e.g. auto-populating createBy / createDate style fields
+ *
+ * @author huzekang
+ */
+@Component
+@Slf4j
+public class MybatisMetaObjectHandler implements MetaObjectHandler {
+
+    @Override
+    public void insertFill(MetaObject metaObject) {
+        setFieldValByName("createDate", new Date(), metaObject);
+        setFieldValByName("createBy", getCurrentUser(), metaObject);
+    }
+
+    @Override
+    public void updateFill(MetaObject metaObject) {
+        setFieldValByName("updateDate", new Date(), metaObject);
+        setFieldValByName("updateBy", getCurrentUser(), metaObject);
+    }
+
+    private String getCurrentUser() {
+        return SecurityContextHolder.getContext().getAuthentication().getPrincipal().toString();
+    }
+}
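On the entity side, the createBy/createDate style fields are typically marked with fill metadata so that MyBatis-Plus routes them through insertFill/updateFill above. A minimal sketch, assuming MyBatis-Plus fill annotations; the entity and field names are illustrative.

    import com.baomidou.mybatisplus.annotation.FieldFill;
    import com.baomidou.mybatisplus.annotation.TableField;
    import java.util.Date;

    // Illustrative entity: these annotations mark the fields that the handler
    // above populates via setFieldValByName on insert and update.
    public class AuditedEntity {
        @TableField(fill = FieldFill.INSERT)
        private String createBy;
        @TableField(fill = FieldFill.INSERT)
        private Date createDate;
        @TableField(fill = FieldFill.UPDATE)
        private String updateBy;
        @TableField(fill = FieldFill.UPDATE)
        private Date updateDate;
        // getters and setters omitted
    }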

+ 40 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/kill/KillJob.java

@@ -0,0 +1,40 @@
+package com.wugui.datax.admin.core.kill;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datatx.core.enums.ExecutorBlockStrategyEnum;
+import com.wugui.datatx.core.glue.GlueTypeEnum;
+import com.wugui.datax.admin.core.trigger.JobTrigger;
+
+import java.util.Date;
+
+/**
+ * datax-job trigger
+ * Created by jingwk on 2019/12/15.
+ */
+public class KillJob {
+
+    /**
+     * @param logId
+     * @param address
+     * @param processId
+     */
+    public static ReturnT<String> trigger(long logId, Date triggerTime, String address, String processId) {
+        ReturnT<String> triggerResult;
+        TriggerParam triggerParam = new TriggerParam();
+        triggerParam.setJobId(-1);
+        triggerParam.setExecutorHandler("killJobHandler");
+        triggerParam.setProcessId(processId);
+        triggerParam.setLogId(logId);
+        triggerParam.setGlueType(GlueTypeEnum.BEAN.getDesc());
+        triggerParam.setExecutorBlockStrategy(ExecutorBlockStrategyEnum.SERIAL_EXECUTION.getTitle());
+        triggerParam.setLogDateTime(triggerTime.getTime());
+        if (address != null) {
+            triggerResult = JobTrigger.runExecutor(triggerParam, address);
+        } else {
+            triggerResult = new ReturnT<>(ReturnT.FAIL_CODE, null);
+        }
+        return triggerResult;
+    }
+
+}

+ 48 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/ExecutorRouteStrategyEnum.java

@@ -0,0 +1,48 @@
+package com.wugui.datax.admin.core.route;
+
+import com.wugui.datax.admin.core.route.strategy.*;
+import com.wugui.datax.admin.core.util.I18nUtil;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public enum ExecutorRouteStrategyEnum {
+
+    FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()),
+    LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()),
+    ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()),
+    RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()),
+    CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()),
+    LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()),
+    LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()),
+    FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()),
+    BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()),
+    SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null);
+
+    ExecutorRouteStrategyEnum(String title, ExecutorRouter router) {
+        this.title = title;
+        this.router = router;
+    }
+
+    private String title;
+    private ExecutorRouter router;
+
+    public String getTitle() {
+        return title;
+    }
+    public ExecutorRouter getRouter() {
+        return router;
+    }
+
+    public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem){
+        if (name != null) {
+            for (ExecutorRouteStrategyEnum item: ExecutorRouteStrategyEnum.values()) {
+                if (item.name().equals(name)) {
+                    return item;
+                }
+            }
+        }
+        return defaultItem;
+    }
+
+}

+ 24 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/ExecutorRouter.java

@@ -0,0 +1,24 @@
+package com.wugui.datax.admin.core.route;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public abstract class ExecutorRouter {
+    protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class);
+
+    /**
+     * route address
+     *
+     * @param addressList
+     * @return  ReturnT.content=address
+     */
+    public abstract ReturnT<String> route(TriggerParam triggerParam, List<String> addressList);
+
+}

+ 47 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteBusyover.java

@@ -0,0 +1,47 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.ExecutorBiz;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+import com.wugui.datax.admin.core.scheduler.JobScheduler;
+import com.wugui.datax.admin.core.util.I18nUtil;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteBusyover extends ExecutorRouter {
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        StringBuffer idleBeatResultSB = new StringBuffer();
+        for (String address : addressList) {
+            // beat
+            ReturnT<String> idleBeatResult = null;
+            try {
+                ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
+                idleBeatResult = executorBiz.idleBeat(triggerParam.getJobId());
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+                idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e );
+            }
+            idleBeatResultSB.append( (idleBeatResultSB.length()>0)?"<br><br>":"")
+                    .append(I18nUtil.getString("jobconf_idleBeat") + ":")
+                    .append("<br>address:").append(address)
+                    .append("<br>code:").append(idleBeatResult.getCode())
+                    .append("<br>msg:").append(idleBeatResult.getMsg());
+
+            // beat success
+            if (idleBeatResult.getCode() == ReturnT.SUCCESS_CODE) {
+                idleBeatResult.setMsg(idleBeatResultSB.toString());
+                idleBeatResult.setContent(address);
+                return idleBeatResult;
+            }
+        }
+
+        return new ReturnT<String>(ReturnT.FAIL_CODE, idleBeatResultSB.toString());
+    }
+
+}

+ 85 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteConsistentHash.java

@@ -0,0 +1,85 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.io.UnsupportedEncodingException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+/**
+ * Within a group the executor address list is shared; different jobs are hashed evenly across the machines,
+ * so jobs are spread fairly over the group and each job is always scheduled on the same machine:
+ *      a. virtual nodes: smooth out the imbalance of plain hashing
+ *      b. a hash method instead of hashCode: String.hashCode may collide, so the hash value range has to be widened
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteConsistentHash extends ExecutorRouter {
+
+    private static int VIRTUAL_NODE_NUM = 100;
+
+    /**
+     * get hash code on the 2^32 ring (hash value computed with MD5)
+     * @param key
+     * @return
+     */
+    private static long hash(String key) {
+
+        // md5 byte
+        MessageDigest md5;
+        try {
+            md5 = MessageDigest.getInstance("MD5");
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException("MD5 not supported", e);
+        }
+        md5.reset();
+        byte[] keyBytes = null;
+        try {
+            keyBytes = key.getBytes("UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            throw new RuntimeException("UTF-8 encoding not supported for key: " + key, e);
+        }
+
+        md5.update(keyBytes);
+        byte[] digest = md5.digest();
+
+        // hash code, Truncate to 32-bits
+        long hashCode = ((long) (digest[3] & 0xFF) << 24)
+                | ((long) (digest[2] & 0xFF) << 16)
+                | ((long) (digest[1] & 0xFF) << 8)
+                | (digest[0] & 0xFF);
+
+        long truncateHashCode = hashCode & 0xffffffffL;
+        return truncateHashCode;
+    }
+
+    public String hashJob(int jobId, List<String> addressList) {
+
+        // ------A1------A2-------A3------
+        // -----------J1------------------
+        TreeMap<Long, String> addressRing = new TreeMap<Long, String>();
+        for (String address: addressList) {
+            for (int i = 0; i < VIRTUAL_NODE_NUM; i++) {
+                long addressHash = hash("SHARD-" + address + "-NODE-" + i);
+                addressRing.put(addressHash, address);
+            }
+        }
+
+        long jobHash = hash(String.valueOf(jobId));
+        SortedMap<Long, String> lastRing = addressRing.tailMap(jobHash);
+        if (!lastRing.isEmpty()) {
+            return lastRing.get(lastRing.firstKey());
+        }
+        return addressRing.firstEntry().getValue();
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = hashJob(triggerParam.getJobId(), addressList);
+        return new ReturnT<String>(address);
+    }
+
+}
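A small usage sketch of the ring, assuming ExecutorRouteConsistentHash is on the classpath (the addresses are made up): as long as the address list is unchanged, a given job id always resolves to the same executor, and adding or removing one address only remaps a fraction of the jobs.

    import java.util.Arrays;
    import java.util.List;

    public class ConsistentHashDemo {
        public static void main(String[] args) {
            ExecutorRouteConsistentHash router = new ExecutorRouteConsistentHash();
            List<String> executors = Arrays.asList(
                    "http://192.168.1.10:9999", "http://192.168.1.11:9999", "http://192.168.1.12:9999");
            System.out.println(router.hashJob(1, executors));   // stable address for job 1
            System.out.println(router.hashJob(1, executors));   // same address again
            System.out.println(router.hashJob(2, executors));   // job 2 may land on a different executor
        }
    }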

+ 48 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteFailover.java

@@ -0,0 +1,48 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.ExecutorBiz;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+import com.wugui.datax.admin.core.scheduler.JobScheduler;
+import com.wugui.datax.admin.core.util.I18nUtil;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteFailover extends ExecutorRouter {
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+
+        StringBuffer beatResultSB = new StringBuffer();
+        for (String address : addressList) {
+            // beat
+            ReturnT<String> beatResult = null;
+            try {
+                ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
+                beatResult = executorBiz.beat();
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, ""+e );
+            }
+            beatResultSB.append( (beatResultSB.length()>0)?"<br><br>":"")
+                    .append(I18nUtil.getString("jobconf_beat") + ":")
+                    .append("<br>address:").append(address)
+                    .append("<br>code:").append(beatResult.getCode())
+                    .append("<br>msg:").append(beatResult.getMsg());
+
+            // beat success
+            if (beatResult.getCode() == ReturnT.SUCCESS_CODE) {
+
+                beatResult.setMsg(beatResultSB.toString());
+                beatResult.setContent(address);
+                return beatResult;
+            }
+        }
+        return new ReturnT<String>(ReturnT.FAIL_CODE, beatResultSB.toString());
+
+    }
+}

+ 19 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteFirst.java

@@ -0,0 +1,19 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteFirst extends ExecutorRouter {
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList){
+        return new ReturnT<String>(addressList.get(0));
+    }
+
+}

+ 79 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLFU.java

@@ -0,0 +1,79 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Among the executors of a single job, the least frequently used one is elected first
+ *      a(*). LFU (Least Frequently Used): least often used, by frequency/usage count
+ *      b. LRU (Least Recently Used): least recently used, by time
+ *
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteLFU extends ExecutorRouter {
+
+    private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
+    private static long CACHE_VALID_TIME = 0;
+
+    public String route(int jobId, List<String> addressList) {
+
+        // cache clear
+        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
+            jobLfuMap.clear();
+            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
+        }
+
+        // lfu item init
+        HashMap<String, Integer> lfuItemMap = jobLfuMap.get(jobId);     // key ordering could use a TreeMap with a Comparator; ordering by value currently needs an ArrayList
+        if (lfuItemMap == null) {
+            lfuItemMap = new HashMap<String, Integer>();
+            jobLfuMap.putIfAbsent(jobId, lfuItemMap);   // putIfAbsent, to avoid overwriting a concurrently inserted map
+        }
+
+        // put new
+        for (String address: addressList) {
+            if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) >1000000 ) {
+                lfuItemMap.put(address, new Random().nextInt(addressList.size()));  // seed with a random count on init to spread the first-hit load
+            }
+        }
+        // remove old
+        List<String> delKeys = new ArrayList<>();
+        for (String existKey: lfuItemMap.keySet()) {
+            if (!addressList.contains(existKey)) {
+                delKeys.add(existKey);
+            }
+        }
+        if (delKeys.size() > 0) {
+            for (String delKey: delKeys) {
+                lfuItemMap.remove(delKey);
+            }
+        }
+
+        // pick the address with the least used count
+        List<Map.Entry<String, Integer>> lfuItemList = new ArrayList<Map.Entry<String, Integer>>(lfuItemMap.entrySet());
+        Collections.sort(lfuItemList, new Comparator<Map.Entry<String, Integer>>() {
+            @Override
+            public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
+                return o1.getValue().compareTo(o2.getValue());
+            }
+        });
+
+        Map.Entry<String, Integer> addressItem = lfuItemList.get(0);
+        addressItem.setValue(addressItem.getValue() + 1);
+
+        return addressItem.getKey();
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = route(triggerParam.getJobId(), addressList);
+        return new ReturnT<String>(address);
+    }
+
+}
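A quick sanity check of the selection logic, assuming ExecutorRouteLFU is on the classpath (the addresses are made up): each call returns the address with the lowest counter and bumps it, so over time the addresses are chosen about equally often.

    import java.util.Arrays;
    import java.util.List;

    public class LfuRouteDemo {
        public static void main(String[] args) {
            ExecutorRouteLFU lfu = new ExecutorRouteLFU();
            List<String> executors = Arrays.asList("http://10.0.0.1:9999", "http://10.0.0.2:9999");
            for (int i = 0; i < 4; i++) {
                // returns the least-used address for job 1 and increments its counter
                System.out.println(lfu.route(1, executors));
            }
        }
    }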

+ 76 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLRU.java

@@ -0,0 +1,76 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Among the executors of a single job, the least recently used one is elected first
+ *      a. LFU (Least Frequently Used): least often used, by frequency/usage count
+ *      b(*). LRU (Least Recently Used): least recently used, by time
+ *
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteLRU extends ExecutorRouter {
+
+    private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
+    private static long CACHE_VALID_TIME = 0;
+
+    public String route(int jobId, List<String> addressList) {
+
+        // cache clear
+        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
+            jobLRUMap.clear();
+            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
+        }
+
+        // init lru
+        LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
+        if (lruItem == null) {
+            /**
+             * LinkedHashMap
+             *      a. accessOrder: true = ordered by access (re-ordered on get/put); false = ordered by insertion
+             *      b. removeEldestEntry: called whenever a new entry is added; returning true removes the eldest entry.
+             *         Wrapping a LinkedHashMap and overriding this method (e.g. with a max capacity, returning true once
+             *         it is exceeded) yields a fixed-size LRU cache.
+             */
+            lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
+            jobLRUMap.putIfAbsent(jobId, lruItem);
+        }
+
+        // put new
+        for (String address: addressList) {
+            if (!lruItem.containsKey(address)) {
+                lruItem.put(address, address);
+            }
+        }
+        // remove old
+        List<String> delKeys = new ArrayList<>();
+        for (String existKey: lruItem.keySet()) {
+            if (!addressList.contains(existKey)) {
+                delKeys.add(existKey);
+            }
+        }
+        if (delKeys.size() > 0) {
+            for (String delKey: delKeys) {
+                lruItem.remove(delKey);
+            }
+        }
+
+        // load
+        String eldestKey = lruItem.entrySet().iterator().next().getKey();
+        String eldestValue = lruItem.get(eldestKey);
+        return eldestValue;
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = route(triggerParam.getJobId(), addressList);
+        return new ReturnT<String>(address);
+    }
+
+}
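The block comment inside route() notes that a fixed-capacity LRU can be built by overriding removeEldestEntry; a standalone sketch of that idea (not part of this commit):

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Minimal fixed-capacity LRU cache built the way the comment describes:
    // accessOrder = true reorders entries on get/put, and removeEldestEntry
    // evicts the least recently used entry once the capacity is exceeded.
    public class LruCache<K, V> extends LinkedHashMap<K, V> {
        private final int capacity;

        public LruCache(int capacity) {
            super(16, 0.75f, true);   // true = access-order iteration
            this.capacity = capacity;
        }

        @Override
        protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
            return size() > capacity; // evict once the capacity is exceeded
        }
    }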

+ 19 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteLast.java

@@ -0,0 +1,19 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteLast extends ExecutorRouter {
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        return new ReturnT<String>(addressList.get(addressList.size()-1));
+    }
+
+}

+ 23 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteRandom.java

@@ -0,0 +1,23 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.List;
+import java.util.Random;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteRandom extends ExecutorRouter {
+
+    private static Random localRandom = new Random();
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = addressList.get(localRandom.nextInt(addressList.size()));
+        return new ReturnT<String>(address);
+    }
+
+}

+ 39 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/route/strategy/ExecutorRouteRound.java

@@ -0,0 +1,39 @@
+package com.wugui.datax.admin.core.route.strategy;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datax.admin.core.route.ExecutorRouter;
+
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Created by xuxueli on 17/3/10.
+ */
+public class ExecutorRouteRound extends ExecutorRouter {
+
+    private static ConcurrentMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>();
+    private static long CACHE_VALID_TIME = 0;
+    private static int count(int jobId) {
+        // cache clear
+        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
+            routeCountEachJob.clear();
+            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
+        }
+
+        // count++
+        Integer count = routeCountEachJob.get(jobId);
+        count = (count==null || count>1000000)?(new Random().nextInt(100)):++count;  // seed with a random value on init to spread the first-hit load
+        routeCountEachJob.put(jobId, count);
+        return count;
+    }
+
+    @Override
+    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
+        String address = addressList.get(count(triggerParam.getJobId())%addressList.size());
+        return new ReturnT<String>(address);
+    }
+
+}

+ 113 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/scheduler/JobScheduler.java

@@ -0,0 +1,113 @@
+package com.wugui.datax.admin.core.scheduler;
+
+import com.wugui.datatx.core.biz.ExecutorBiz;
+import com.wugui.datatx.core.enums.ExecutorBlockStrategyEnum;
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.thread.*;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.rpc.remoting.invoker.call.CallType;
+import com.wugui.datax.rpc.remoting.invoker.reference.XxlRpcReferenceBean;
+import com.wugui.datax.rpc.remoting.invoker.route.LoadBalance;
+import com.wugui.datax.rpc.remoting.net.impl.netty_http.client.NettyHttpClient;
+import com.wugui.datax.rpc.serialize.impl.HessianSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * @author xuxueli 2018-10-28 00:18:17
+ */
+
+public class JobScheduler {
+    private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);
+
+
+    public void init() throws Exception {
+        // init i18n
+        initI18n();
+
+        // admin registry monitor run
+        JobRegistryMonitorHelper.getInstance().start();
+
+        // admin monitor run
+        JobFailMonitorHelper.getInstance().start();
+
+        // admin trigger pool start
+        JobTriggerPoolHelper.toStart();
+
+        // admin log report start
+        JobLogReportHelper.getInstance().start();
+
+        // start-schedule
+        JobScheduleHelper.getInstance().start();
+
+        logger.info(">>>>>>>>> init datax-web admin success.");
+    }
+
+
+    public void destroy() throws Exception {
+
+        // stop-schedule
+        JobScheduleHelper.getInstance().toStop();
+
+        // admin log report stop
+        JobLogReportHelper.getInstance().toStop();
+
+        // admin trigger pool stop
+        JobTriggerPoolHelper.toStop();
+
+        // admin monitor stop
+        JobFailMonitorHelper.getInstance().toStop();
+
+        // admin registry stop
+        JobRegistryMonitorHelper.getInstance().toStop();
+
+    }
+
+    // ---------------------- I18n ----------------------
+
+    private void initI18n() {
+        for (ExecutorBlockStrategyEnum item : ExecutorBlockStrategyEnum.values()) {
+            item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
+        }
+    }
+
+    // ---------------------- executor-client ----------------------
+    private static ConcurrentMap<String, ExecutorBiz> executorBizRepository = new ConcurrentHashMap<>();
+
+    public static ExecutorBiz getExecutorBiz(String address) throws Exception {
+        // valid
+        if (address == null || address.trim().length() == 0) {
+            return null;
+        }
+
+        // load-cache
+        address = address.trim();
+        ExecutorBiz executorBiz = executorBizRepository.get(address);
+        if (executorBiz != null) {
+            return executorBiz;
+        }
+
+        // set-cache
+        XxlRpcReferenceBean referenceBean = new XxlRpcReferenceBean();
+        referenceBean.setClient(NettyHttpClient.class);
+        referenceBean.setSerializer(HessianSerializer.class);
+        referenceBean.setCallType(CallType.SYNC);
+        referenceBean.setLoadBalance(LoadBalance.ROUND);
+        referenceBean.setIface(ExecutorBiz.class);
+        referenceBean.setVersion(null);
+        referenceBean.setTimeout(3000);
+        referenceBean.setAddress(address);
+        referenceBean.setAccessToken(JobAdminConfig.getAdminConfig().getAccessToken());
+        referenceBean.setInvokeCallback(null);
+        referenceBean.setInvokerFactory(null);
+
+        executorBiz = (ExecutorBiz) referenceBean.getObject();
+
+        executorBizRepository.put(address, executorBiz);
+        return executorBiz;
+    }
+
+}
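getExecutorBiz uses a check-then-put cache, so two concurrent callers may briefly build a client for the same address; that is harmless here because the last put simply wins. For reference, the same cache-aside idea can also be expressed with computeIfAbsent so the factory runs at most once per key. A generic sketch, not a drop-in replacement for the method above:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.function.Function;

    // Generic illustration: computeIfAbsent guarantees the factory is invoked
    // at most once per key, even under concurrent access.
    public class ClientCache<T> {
        private final ConcurrentMap<String, T> cache = new ConcurrentHashMap<>();

        public T get(String address, Function<String, T> factory) {
            return cache.computeIfAbsent(address.trim(), factory);
        }
    }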

+ 209 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobFailMonitorHelper.java

@@ -0,0 +1,209 @@
+package com.wugui.datax.admin.core.thread;
+
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.trigger.TriggerTypeEnum;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobGroup;
+import com.wugui.datax.admin.entity.JobInfo;
+import com.wugui.datax.admin.entity.JobLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.mail.javamail.MimeMessageHelper;
+
+import javax.mail.internet.MimeMessage;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * job monitor instance
+ *
+ * @author xuxueli 2015-9-1 18:05:56
+ */
+public class JobFailMonitorHelper {
+	private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class);
+
+	private static JobFailMonitorHelper instance = new JobFailMonitorHelper();
+	public static JobFailMonitorHelper getInstance(){
+		return instance;
+	}
+
+	// ---------------------- monitor ----------------------
+
+	private Thread monitorThread;
+	private volatile boolean toStop = false;
+	public void start(){
+		monitorThread = new Thread(new Runnable() {
+
+			@Override
+			public void run() {
+
+				// monitor
+				while (!toStop) {
+					try {
+
+						List<Long> failLogIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findFailJobLogIds(1000);
+						if (failLogIds!=null && !failLogIds.isEmpty()) {
+							for (long failLogId: failLogIds) {
+
+								// lock log
+								int lockRet = JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, 0, -1);
+								if (lockRet < 1) {
+									continue;
+								}
+								JobLog log = JobAdminConfig.getAdminConfig().getJobLogMapper().load(failLogId);
+								JobInfo info = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(log.getJobId());
+
+								// 1、fail retry monitor
+								if (log.getExecutorFailRetryCount() > 0) {
+									JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam());
+									String retryMsg = "<br><br><span style=\"color:#F39C12;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<< </span><br>";
+									log.setTriggerMsg(log.getTriggerMsg() + retryMsg);
+									JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(log);
+								}
+
+								// 2、fail alarm monitor
+								int newAlarmStatus = 0;		// alarm status: 0-default, -1-locked, 1-no alarm needed, 2-alarm sent, 3-alarm failed
+								if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {
+									boolean alarmResult = true;
+									try {
+										alarmResult = failAlarm(info, log);
+									} catch (Exception e) {
+										alarmResult = false;
+										logger.error(e.getMessage(), e);
+									}
+									newAlarmStatus = alarmResult?2:3;
+								} else {
+									newAlarmStatus = 1;
+								}
+
+								JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, -1, newAlarmStatus);
+							}
+						}
+
+					} catch (Exception e) {
+						if (!toStop) {
+							logger.error(">>>>>>>>>>> datax-web, job fail monitor thread error:", e);
+						}
+					}
+
+                    try {
+                        TimeUnit.SECONDS.sleep(10);
+                    } catch (Exception e) {
+                        if (!toStop) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+
+                }
+
+				logger.info(">>>>>>>>>>> datax-web, job fail monitor thread stop");
+
+			}
+		});
+		monitorThread.setDaemon(true);
+		monitorThread.setName("datax-web, admin JobFailMonitorHelper");
+		monitorThread.start();
+	}
+
+	public void toStop(){
+		toStop = true;
+		// interrupt and wait
+		monitorThread.interrupt();
+		try {
+			monitorThread.join();
+		} catch (InterruptedException e) {
+			logger.error(e.getMessage(), e);
+		}
+	}
+
+
+	// ---------------------- alarm ----------------------
+
+	// email alarm template
+	private static final String mailBodyTemplate = "<h5>" + I18nUtil.getString("jobconf_monitor_detail") + ":</span>" +
+			"<table border=\"1\" cellpadding=\"3\" style=\"border-collapse:collapse; width:80%;\" >\n" +
+			"   <thead style=\"font-weight: bold;color: #ffffff;background-color: #ff8c00;\" >" +
+			"      <tr>\n" +
+			"         <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobgroup") +"</td>\n" +
+			"         <td width=\"10%\" >"+ I18nUtil.getString("jobinfo_field_id") +"</td>\n" +
+			"         <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobdesc") +"</td>\n" +
+			"         <td width=\"10%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_title") +"</td>\n" +
+			"         <td width=\"40%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_content") +"</td>\n" +
+			"      </tr>\n" +
+			"   </thead>\n" +
+			"   <tbody>\n" +
+			"      <tr>\n" +
+			"         <td>{0}</td>\n" +
+			"         <td>{1}</td>\n" +
+			"         <td>{2}</td>\n" +
+			"         <td>"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"</td>\n" +
+			"         <td>{3}</td>\n" +
+			"      </tr>\n" +
+			"   </tbody>\n" +
+			"</table>";
+
+	/**
+	 * fail alarm
+	 *
+	 * @param jobLog
+	 */
+	private boolean failAlarm(JobInfo info, JobLog jobLog){
+		boolean alarmResult = true;
+
+		// send monitor email
+		if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {
+
+			// alarmContent
+			String alarmContent = "Alarm Job LogId=" + jobLog.getId();
+			if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) {
+				alarmContent += "<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
+			}
+			if (jobLog.getHandleCode()>0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
+				alarmContent += "<br>HandleCode=" + jobLog.getHandleMsg();
+			}
+
+			// email info
+			JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(Integer.valueOf(info.getJobGroup()));
+			String personal = I18nUtil.getString("admin_name_full");
+			String title = I18nUtil.getString("jobconf_monitor");
+			String content = MessageFormat.format(mailBodyTemplate,
+					group!=null?group.getTitle():"null",
+					info.getId(),
+					info.getJobDesc(),
+					alarmContent);
+
+			Set<String> emailSet = new HashSet<String>(Arrays.asList(info.getAlarmEmail().split(",")));
+			for (String email: emailSet) {
+
+				// make mail
+				try {
+					MimeMessage mimeMessage = JobAdminConfig.getAdminConfig().getMailSender().createMimeMessage();
+
+					MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true);
+					helper.setFrom(JobAdminConfig.getAdminConfig().getEmailUserName(), personal);
+					helper.setTo(email);
+					helper.setSubject(title);
+					helper.setText(content, true);
+
+					JobAdminConfig.getAdminConfig().getMailSender().send(mimeMessage);
+				} catch (Exception e) {
+					logger.error(">>>>>>>>>>> datax-web, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e);
+
+					alarmResult = false;
+				}
+
+			}
+		}
+
+		// do something, custom alarm strategy, such as sms
+
+
+		return alarmResult;
+	}
+
+}

+ 152 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobLogReportHelper.java

@@ -0,0 +1,152 @@
+package com.wugui.datax.admin.core.thread;
+
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.entity.JobLogReport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * job log report helper
+ *
+ * @author xuxueli 2019-11-22
+ */
+public class JobLogReportHelper {
+    private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class);
+
+    private static JobLogReportHelper instance = new JobLogReportHelper();
+    public static JobLogReportHelper getInstance(){
+        return instance;
+    }
+
+
+    private Thread logrThread;
+    private volatile boolean toStop = false;
+    public void start(){
+        logrThread = new Thread(new Runnable() {
+
+            @Override
+            public void run() {
+
+                // last clean log time
+                long lastCleanLogTime = 0;
+
+
+                while (!toStop) {
+
+                    // 1、log-report refresh: refresh log report in 3 days
+                    try {
+
+                        for (int i = 0; i < 3; i++) {
+
+                            // today
+                            Calendar itemDay = Calendar.getInstance();
+                            itemDay.add(Calendar.DAY_OF_MONTH, -i);
+                            itemDay.set(Calendar.HOUR_OF_DAY, 0);
+                            itemDay.set(Calendar.MINUTE, 0);
+                            itemDay.set(Calendar.SECOND, 0);
+                            itemDay.set(Calendar.MILLISECOND, 0);
+
+                            Date todayFrom = itemDay.getTime();
+
+                            itemDay.set(Calendar.HOUR_OF_DAY, 23);
+                            itemDay.set(Calendar.MINUTE, 59);
+                            itemDay.set(Calendar.SECOND, 59);
+                            itemDay.set(Calendar.MILLISECOND, 999);
+
+                            Date todayTo = itemDay.getTime();
+
+                            // refresh log-report every minute
+                            JobLogReport xxlJobLogReport = new JobLogReport();
+                            xxlJobLogReport.setTriggerDay(todayFrom);
+                            xxlJobLogReport.setRunningCount(0);
+                            xxlJobLogReport.setSucCount(0);
+                            xxlJobLogReport.setFailCount(0);
+
+                            Map<String, Object> triggerCountMap = JobAdminConfig.getAdminConfig().getJobLogMapper().findLogReport(todayFrom, todayTo);
+                            if (triggerCountMap!=null && triggerCountMap.size()>0) {
+                                int triggerDayCount = triggerCountMap.containsKey("triggerDayCount")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))):0;
+                                int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))):0;
+                                int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))):0;
+                                int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc;
+
+                                xxlJobLogReport.setRunningCount(triggerDayCountRunning);
+                                xxlJobLogReport.setSucCount(triggerDayCountSuc);
+                                xxlJobLogReport.setFailCount(triggerDayCountFail);
+                            }
+
+                            // do refresh
+                            int ret = JobAdminConfig.getAdminConfig().getJobLogReportMapper().update(xxlJobLogReport);
+                            if (ret < 1) {
+                                JobAdminConfig.getAdminConfig().getJobLogReportMapper().save(xxlJobLogReport);
+                            }
+                        }
+
+                    } catch (Exception e) {
+                        if (!toStop) {
+                            logger.error(">>>>>>>>>>> datax-web, job log report thread error:{}", e);
+                        }
+                    }
+
+                    // 2、log-clean: switch open & once each day
+                    if (JobAdminConfig.getAdminConfig().getLogretentiondays()>0
+                            && System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) {
+
+                        // expire-time
+                        Calendar expiredDay = Calendar.getInstance();
+                        expiredDay.add(Calendar.DAY_OF_MONTH, -1 * JobAdminConfig.getAdminConfig().getLogretentiondays());
+                        expiredDay.set(Calendar.HOUR_OF_DAY, 0);
+                        expiredDay.set(Calendar.MINUTE, 0);
+                        expiredDay.set(Calendar.SECOND, 0);
+                        expiredDay.set(Calendar.MILLISECOND, 0);
+                        Date clearBeforeTime = expiredDay.getTime();
+
+                        // clean expired log
+                        List<Long> logIds = null;
+                        do {
+                            logIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findClearLogIds(0, 0, clearBeforeTime, 0, 1000);
+                            if (logIds!=null && logIds.size()>0) {
+                                JobAdminConfig.getAdminConfig().getJobLogMapper().clearLog(logIds);
+                            }
+                        } while (logIds!=null && logIds.size()>0);
+
+                        // update clean time
+                        lastCleanLogTime = System.currentTimeMillis();
+                    }
+
+                    try {
+                        TimeUnit.MINUTES.sleep(1);
+                    } catch (Exception e) {
+                        if (!toStop) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+
+                }
+
+                logger.info(">>>>>>>>>>> datax-web, job log report thread stop");
+
+            }
+        });
+        logrThread.setDaemon(true);
+        logrThread.setName("datax-web, admin JobLogReportHelper");
+        logrThread.start();
+    }
+
+    public void toStop(){
+        toStop = true;
+        // interrupt and wait
+        logrThread.interrupt();
+        try {
+            logrThread.join();
+        } catch (InterruptedException e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
+}

+ 108 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobRegistryMonitorHelper.java

@@ -0,0 +1,108 @@
+package com.wugui.datax.admin.core.thread;
+
+import com.wugui.datatx.core.enums.RegistryConfig;
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.entity.JobGroup;
+import com.wugui.datax.admin.entity.JobRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * job registry instance
+ * @author xuxueli 2016-10-02 19:10:24
+ */
+public class JobRegistryMonitorHelper {
+	private static Logger logger = LoggerFactory.getLogger(JobRegistryMonitorHelper.class);
+
+	private static JobRegistryMonitorHelper instance = new JobRegistryMonitorHelper();
+	public static JobRegistryMonitorHelper getInstance(){
+		return instance;
+	}
+
+	private Thread registryThread;
+	private volatile boolean toStop = false;
+	public void start(){
+		registryThread = new Thread(() -> {
+			while (!toStop) {
+				try {
+					// auto registry group
+					List<JobGroup> groupList = JobAdminConfig.getAdminConfig().getJobGroupMapper().findByAddressType(0);
+					if (groupList!=null && !groupList.isEmpty()) {
+
+						// remove dead address (admin/executor)
+						List<Integer> ids = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findDead(RegistryConfig.DEAD_TIMEOUT, new Date());
+						if (ids!=null && ids.size()>0) {
+							JobAdminConfig.getAdminConfig().getJobRegistryMapper().removeDead(ids);
+						}
+
+						// fresh online address (admin/executor)
+						HashMap<String, List<String>> appAddressMap = new HashMap<>();
+						List<JobRegistry> list = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
+						if (list != null) {
+							for (JobRegistry item: list) {
+								if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
+									String appName = item.getRegistryKey();
+									List<String> registryList = appAddressMap.get(appName);
+									if (registryList == null) {
+										registryList = new ArrayList<>();
+									}
+
+									if (!registryList.contains(item.getRegistryValue())) {
+										registryList.add(item.getRegistryValue());
+									}
+									appAddressMap.put(appName, registryList);
+								}
+							}
+						}
+
+						// fresh group address
+						for (JobGroup group: groupList) {
+							List<String> registryList = appAddressMap.get(group.getAppName());
+							String addressListStr = null;
+							if (registryList!=null && !registryList.isEmpty()) {
+								Collections.sort(registryList);
+								addressListStr = "";
+								for (String item:registryList) {
+									addressListStr += item + ",";
+								}
+								addressListStr = addressListStr.substring(0, addressListStr.length()-1);
+							}
+							group.setAddressList(addressListStr);
+							JobAdminConfig.getAdminConfig().getJobGroupMapper().update(group);
+						}
+					}
+				} catch (Exception e) {
+					if (!toStop) {
+						logger.error(">>>>>>>>>>> datax-web, job registry monitor thread error:", e);
+					}
+				}
+				try {
+					TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
+				} catch (InterruptedException e) {
+					if (!toStop) {
+						logger.error(">>>>>>>>>>> datax-web, job registry monitor thread error:", e);
+					}
+				}
+			}
+			logger.info(">>>>>>>>>>> datax-web, job registry monitor thread stop");
+		});
+		registryThread.setDaemon(true);
+		registryThread.setName("datax-web, admin JobRegistryMonitorHelper");
+		registryThread.start();
+	}
+
+	public void toStop(){
+		toStop = true;
+		// interrupt and wait
+		registryThread.interrupt();
+		try {
+			registryThread.join();
+		} catch (InterruptedException e) {
+			logger.error(e.getMessage(), e);
+		}
+	}
+
+}
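The group-address refresh above sorts the registry list and joins it with commas by hand; an equivalent sketch using String.join, shown only for illustration:

    // same result as the manual concatenation, for a non-empty registry list
    Collections.sort(registryList);
    String addressListStr = String.join(",", registryList);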

+ 352 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobScheduleHelper.java

@@ -0,0 +1,352 @@
+package com.wugui.datax.admin.core.thread;
+
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.cron.CronExpression;
+import com.wugui.datax.admin.core.trigger.TriggerTypeEnum;
+import com.wugui.datax.admin.entity.JobInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @author xuxueli 2019-05-21
+ */
+public class JobScheduleHelper {
+    private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class);
+
+    private static JobScheduleHelper instance = new JobScheduleHelper();
+
+    public static JobScheduleHelper getInstance() {
+        return instance;
+    }
+
+    public static final long PRE_READ_MS = 5000;    // pre read
+
+    private Thread scheduleThread;
+    private Thread ringThread;
+    private volatile boolean scheduleThreadToStop = false;
+    private volatile boolean ringThreadToStop = false;
+    private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>();
+
+    public void start() {
+
+        // schedule thread
+        scheduleThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+
+                try {
+                    TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis() % 1000);
+                } catch (InterruptedException e) {
+                    if (!scheduleThreadToStop) {
+                        logger.error(e.getMessage(), e);
+                    }
+                }
+                logger.info(">>>>>>>>> init datax-web admin scheduler success.");
+
+                // pre-read count: threadpool-size * trigger-qps (each trigger costs about 50ms, qps = 1000/50 = 20)
+                int preReadCount = (JobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20;
+
+                while (!scheduleThreadToStop) {
+
+                    // Scan Job
+                    long start = System.currentTimeMillis();
+
+                    Connection conn = null;
+                    Boolean connAutoCommit = null;
+                    PreparedStatement preparedStatement = null;
+
+                    boolean preReadSuc = true;
+                    try {
+
+                        conn = JobAdminConfig.getAdminConfig().getDataSource().getConnection();
+                        connAutoCommit = conn.getAutoCommit();
+                        conn.setAutoCommit(false);
+
+                        preparedStatement = conn.prepareStatement("select * from job_lock where lock_name = 'schedule_lock' for update");
+                        preparedStatement.execute();
+
+                        // tx start
+
+                        // 1、pre read
+                        long nowTime = System.currentTimeMillis();
+                        List<JobInfo> scheduleList = JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount);
+                        if (scheduleList != null && scheduleList.size() > 0) {
+                            // 2、push time-ring
+                            for (JobInfo jobInfo : scheduleList) {
+
+                                // time-ring jump
+                                if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) {
+                                    // 2.1、trigger-expire > 5s:pass && make next-trigger-time
+                                    logger.warn(">>>>>>>>>>> datax-web, schedule misfire, jobId = " + jobInfo.getId());
+
+                                    // fresh next
+                                    refreshNextValidTime(jobInfo, new Date());
+
+                                } else if (nowTime > jobInfo.getTriggerNextTime()) {
+                                    // 2.2、trigger-expire < 5s:direct-trigger && make next-trigger-time
+
+                                    // 1、trigger
+                                    JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null);
+                                    logger.debug(">>>>>>>>>>> datax-web, schedule push trigger : jobId = " + jobInfo.getId());
+
+                                    // 2、fresh next
+                                    refreshNextValidTime(jobInfo, new Date());
+
+                                    // next-trigger-time in 5s, pre-read again
+                                    if (jobInfo.getTriggerStatus() == 1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) {
+
+                                        // 1、make ring second
+                                        int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
+
+                                        // 2、push time ring
+                                        pushTimeRing(ringSecond, jobInfo.getId());
+
+                                        // 3、fresh next
+                                        refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
+
+                                    }
+
+                                } else {
+                                    // 2.3、trigger-pre-read:time-ring trigger && make next-trigger-time
+
+                                    // 1、make ring second
+                                    int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
+
+                                    // 2、push time ring
+                                    pushTimeRing(ringSecond, jobInfo.getId());
+
+                                    // 3、fresh next
+                                    refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
+
+                                }
+
+                            }
+
+                            // 3、update trigger info
+                            for (JobInfo jobInfo : scheduleList) {
+                                JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleUpdate(jobInfo);
+                            }
+
+                        } else {
+                            preReadSuc = false;
+                        }
+
+                        // tx stop
+
+
+                    } catch (Exception e) {
+                        if (!scheduleThreadToStop) {
+                            logger.error(">>>>>>>>>>> datax-web, JobScheduleHelper#scheduleThread error:", e);
+                        }
+                    } finally {
+
+                        // commit
+                        if (conn != null) {
+                            try {
+                                conn.commit();
+                            } catch (SQLException e) {
+                                if (!scheduleThreadToStop) {
+                                    logger.error(e.getMessage(), e);
+                                }
+                            }
+                            try {
+                                conn.setAutoCommit(connAutoCommit);
+                            } catch (SQLException e) {
+                                if (!scheduleThreadToStop) {
+                                    logger.error(e.getMessage(), e);
+                                }
+                            }
+                            try {
+                                conn.close();
+                            } catch (SQLException e) {
+                                if (!scheduleThreadToStop) {
+                                    logger.error(e.getMessage(), e);
+                                }
+                            }
+                        }
+
+                        // close PreparedStatement
+                        if (null != preparedStatement) {
+                            try {
+                                preparedStatement.close();
+                            } catch (SQLException e) {
+                                if (!scheduleThreadToStop) {
+                                    logger.error(e.getMessage(), e);
+                                }
+                            }
+                        }
+                    }
+                    long cost = System.currentTimeMillis() - start;
+
+
+                    // Wait seconds, align second
+                    if (cost < 1000) {  // scan finished within 1s: wait and align to the next second; otherwise skip waiting
+                        try {
+                            // pre-read succeeded: scan again next second; failed: sleep through a full pre-read period
+                            TimeUnit.MILLISECONDS.sleep((preReadSuc ? 1000 : PRE_READ_MS) - System.currentTimeMillis() % 1000);
+                        } catch (InterruptedException e) {
+                            if (!scheduleThreadToStop) {
+                                logger.error(e.getMessage(), e);
+                            }
+                        }
+                    }
+
+                }
+
+                logger.info(">>>>>>>>>>> datax-web, JobScheduleHelper#scheduleThread stop");
+            }
+        });
+        scheduleThread.setDaemon(true);
+        scheduleThread.setName("datax-web, admin JobScheduleHelper#scheduleThread");
+        scheduleThread.start();
+
+
+        // ring thread
+        ringThread = new Thread(() -> {
+
+            // align second
+            try {
+                TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
+            } catch (InterruptedException e) {
+                if (!ringThreadToStop) {
+                    logger.error(e.getMessage(), e);
+                }
+            }
+
+            while (!ringThreadToStop) {
+
+                try {
+                    // second data
+                    List<Integer> ringItemData = new ArrayList<>();
+                    int nowSecond = Calendar.getInstance().get(Calendar.SECOND);   // also check the previous slot, in case processing ran long and a tick was skipped
+                    for (int i = 0; i < 2; i++) {
+                        List<Integer> tmpData = ringData.remove((nowSecond + 60 - i) % 60);
+                        if (tmpData != null) {
+                            ringItemData.addAll(tmpData);
+                        }
+                    }
+
+                    // ring trigger
+                    logger.debug(">>>>>>>>>>> datax-web, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData));
+                    if (ringItemData.size() > 0) {
+                        // do trigger
+                        for (int jobId : ringItemData) {
+                            // do trigger
+                            JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
+                        }
+                        // clear
+                        ringItemData.clear();
+                    }
+                } catch (Exception e) {
+                    if (!ringThreadToStop) {
+                        logger.error(">>>>>>>>>>> datax-web, JobScheduleHelper#ringThread error:", e);
+                    }
+                }
+
+                // next second, align second
+                try {
+                    TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
+                } catch (InterruptedException e) {
+                    if (!ringThreadToStop) {
+                        logger.error(e.getMessage(), e);
+                    }
+                }
+            }
+            logger.info(">>>>>>>>>>> datax-web, JobScheduleHelper#ringThread stop");
+        });
+        ringThread.setDaemon(true);
+        ringThread.setName("datax-web, admin JobScheduleHelper#ringThread");
+        ringThread.start();
+    }
+
+    private void refreshNextValidTime(JobInfo jobInfo, Date fromTime) throws ParseException {
+        Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(fromTime);
+        if (nextValidTime != null) {
+            jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
+            jobInfo.setTriggerNextTime(nextValidTime.getTime());
+        } else {
+            jobInfo.setTriggerStatus(0);
+            jobInfo.setTriggerLastTime(0);
+            jobInfo.setTriggerNextTime(0);
+        }
+    }
+
+    private void pushTimeRing(int ringSecond, int jobId) {
+        // push async ring
+        List<Integer> ringItemData = ringData.get(ringSecond);
+        if (ringItemData == null) {
+            ringItemData = new ArrayList<Integer>();
+            ringData.put(ringSecond, ringItemData);
+        }
+        ringItemData.add(jobId);
+
+        logger.debug(">>>>>>>>>>> datax-web, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData));
+    }
+
+    public void toStop() {
+
+        // 1、stop schedule
+        scheduleThreadToStop = true;
+        try {
+            TimeUnit.SECONDS.sleep(1);  // wait
+        } catch (InterruptedException e) {
+            logger.error(e.getMessage(), e);
+        }
+        if (scheduleThread.getState() != Thread.State.TERMINATED) {
+            // interrupt and wait
+            scheduleThread.interrupt();
+            try {
+                scheduleThread.join();
+            } catch (InterruptedException e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        // if has ring data
+        boolean hasRingData = false;
+        if (!ringData.isEmpty()) {
+            for (int second : ringData.keySet()) {
+                List<Integer> tmpData = ringData.get(second);
+                if (tmpData != null && tmpData.size() > 0) {
+                    hasRingData = true;
+                    break;
+                }
+            }
+        }
+        if (hasRingData) {
+            try {
+                TimeUnit.SECONDS.sleep(8);
+            } catch (InterruptedException e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        // stop ring (wait job-in-memory stop)
+        ringThreadToStop = true;
+        try {
+            TimeUnit.SECONDS.sleep(1);
+        } catch (InterruptedException e) {
+            logger.error(e.getMessage(), e);
+        }
+        if (ringThread.getState() != Thread.State.TERMINATED) {
+            // interrupt and wait
+            ringThread.interrupt();
+            try {
+                ringThread.join();
+            } catch (InterruptedException e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+
+        logger.info(">>>>>>>>>>> datax-web, JobScheduleHelper stop");
+    }
+
+}
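The schedule thread above places near-future jobs into a 60-slot time ring keyed by second-of-minute, and the ring thread drains the current and previous slots once per second. A minimal sketch of the slot computation, with an assumed example trigger time:

    long triggerNextTime = System.currentTimeMillis() + 3000;   // assumed: job due in about 3 seconds
    int ringSecond = (int) ((triggerNextTime / 1000) % 60);     // same formula used before pushTimeRing(...)
    // the ring thread later drains slot nowSecond and slot (nowSecond + 59) % 60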

+ 133 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/thread/JobTriggerPoolHelper.java

@@ -0,0 +1,133 @@
+package com.wugui.datax.admin.core.thread;
+
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.trigger.TriggerTypeEnum;
+import com.wugui.datax.admin.core.trigger.JobTrigger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * job trigger thread pool helper
+ *
+ * @author xuxueli 2018-07-03 21:08:07
+ */
+public class JobTriggerPoolHelper {
+    private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
+
+
+    // ---------------------- trigger pool ----------------------
+
+    // fast/slow thread pool
+    private ThreadPoolExecutor fastTriggerPool = null;
+    private ThreadPoolExecutor slowTriggerPool = null;
+
+    public void start() {
+        fastTriggerPool = new ThreadPoolExecutor(
+                10,
+                JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
+                60L,
+                TimeUnit.SECONDS,
+                new LinkedBlockingQueue<Runnable>(1000),
+                new ThreadFactory() {
+                    @Override
+                    public Thread newThread(Runnable r) {
+                        return new Thread(r, "datax-web, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
+                    }
+                });
+
+        slowTriggerPool = new ThreadPoolExecutor(
+                10,
+                JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
+                60L,
+                TimeUnit.SECONDS,
+                new LinkedBlockingQueue<Runnable>(2000),
+                new ThreadFactory() {
+                    @Override
+                    public Thread newThread(Runnable r) {
+                        return new Thread(r, "datax-web, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
+                    }
+                });
+    }
+
+
+    public void stop() {
+        //triggerPool.shutdown();
+        fastTriggerPool.shutdownNow();
+        slowTriggerPool.shutdownNow();
+        logger.info(">>>>>>>>> datax-web trigger thread pool shutdown success.");
+    }
+
+
+    // job timeout count
+    private volatile long minTim = System.currentTimeMillis() / 60000;     // ms > min
+    private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
+
+
+    /**
+     * add trigger
+     */
+    public void addTrigger(final int jobId, final TriggerTypeEnum triggerType, final int failRetryCount, final String executorShardingParam, final String executorParam) {
+
+        // choose thread pool
+        ThreadPoolExecutor triggerPool_ = fastTriggerPool;
+        AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
+        if (jobTimeoutCount != null && jobTimeoutCount.get() > 10) {      // job-timeout 10 times in 1 min
+            triggerPool_ = slowTriggerPool;
+        }
+        // trigger
+        triggerPool_.execute(() -> {
+            long start = System.currentTimeMillis();
+            try {
+                // do trigger
+                JobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            } finally {
+                // check timeout-count-map
+                long minTim_now = System.currentTimeMillis() / 60000;
+                if (minTim != minTim_now) {
+                    minTim = minTim_now;
+                    jobTimeoutCountMap.clear();
+                }
+                // incr timeout-count-map
+                long cost = System.currentTimeMillis() - start;
+                if (cost > 500) {       // job-timeout threshold 500ms
+                    AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
+                    if (timeoutCount != null) {
+                        timeoutCount.incrementAndGet();
+                    }
+                }
+            }
+        });
+    }
+
+
+    // ---------------------- helper ----------------------
+
+    private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();
+
+    public static void toStart() {
+        helper.start();
+    }
+
+    public static void toStop() {
+        helper.stop();
+    }
+
+    /**
+     * @param jobId
+     * @param triggerType
+     * @param failRetryCount        >=0: use this param
+     *                              <0: use param from job info config
+     * @param executorShardingParam
+     * @param executorParam         null: use job param
+     *                              not null: cover job param
+     */
+    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
+        helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
+    }
+
+}
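A minimal usage sketch of the trigger pool (illustrative only; it assumes the admin config and scheduler have already been initialized):

    JobTriggerPoolHelper.toStart();                                           // create the fast/slow pools
    JobTriggerPoolHelper.trigger(1, TriggerTypeEnum.MANUAL, -1, null, null);  // jobId 1, job-configured retry count and params
    JobTriggerPoolHelper.toStop();                                            // shut both pools down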

+ 256 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/trigger/JobTrigger.java

@@ -0,0 +1,256 @@
+package com.wugui.datax.admin.core.trigger;
+
+import com.wugui.datatx.core.biz.ExecutorBiz;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datatx.core.biz.model.TriggerParam;
+import com.wugui.datatx.core.enums.ExecutorBlockStrategyEnum;
+import com.wugui.datatx.core.enums.IncrementTypeEnum;
+import com.wugui.datatx.core.glue.GlueTypeEnum;
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import com.wugui.datax.admin.core.route.ExecutorRouteStrategyEnum;
+import com.wugui.datax.admin.core.scheduler.JobScheduler;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JobDatasource;
+import com.wugui.datax.admin.entity.JobGroup;
+import com.wugui.datax.admin.entity.JobInfo;
+import com.wugui.datax.admin.entity.JobLog;
+import com.wugui.datax.admin.tool.query.BaseQueryTool;
+import com.wugui.datax.admin.tool.query.QueryToolFactory;
+import com.wugui.datax.admin.util.JSONUtils;
+import com.wugui.datax.rpc.util.IpUtil;
+import com.wugui.datax.rpc.util.ThrowableUtil;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Calendar;
+import java.util.Date;
+
+/**
+ * xxl-job trigger
+ * Created by xuxueli on 17/7/13.
+ */
+public class JobTrigger {
+    private static Logger logger = LoggerFactory.getLogger(JobTrigger.class);
+
+    /**
+     * trigger job
+     *
+     * @param jobId
+     * @param triggerType
+     * @param failRetryCount        >=0: use this param
+     *                              <0: use param from job info config
+     * @param executorShardingParam
+     * @param executorParam         null: use job param
+     *                              not null: cover job param
+     */
+    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
+        JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
+        if (jobInfo == null) {
+            logger.warn(">>>>>>>>>>>> trigger fail, jobId invalid, jobId={}", jobId);
+            return;
+        }
+        if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) {
+            // decrypt the datasource username/password embedded in the job JSON
+            String json = JSONUtils.changeJson(jobInfo.getJobJson(), JSONUtils.decrypt);
+            jobInfo.setJobJson(json);
+        }
+        if (StringUtils.isNotBlank(executorParam)) {
+            jobInfo.setExecutorParam(executorParam);
+        }
+        int finalFailRetryCount = failRetryCount >= 0 ? failRetryCount : jobInfo.getExecutorFailRetryCount();
+        JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(jobInfo.getJobGroup());
+
+        // sharding param
+        int[] shardingParam = null;
+        if (executorShardingParam != null) {
+            String[] shardingArr = executorShardingParam.split("/");
+            if (shardingArr.length == 2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
+                shardingParam = new int[2];
+                shardingParam[0] = Integer.valueOf(shardingArr[0]);
+                shardingParam[1] = Integer.valueOf(shardingArr[1]);
+            }
+        }
+        if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
+                && group.getRegistryList() != null && !group.getRegistryList().isEmpty()
+                && shardingParam == null) {
+            for (int i = 0; i < group.getRegistryList().size(); i++) {
+                processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
+            }
+        } else {
+            if (shardingParam == null) {
+                shardingParam = new int[]{0, 1};
+            }
+            processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
+        }
+
+    }
+
+    private static boolean isNumeric(String str) {
+        try {
+            Integer.parseInt(str);
+            return true;
+        } catch (NumberFormatException e) {
+            return false;
+        }
+    }
+
+    /**
+     * @param group               job group, registry list may be empty
+     * @param jobInfo
+     * @param finalFailRetryCount
+     * @param triggerType
+     * @param index               sharding index
+     * @param total               sharding total
+     */
+    private static void processTrigger(JobGroup group, JobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total) {
+
+        TriggerParam triggerParam = new TriggerParam();
+
+        // param
+        ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum.match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION);  // block strategy
+        ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null);    // route strategy
+        String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) ? String.valueOf(index).concat("/").concat(String.valueOf(total)) : null;
+
+        // 1、save log-id
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(new Date());
+        calendar.set(Calendar.MILLISECOND, 0);
+        Date triggerTime = calendar.getTime();
+        JobLog jobLog = new JobLog();
+        jobLog.setJobGroup(jobInfo.getJobGroup());
+        jobLog.setJobId(jobInfo.getId());
+        jobLog.setTriggerTime(triggerTime);
+        jobLog.setJobDesc(jobInfo.getJobDesc());
+
+        JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
+        logger.debug(">>>>>>>>>>> datax-web trigger start, jobId:{}", jobLog.getId());
+
+        // 2、init trigger-param
+        triggerParam.setJobId(jobInfo.getId());
+        triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
+        triggerParam.setExecutorParams(jobInfo.getExecutorParam());
+        triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
+        triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
+        triggerParam.setLogId(jobLog.getId());
+        triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
+        triggerParam.setGlueType(jobInfo.getGlueType());
+        triggerParam.setGlueSource(jobInfo.getGlueSource());
+        triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
+        triggerParam.setBroadcastIndex(index);
+        triggerParam.setBroadcastTotal(total);
+        triggerParam.setJobJson(jobInfo.getJobJson());
+
+        //increment parameter
+        Integer incrementType = jobInfo.getIncrementType();
+        if (incrementType != null) {
+            triggerParam.setIncrementType(incrementType);
+            if (IncrementTypeEnum.ID.getCode() == incrementType) {
+                long maxId = getMaxId(jobInfo);
+                jobLog.setMaxId(maxId);
+                triggerParam.setEndId(maxId);
+                triggerParam.setStartId(jobInfo.getIncStartId());
+            } else if (IncrementTypeEnum.TIME.getCode() == incrementType) {
+                triggerParam.setStartTime(jobInfo.getIncStartTime());
+                triggerParam.setTriggerTime(triggerTime);
+                triggerParam.setReplaceParamType(jobInfo.getReplaceParamType());
+            } else if (IncrementTypeEnum.PARTITION.getCode() == incrementType) {
+                triggerParam.setPartitionInfo(jobInfo.getPartitionInfo());
+            }
+            triggerParam.setReplaceParam(jobInfo.getReplaceParam());
+        }
+        //jvm parameter
+        triggerParam.setJvmParam(jobInfo.getJvmParam());
+
+        // 3、init address
+        String address = null;
+        ReturnT<String> routeAddressResult = null;
+        if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) {
+            if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
+                if (index < group.getRegistryList().size()) {
+                    address = group.getRegistryList().get(index);
+                } else {
+                    address = group.getRegistryList().get(0);
+                }
+            } else {
+                routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
+                if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
+                    address = routeAddressResult.getContent();
+                }
+            }
+        } else {
+            routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
+        }
+
+        // 4、trigger remote executor
+        ReturnT<String> triggerResult = null;
+        if (address != null) {
+            triggerResult = runExecutor(triggerParam, address);
+        } else {
+            triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
+        }
+
+        // 5、collect trigger info
+        StringBuffer triggerMsgSb = new StringBuffer();
+        triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append(":").append(triggerType.getTitle());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append(":").append(IpUtil.getIp());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append(":")
+                .append((group.getAddressType() == 0) ? I18nUtil.getString("jobgroup_field_addressType_0") : I18nUtil.getString("jobgroup_field_addressType_1"));
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append(":").append(group.getRegistryList());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append(":").append(executorRouteStrategyEnum.getTitle());
+        if (shardingParam != null) {
+            triggerMsgSb.append("(" + shardingParam + ")");
+        }
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append(":").append(blockStrategy.getTitle());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append(":").append(jobInfo.getExecutorTimeout());
+        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append(":").append(finalFailRetryCount);
+
+        triggerMsgSb.append("<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_run") + "<<<<<<<<<<< </span><br>")
+                .append((routeAddressResult != null && routeAddressResult.getMsg() != null) ? routeAddressResult.getMsg() + "<br><br>" : "").append(triggerResult.getMsg() != null ? triggerResult.getMsg() : "");
+
+        // 6、save log trigger-info
+        jobLog.setExecutorAddress(address);
+        jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
+        jobLog.setExecutorParam(jobInfo.getExecutorParam());
+        jobLog.setExecutorShardingParam(shardingParam);
+        jobLog.setExecutorFailRetryCount(finalFailRetryCount);
+        jobLog.setTriggerCode(triggerResult.getCode());
+        jobLog.setTriggerMsg(triggerMsgSb.toString());
+        JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(jobLog);
+
+        logger.debug(">>>>>>>>>>> datax-web trigger end, jobId:{}", jobLog.getId());
+    }
+
+    private static long getMaxId(JobInfo jobInfo) {
+        JobDatasource datasource = JobAdminConfig.getAdminConfig().getJobDatasourceMapper().selectById(jobInfo.getDatasourceId());
+        BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource);
+        return qTool.getMaxIdVal(jobInfo.getReaderTable(), jobInfo.getPrimaryKey());
+    }
+
+    /**
+     * run executor
+     *
+     * @param triggerParam
+     * @param address
+     * @return
+     */
+    public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address) {
+        ReturnT<String> runResult = null;
+        try {
+            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
+            runResult = executorBiz.run(triggerParam);
+        } catch (Exception e) {
+            logger.error(">>>>>>>>>>> datax-web trigger error, please check if the executor[{}] is running.", address, e);
+            runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.toString(e));
+        }
+
+        StringBuffer runResultSB = new StringBuffer(I18nUtil.getString("jobconf_trigger_run") + ":");
+        runResultSB.append("<br>address:").append(address);
+        runResultSB.append("<br>code:").append(runResult.getCode());
+        runResultSB.append("<br>msg:").append(runResult.getMsg());
+
+        runResult.setMsg(runResultSB.toString());
+        return runResult;
+    }
+
+}
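For the SHARDING_BROADCAST route strategy, the executorShardingParam handled above has the form "index/total"; a small sketch with assumed values:

    String executorShardingParam = "1/3";                      // assumed: shard index 1 of 3 executors
    String[] shardingArr = executorShardingParam.split("/");   // same split used in trigger(...)
    int index = Integer.parseInt(shardingArr[0]);              // 1
    int total = Integer.parseInt(shardingArr[1]);              // 3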

+ 26 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/trigger/TriggerTypeEnum.java

@@ -0,0 +1,26 @@
+package com.wugui.datax.admin.core.trigger;
+
+import com.wugui.datax.admin.core.util.I18nUtil;
+
+/**
+ * trigger type enum
+ *
+ * @author xuxueli 2018-09-16 04:56:41
+ */
+public enum TriggerTypeEnum {
+
+    MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")),
+    CRON(I18nUtil.getString("jobconf_trigger_type_cron")),
+    RETRY(I18nUtil.getString("jobconf_trigger_type_retry")),
+    PARENT(I18nUtil.getString("jobconf_trigger_type_parent")),
+    API(I18nUtil.getString("jobconf_trigger_type_api"));
+
+    private TriggerTypeEnum(String title){
+        this.title = title;
+    }
+    private String title;
+    public String getTitle() {
+        return title;
+    }
+
+}

+ 80 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/util/I18nUtil.java

@@ -0,0 +1,80 @@
+package com.wugui.datax.admin.core.util;
+
+import com.wugui.datax.admin.core.conf.JobAdminConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.EncodedResource;
+import org.springframework.core.io.support.PropertiesLoaderUtils;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * i18n util
+ *
+ * @author xuxueli 2018-01-17 20:39:06
+ */
+public class I18nUtil {
+    private static Logger logger = LoggerFactory.getLogger(I18nUtil.class);
+
+    private static Properties prop = null;
+    public static Properties loadI18nProp(){
+        if (prop != null) {
+            return prop;
+        }
+        try {
+            // build i18n prop
+            String i18n = JobAdminConfig.getAdminConfig().getI18n();
+            i18n = (i18n!=null && i18n.trim().length()>0)?("_"+i18n):i18n;
+            String i18nFile = MessageFormat.format("i18n/message{0}.properties", i18n);
+
+            // load prop
+            Resource resource = new ClassPathResource(i18nFile);
+            EncodedResource encodedResource = new EncodedResource(resource,"UTF-8");
+            prop = PropertiesLoaderUtils.loadProperties(encodedResource);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+        return prop;
+    }
+
+    /**
+     * get val of i18n key
+     *
+     * @param key
+     * @return
+     */
+    public static String getString(String key) {
+        return loadI18nProp().getProperty(key);
+    }
+
+    /**
+     * get mult val of i18n mult key, as json
+     *
+     * @param keys
+     * @return
+     */
+    public static String getMultString(String... keys) {
+        Map<String, String> map = new HashMap<String, String>();
+
+        Properties prop = loadI18nProp();
+        if (keys!=null && keys.length>0) {
+            for (String key: keys) {
+                map.put(key, prop.getProperty(key));
+            }
+        } else {
+            for (String key: prop.stringPropertyNames()) {
+                map.put(key, prop.getProperty(key));
+            }
+        }
+
+        String json = JacksonUtil.writeValueAsString(map);
+        return json;
+    }
+
+}
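A brief usage sketch of I18nUtil (illustrative; the keys must exist in the i18n/message*.properties bundle):

    String cronTitle = I18nUtil.getString("jobconf_trigger_type_cron");                                   // single key
    String asJson = I18nUtil.getMultString("jobconf_trigger_type_cron", "jobconf_trigger_type_manual");   // multiple keys, returned as JSON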

+ 92 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/util/JacksonUtil.java

@@ -0,0 +1,92 @@
+package com.wugui.datax.admin.core.util;
+
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JavaType;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * Jackson util
+ * 
+ * 1、fields must be private with getters/setters;
+ * 2、inner classes are not supported;
+ * 
+ * @author xuxueli 2015-9-25 18:02:56
+ */
+public class JacksonUtil {
+	private static Logger logger = LoggerFactory.getLogger(JacksonUtil.class);
+
+    private final static ObjectMapper objectMapper = new ObjectMapper();
+    public static ObjectMapper getInstance() {
+        return objectMapper;
+    }
+
+    /**
+     * bean、array、List、Map --> json
+     * 
+     * @param obj
+     * @return json string
+     * @throws Exception
+     */
+    public static String writeValueAsString(Object obj) {
+    	try {
+			return getInstance().writeValueAsString(obj);
+		} catch (JsonGenerationException e) {
+			logger.error(e.getMessage(), e);
+		} catch (JsonMappingException e) {
+			logger.error(e.getMessage(), e);
+		} catch (IOException e) {
+			logger.error(e.getMessage(), e);
+		}
+        return null;
+    }
+
+    /**
+     * string --> bean、Map、List(array)
+     * 
+     * @param jsonStr
+     * @param clazz
+     * @return obj
+     * @throws Exception
+     */
+    public static <T> T readValue(String jsonStr, Class<T> clazz) {
+    	try {
+			return getInstance().readValue(jsonStr, clazz);
+		} catch (JsonParseException e) {
+			logger.error(e.getMessage(), e);
+		} catch (JsonMappingException e) {
+			logger.error(e.getMessage(), e);
+		} catch (IOException e) {
+			logger.error(e.getMessage(), e);
+		}
+    	return null;
+    }
+
+	/**
+	 * string --> List<Bean>...
+	 *
+	 * @param jsonStr
+	 * @param parametrized
+	 * @param parameterClasses
+	 * @param <T>
+	 * @return
+	 */
+	public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
+		try {
+			JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
+			return getInstance().readValue(jsonStr, javaType);
+		} catch (JsonParseException e) {
+			logger.error(e.getMessage(), e);
+		} catch (JsonMappingException e) {
+			logger.error(e.getMessage(), e);
+		} catch (IOException e) {
+			logger.error(e.getMessage(), e);
+		}
+		return null;
+	}
+}
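A brief usage sketch of JacksonUtil with assumed example data:

    Map<String, String> data = new HashMap<>();
    data.put("code", "200");
    String json = JacksonUtil.writeValueAsString(data);                           // {"code":"200"}
    Map parsed = JacksonUtil.readValue(json, Map.class);                          // back to a Map
    List<JobInfo> jobs = JacksonUtil.readValue("[]", List.class, JobInfo.class);  // parametrized form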

+ 133 - 0
datax-admin/src/main/java/com/wugui/datax/admin/core/util/LocalCacheUtil.java

@@ -0,0 +1,133 @@
+package com.wugui.datax.admin.core.util;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * local cache tool
+ *
+ * @author xuxueli 2018-01-22 21:37:34
+ */
+public class LocalCacheUtil {
+
+    private static ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();   // declared as the abstract parent type (ConcurrentMap) for better compatibility
+    private static class LocalCacheData{
+        private String key;
+        private Object val;
+        private long timeoutTime;
+
+        public LocalCacheData() {
+        }
+
+        public LocalCacheData(String key, Object val, long timeoutTime) {
+            this.key = key;
+            this.val = val;
+            this.timeoutTime = timeoutTime;
+        }
+
+        public String getKey() {
+            return key;
+        }
+
+        public void setKey(String key) {
+            this.key = key;
+        }
+
+        public Object getVal() {
+            return val;
+        }
+
+        public void setVal(Object val) {
+            this.val = val;
+        }
+
+        public long getTimeoutTime() {
+            return timeoutTime;
+        }
+
+        public void setTimeoutTime(long timeoutTime) {
+            this.timeoutTime = timeoutTime;
+        }
+    }
+
+
+    /**
+     * set cache
+     *
+     * @param key
+     * @param val
+     * @param cacheTime
+     * @return
+     */
+    public static boolean set(String key, Object val, long cacheTime){
+
+        // clean timeout cache, before set new cache (avoid cache too much)
+        cleanTimeoutCache();
+
+        // set new cache
+        if (key==null || key.trim().length()==0) {
+            return false;
+        }
+        if (val == null) {
+            remove(key);
+            return false;
+        }
+        if (cacheTime <= 0) {
+            remove(key);
+            return false;
+        }
+        long timeoutTime = System.currentTimeMillis() + cacheTime;
+        LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
+        cacheRepository.put(localCacheData.getKey(), localCacheData);
+        return true;
+    }
+
+    /**
+     * remove cache
+     *
+     * @param key
+     * @return
+     */
+    public static boolean remove(String key){
+        if (key==null || key.trim().length()==0) {
+            return false;
+        }
+        cacheRepository.remove(key);
+        return true;
+    }
+
+    /**
+     * get cache
+     *
+     * @param key
+     * @return
+     */
+    public static Object get(String key){
+        if (key==null || key.trim().length()==0) {
+            return null;
+        }
+        LocalCacheData localCacheData = cacheRepository.get(key);
+        if (localCacheData!=null && System.currentTimeMillis()<localCacheData.getTimeoutTime()) {
+            return localCacheData.getVal();
+        } else {
+            remove(key);
+            return null;
+        }
+    }
+
+    /**
+     * clean timeout cache
+     *
+     * @return
+     */
+    public static boolean cleanTimeoutCache(){
+        if (!cacheRepository.keySet().isEmpty()) {
+            for (String key: cacheRepository.keySet()) {
+                LocalCacheData localCacheData = cacheRepository.get(key);
+                if (localCacheData!=null && System.currentTimeMillis()>=localCacheData.getTimeoutTime()) {
+                    cacheRepository.remove(key);
+                }
+            }
+        }
+        return true;
+    }
+
+}
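A brief usage sketch of LocalCacheUtil; note that cacheTime is a duration in milliseconds:

    LocalCacheUtil.set("group_1_addresses", "10.0.0.1:9999", 60 * 1000L);   // cache for 60 seconds
    Object hit = LocalCacheUtil.get("group_1_addresses");                   // the value, or null once the TTL has expired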

+ 32 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/DataXBatchJsonBuildDto.java

@@ -0,0 +1,32 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * DTO for batch-building DataX job JSON
+ *
+ * @author jingwk
+ * @ClassName DataXJsonDto
+ * @Version 2.1.2
+ * @since 2020/05/05 17:15
+ */
+@Data
+public class DataXBatchJsonBuildDto implements Serializable {
+
+    private Long readerDatasourceId;
+
+    private List<String> readerTables;
+
+    private Long writerDatasourceId;
+
+    private List<String> writerTables;
+
+    private int templateId;
+
+    private RdbmsReaderDto rdbmsReader;
+
+    private RdbmsWriterDto rdbmsWriter;
+}

+ 46 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/DataXJsonBuildDto.java

@@ -0,0 +1,46 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * DTO for building DataX job JSON
+ *
+ * @author jingwk
+ * @ClassName DataxJsonDto
+ * @Version 2.1.1
+ * @since 2020/03/14 07:15
+ */
+@Data
+public class DataXJsonBuildDto implements Serializable {
+
+    private Long readerDatasourceId;
+
+    private List<String> readerTables;
+
+    private List<String> readerColumns;
+
+    private Long writerDatasourceId;
+
+    private List<String> writerTables;
+
+    private List<String> writerColumns;
+
+    private HiveReaderDto hiveReader;
+
+    private HiveWriterDto hiveWriter;
+
+    private HbaseReaderDto hbaseReader;
+
+    private HbaseWriterDto hbaseWriter;
+
+    private RdbmsReaderDto rdbmsReader;
+
+    private RdbmsWriterDto rdbmsWriter;
+
+    private MongoDBReaderDto mongoDBReader;
+
+    private MongoDBWriterDto mongoDBWriter;
+}

+ 17 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/HbaseReaderDto.java

@@ -0,0 +1,17 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class HbaseReaderDto implements Serializable {
+
+  private String readerMaxVersion;
+
+  private String readerMode;
+
+  private Range readerRange;
+
+}
+

+ 17 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/HbaseWriterDto.java

@@ -0,0 +1,17 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class HbaseWriterDto implements Serializable {
+
+  private String writeNullMode;
+
+  private String writerMode;
+
+  private String writerRowkeyColumn;
+
+  private VersionColumn writerVersionColumn;
+}

+ 28 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/HiveReaderDto.java

@@ -0,0 +1,28 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * DTO for building the Hive reader configuration
+ *
+ * @author jingwk
+ * @ClassName hive reader
+ * @Version 2.0
+ * @since 2020/01/11 17:15
+ */
+@Data
+public class HiveReaderDto implements Serializable {
+
+    private String readerPath;
+
+    private String readerDefaultFS;
+
+    private String readerFileType;
+
+    private String readerFieldDelimiter;
+
+    private Boolean readerSkipHeader;
+
+}

+ 29 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/HiveWriterDto.java

@@ -0,0 +1,29 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * DTO for building the Hive writer configuration
+ *
+ * @author jingwk
+ * @ClassName hive write dto
+ * @Version 2.0
+ * @since 2020/01/11 17:15
+ */
+@Data
+public class HiveWriterDto implements Serializable {
+
+    private String writerDefaultFS;
+
+    private String writerFileType;
+
+    private String writerPath;
+
+    private String writerFileName;
+
+    private String writeMode;
+
+    private String writeFieldDelimiter;
+}

+ 19 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/MongoDBReaderDto.java

@@ -0,0 +1,19 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * DTO for building the MongoDB reader configuration
+ *
+ * @author jingwk
+ * @ClassName mongodb reader
+ * @Version 2.1.1
+ * @since 2020/03/14 07:15
+ */
+@Data
+public class MongoDBReaderDto implements Serializable {
+
+
+}

+ 20 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/MongoDBWriterDto.java

@@ -0,0 +1,20 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * DTO for building the MongoDB writer configuration
+ *
+ * @author jingwk
+ * @ClassName mongodb write dto
+ * @Version 2.1.1
+ * @since 2020/03/14 07:15
+ */
+@Data
+public class MongoDBWriterDto implements Serializable {
+
+    private UpsertInfo upsertInfo;
+
+}

+ 15 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/Range.java

@@ -0,0 +1,15 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class Range implements Serializable {
+
+  private String startRowkey;
+
+  private String endRowkey;
+
+  private Boolean isBinaryRowkey;
+}

+ 24 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/RdbmsReaderDto.java

@@ -0,0 +1,24 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * DTO for building the RDBMS reader configuration
+ *
+ * @author jingwk
+ * @ClassName RdbmsReaderDto
+ * @Version 2.0
+ * @since 2020/01/11 17:15
+ */
+@Data
+public class RdbmsReaderDto implements Serializable {
+
+    private String readerSplitPk;
+
+    private String whereParams;
+
+    private String querySql;
+}

+ 21 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/RdbmsWriterDto.java

@@ -0,0 +1,21 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * DTO for building the RDBMS writer configuration
+ *
+ * @author jingwk
+ * @ClassName RdbmsWriteDto
+ * @Version 2.0
+ * @since 2020/01/11 17:15
+ */
+@Data
+public class RdbmsWriterDto implements Serializable {
+
+    private String preSql;
+
+    private String postSql;
+}

+ 86 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/TaskScheduleDto.java

@@ -0,0 +1,86 @@
+package com.wugui.datax.admin.dto;
+
+public class TaskScheduleDto {
+
+
+    /**
+     * Selected job type:
+     * 1  -> daily
+     * 2  -> monthly
+     * 3  -> weekly
+     */
+    Integer jobType;
+
+    /**
+     * Days of the week to run on
+     */
+    Integer[] dayOfWeeks;
+
+    /**
+     * Days of the month to run on
+     */
+    Integer[] dayOfMonths;
+
+    /**
+     * Second
+     */
+    Integer second;
+
+    /**
+     * Minute
+     */
+    Integer minute;
+
+    /**
+     * Hour
+     */
+    Integer hour;
+
+    public Integer getJobType() {
+        return jobType;
+    }
+
+    public void setJobType(Integer jobType) {
+        this.jobType = jobType;
+    }
+
+    public Integer[] getDayOfWeeks() {
+        return dayOfWeeks;
+    }
+
+    public void setDayOfWeeks(Integer[] dayOfWeeks) {
+        this.dayOfWeeks = dayOfWeeks;
+    }
+
+    public Integer[] getDayOfMonths() {
+        return dayOfMonths;
+    }
+
+    public void setDayOfMonths(Integer[] dayOfMonths) {
+        this.dayOfMonths = dayOfMonths;
+    }
+
+    public Integer getSecond() {
+        return second;
+    }
+
+    public void setSecond(Integer second) {
+        this.second = second;
+    }
+
+    public Integer getMinute() {
+        return minute;
+    }
+
+    public void setMinute(Integer minute) {
+        this.minute = minute;
+    }
+
+    public Integer getHour() {
+        return hour;
+    }
+
+    public void setHour(Integer hour) {
+        this.hour = hour;
+    }
+}
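One plausible mapping from this DTO to a Quartz-style cron string, shown as an assumption-laden sketch (the project's actual DTO-to-cron conversion utility is not part of this excerpt):

    TaskScheduleDto dto = new TaskScheduleDto();
    dto.setJobType(1);      // 1 -> daily, per the field comment above
    dto.setHour(2);
    dto.setMinute(30);
    dto.setSecond(0);
    String cron = dto.getSecond() + " " + dto.getMinute() + " " + dto.getHour() + " * * ?";   // "0 30 2 * * ?"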

+ 21 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/TriggerJobDto.java

@@ -0,0 +1,21 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * Entity for receiving job-trigger requests
+ *
+ * @author jingwk
+ * @ClassName TriggerJobDto
+ * @Version 1.0
+ * @since 2019/12/01 16:12
+ */
+@Data
+public class TriggerJobDto implements Serializable {
+
+    private String executorParam;
+
+    private int jobId;
+}

+ 18 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/UpsertInfo.java

@@ -0,0 +1,18 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+/**
+ * Created by mac on 2020/3/16.
+ */
+@Data
+public class UpsertInfo {
+    /**
+     * When set to true, records with the same upsertKey are updated (upsert) rather than inserted
+     */
+    private Boolean isUpsert;
+    /**
+     * upsertKey specifies the business primary key of each record and is used when updating
+     */
+    private String upsertKey;
+}

+ 11 - 0
datax-admin/src/main/java/com/wugui/datax/admin/dto/VersionColumn.java

@@ -0,0 +1,11 @@
+package com.wugui.datax.admin.dto;
+
+import lombok.Data;
+
+@Data
+public class VersionColumn {
+
+  private Integer index;
+
+  private String value;
+}

+ 143 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobDatasource.java

@@ -0,0 +1,143 @@
+package com.wugui.datax.admin.entity;
+
+import com.alibaba.fastjson.annotation.JSONField;
+import com.baomidou.mybatisplus.annotation.*;
+import com.baomidou.mybatisplus.extension.activerecord.Model;
+import com.wugui.datax.admin.core.handler.AESEncryptHandler;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Date;
+
+/**
+ * JDBC datasource configuration entity (job_jdbc_datasource)
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version v1.0
+ * @since 2019-07-30
+ */
+
+@Data
+@ApiModel
+@TableName("job_jdbc_datasource")
+public class JobDatasource extends Model<JobDatasource> {
+
+    /**
+     * Auto-increment primary key
+     */
+    @TableId
+    @ApiModelProperty(value = "自增主键")
+    private Long id;
+
+    /**
+     * Datasource name
+     */
+    @ApiModelProperty(value = "数据源名称")
+    private String datasourceName;
+
+    /**
+     * Datasource
+     */
+    @ApiModelProperty(value = "数据源")
+    private String datasource;
+
+    /**
+     * 数据源分组
+     */
+    @ApiModelProperty(value = "数据源分组")
+    private String datasourceGroup;
+
+    /**
+     * Username
+     * AESEncryptHandler is the encryption type handler
+     * MyBatis Plus versions before 3.0.7.1 lack the typeHandler attribute; upgrade to at least 3.1.2
+     */
+    @ApiModelProperty(value = "用户名")
+    @TableField(typeHandler = AESEncryptHandler.class)
+    private String jdbcUsername;
+
+    /**
+     * Password
+     */
+    @TableField(typeHandler = AESEncryptHandler.class)
+    @ApiModelProperty(value = "密码")
+    private String jdbcPassword;
+
+    /**
+     * jdbc url
+     */
+    @ApiModelProperty(value = "jdbc url")
+    private String jdbcUrl;
+
+    /**
+     * JDBC driver class
+     */
+    @ApiModelProperty(value = "jdbc驱动类")
+    private String jdbcDriverClass;
+
+    /**
+     * Status: 0 = deleted, 1 = enabled, 2 = disabled
+     */
+    @TableLogic
+    @ApiModelProperty(value = "状态:0删除 1启用 2禁用")
+    private Integer status;
+
+    /**
+     * Created by
+     */
+    @TableField(fill = FieldFill.INSERT)
+    @ApiModelProperty(value = "创建人", hidden = true)
+    private String createBy;
+
+    /**
+     * Creation time
+     */
+    @TableField(fill = FieldFill.INSERT)
+    @JSONField(format = "yyyy/MM/dd")
+    @ApiModelProperty(value = "创建时间", hidden = true)
+    private Date createDate;
+
+    /**
+     * Updated by
+     */
+    @TableField(fill = FieldFill.INSERT_UPDATE)
+    @ApiModelProperty(value = "更新人", hidden = true)
+    private String updateBy;
+
+    /**
+     * Update time
+     */
+    @TableField(fill = FieldFill.INSERT_UPDATE)
+    @JSONField(format = "yyyy/MM/dd")
+    @ApiModelProperty(value = "更新时间", hidden = true)
+    private Date updateDate;
+
+    /**
+     * Remarks
+     */
+    @ApiModelProperty(value = "备注", hidden = true)
+    private String comments;
+
+    /**
+     * ZooKeeper address
+     */
+    @ApiModelProperty(value = "zookeeper地址", hidden = true)
+    private String zkAdress;
+
+    /**
+     * Database name
+     */
+    @ApiModelProperty(value = "数据库名", hidden = true)
+    private String databaseName;
+    /**
+     * Get the primary key value
+     *
+     * @return the primary key value
+     */
+    @Override
+    protected Serializable pkVal() {
+        return this.id;
+    }
+}

+ 84 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobGroup.java

@@ -0,0 +1,84 @@
+package com.wugui.datax.admin.entity;
+
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+public class JobGroup {
+
+    @ApiModelProperty("执行器Id")
+    private int id;
+    @ApiModelProperty("执行器AppName")
+    private String appName;
+    @ApiModelProperty("执行器名称")
+    private String title;
+    @ApiModelProperty("排序")
+    private int order;
+    @ApiModelProperty("执行器地址类型:0=自动注册、1=手动录入")
+    private int addressType;
+    @ApiModelProperty("执行器地址列表,多地址逗号分隔(手动录入)")
+    private String addressList;
+
+    // registry list
+    private List<String> registryList;  // 执行器地址列表(系统注册)
+    public List<String> getRegistryList() {
+        if (addressList!=null && addressList.trim().length()>0) {
+            registryList = new ArrayList<>(Arrays.asList(addressList.split(",")));
+        }
+        return registryList;
+    }
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+    public String getAppName() {
+        return appName;
+    }
+
+    public void setAppName(String appName) {
+        this.appName = appName;
+    }
+
+    public String getTitle() {
+        return title;
+    }
+
+    public void setTitle(String title) {
+        this.title = title;
+    }
+
+    public int getOrder() {
+        return order;
+    }
+
+    public void setOrder(int order) {
+        this.order = order;
+    }
+
+    public int getAddressType() {
+        return addressType;
+    }
+
+    public void setAddressType(int addressType) {
+        this.addressType = addressType;
+    }
+
+    public String getAddressList() {
+        return addressList;
+    }
+
+    public void setAddressList(String addressList) {
+        this.addressList = addressList;
+    }
+
+}
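
Editor's note: getRegistryList() gives manually entered addresses priority: whenever addressList is non-blank it is split on commas and replaces whatever was auto-registered into registryList. A tiny illustration, using the JobGroup class above:

public class JobGroupAddressDemo {
    public static void main(String[] args) {
        JobGroup group = new JobGroup();
        group.setAddressType(1);                                 // 手动录入
        group.setAddressList("10.0.0.1:9999,10.0.0.2:9999");
        System.out.println(group.getRegistryList());             // [10.0.0.1:9999, 10.0.0.2:9999]
    }
}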

+ 125 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobInfo.java

@@ -0,0 +1,125 @@
+package com.wugui.datax.admin.entity;
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.util.Date;
+
+/**
+ * xxl-job info
+ *
+ * @author jingwk  2019-11-17 14:25:49
+ */
+@Data
+public class JobInfo {
+
+	@ApiModelProperty("主键ID")
+	private int id;
+
+	@ApiModelProperty("执行器主键ID")
+	private int jobGroup;
+
+	@ApiModelProperty("任务执行CRON表达式")
+	private String jobCron;
+
+	@ApiModelProperty("排序")
+	@ApiModelProperty("任务描述")
+
+	private Date addTime;
+
+	private Date updateTime;
+
+	@ApiModelProperty("修改用户")
+	private int userId;
+
+	@ApiModelProperty("报警邮件")
+	private String alarmEmail;
+
+	@ApiModelProperty("执行器路由策略")
+	private String executorRouteStrategy;
+
+	@ApiModelProperty("执行器,任务Handler名称")
+	private String executorHandler;
+
+	@ApiModelProperty("执行器,任务参数")
+	private String executorParam;
+
+	@ApiModelProperty("阻塞处理策略")
+	private String executorBlockStrategy;
+
+	@ApiModelProperty("任务执行超时时间,单位秒")
+	private int executorTimeout;
+
+	@ApiModelProperty("失败重试次数")
+	private int executorFailRetryCount;
+
+	@ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum")
+	private String glueType;
+
+	@ApiModelProperty("GLUE源代码")
+	private String glueSource;
+
+	@ApiModelProperty("GLUE备注")
+	private String glueRemark;
+
+	@ApiModelProperty("GLUE更新时间")
+	private Date glueUpdatetime;
+
+	@ApiModelProperty("子任务ID")
+	private String childJobId;
+
+	@ApiModelProperty("调度状态:0-停止,1-运行")
+	private int triggerStatus;
+
+	@ApiModelProperty("上次调度时间")
+	private long triggerLastTime;
+
+	@ApiModelProperty("下次调度时间")
+	private long triggerNextTime;
+
+	@ApiModelProperty("datax运行json")
+	private String jobJson;
+
+	@ApiModelProperty("脚本动态参数")
+	private String replaceParam;
+
+	@ApiModelProperty("增量日期格式")
+	private String replaceParamType;
+
+	@ApiModelProperty("jvm参数")
+	private String jvmParam;
+
+	@ApiModelProperty("增量初始时间")
+	private Date incStartTime;
+
+	@ApiModelProperty("分区信息")
+	private String partitionInfo;
+
+	@ApiModelProperty("最近一次执行状态")
+	private int lastHandleCode;
+
+	@ApiModelProperty("所属项目Id")
+	private int projectId;
+
+	@ApiModelProperty("主键字段")
+	private String primaryKey;
+
+	@ApiModelProperty("增量初始id")
+	private Long incStartId;
+
+	@ApiModelProperty("增量方式")
+	private int incrementType;
+
+	@ApiModelProperty("datax的读表")
+	private String readerTable;
+
+	@ApiModelProperty("数据源id")
+	private int datasourceId;
+
+	@TableField(exist=false)
+	private String projectName;
+
+	@TableField(exist=false)
+	private String userName;
+}

+ 66 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLog.java

@@ -0,0 +1,66 @@
+package com.wugui.datax.admin.entity;
+
+import com.fasterxml.jackson.annotation.JsonFormat;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.util.Date;
+
+/**
+ * datax-web log, used to track trigger process
+ *
+ * @author jingwk  2019-11-17 22:08:11
+ */
+@Data
+public class JobLog {
+
+    private long id;
+
+    // job info
+    @ApiModelProperty("执行器主键ID")
+    private int jobGroup;
+    @ApiModelProperty("任务,主键ID")
+    private int jobId;
+    @ApiModelProperty("任务描述")
+    private String jobDesc;
+
+    // execute info
+    @ApiModelProperty("执行器地址,本次执行的地址")
+    private String executorAddress;
+    @ApiModelProperty("执行器任务handler")
+    private String executorHandler;
+    @ApiModelProperty("执行器任务参数")
+    private String executorParam;
+    @ApiModelProperty("执行器任务分片参数,格式如 1/2")
+    private String executorShardingParam;
+    @ApiModelProperty("失败重试次数")
+    private int executorFailRetryCount;
+
+    // trigger info
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    @ApiModelProperty("调度-时间")
+    private Date triggerTime;
+    @ApiModelProperty("调度-结果")
+    private int triggerCode;
+    @ApiModelProperty("调度-日志")
+    private String triggerMsg;
+
+    // handle info
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    @ApiModelProperty("执行-时间")
+    private Date handleTime;
+    @ApiModelProperty("执行-状态")
+    private int handleCode;
+    @ApiModelProperty("执行-日志")
+    private String handleMsg;
+
+    // alarm info
+    @ApiModelProperty("告警状态:0-默认、1-无需告警、2-告警成功、3-告警失败")
+    private int alarmStatus;
+
+    @ApiModelProperty("DataX进程Id")
+    private String processId;
+
+    @ApiModelProperty("增量最大id")
+    private Long maxId;
+}

+ 34 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLogGlue.java

@@ -0,0 +1,34 @@
+package com.wugui.datax.admin.entity;
+
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.util.Date;
+
+/**
+ * xxl-job log for glue, used to track job code process
+ *
+ * @author xuxueli 2016-5-19 17:57:46
+ */
+@Data
+public class JobLogGlue {
+
+    private int id;
+
+    @ApiModelProperty("任务主键ID")
+    private int jobId;
+
+    @ApiModelProperty("GLUE类型\t#com.xxl.job.core.glue.GlueTypeEnum")
+    private String glueType;
+
+    @ApiModelProperty("GLUE源代码")
+    private String glueSource;
+
+    @ApiModelProperty("GLUE备注")
+    private String glueRemark;
+
+    private Date addTime;
+
+    private Date updateTime;
+
+}

+ 17 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobLogReport.java

@@ -0,0 +1,17 @@
+package com.wugui.datax.admin.entity;
+
+import lombok.Data;
+
+import java.util.Date;
+
+@Data
+public class JobLogReport {
+
+    private int id;
+
+    private Date triggerDay;
+
+    private int runningCount;
+    private int sucCount;
+    private int failCount;
+}

+ 57 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobPermission.java

@@ -0,0 +1,57 @@
+package com.wugui.datax.admin.entity;
+
+public class JobPermission {
+
+    private int id;
+    //权限名称
+    private String name;
+
+    //权限描述
+    private String descritpion;
+
+    //授权链接
+    private String url;
+
+    //父节点id
+    private int pid;
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getDescritpion() {
+        return descritpion;
+    }
+
+    public void setDescritpion(String descritpion) {
+        this.descritpion = descritpion;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public int getPid() {
+        return pid;
+    }
+
+    public void setPid(int pid) {
+        this.pid = pid;
+    }
+}

+ 44 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobProject.java

@@ -0,0 +1,44 @@
+package com.wugui.datax.admin.entity;
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Created by jingwk on 2020/05/24
+ */
+@Data
+public class JobProject {
+
+    @ApiModelProperty("项目Id")
+    private int id;
+
+    @ApiModelProperty("项目名称")
+    private String name;
+
+    @ApiModelProperty("项目描述")
+    private String description;
+
+    @ApiModelProperty("用户Id")
+    private int userId;
+
+    @ApiModelProperty("标记")
+    private Boolean flag;
+
+    @ApiModelProperty("创建时间")
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    private Date createTime;
+
+    @ApiModelProperty("更新时间")
+    private Date updateTime;
+
+    @TableField(exist=false)
+    private String userName;
+
+}

+ 23 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobRegistry.java

@@ -0,0 +1,23 @@
+package com.wugui.datax.admin.entity;
+
+import com.fasterxml.jackson.annotation.JsonFormat;
+import lombok.Data;
+
+import java.util.Date;
+
+/**
+ * Created by xuxueli on 16/9/30.
+ */
+@Data
+public class JobRegistry {
+
+    private int id;
+    private String registryGroup;
+    private String registryKey;
+    private String registryValue;
+    private double cpuUsage;
+    private double memoryUsage;
+    private double loadAverage;
+    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
+    private Date updateTime;
+}

+ 30 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobRole.java

@@ -0,0 +1,30 @@
+package com.wugui.datax.admin.entity;
+
+import io.swagger.annotations.ApiModelProperty;
+
+/**
+ * @author xuxueli 2019-05-04 16:43:12
+ */
+public class JobRole {
+
+    private int id;
+    @ApiModelProperty("账号")
+    private String name;
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}

+ 92 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobTemplate.java

@@ -0,0 +1,92 @@
+package com.wugui.datax.admin.entity;
+
+import com.baomidou.mybatisplus.annotation.TableField;
+import io.swagger.annotations.ApiModelProperty;
+import lombok.Data;
+
+import java.util.Date;
+
+/**
+ * xxl-job info
+ *
+ * @author jingwk  2019-11-17 14:25:49
+ */
+@Data
+public class JobTemplate {
+
+	@ApiModelProperty("主键ID")
+	private int id;
+
+	@ApiModelProperty("执行器主键ID")
+	private int jobGroup;
+
+	@ApiModelProperty("任务执行CRON表达式")
+	private String jobCron;
+
+	@ApiModelProperty("排序")
+	@ApiModelProperty("任务描述")
+
+	private Date addTime;
+
+	private Date updateTime;
+
+	@ApiModelProperty("修改用户")
+	private int userId;
+
+	@ApiModelProperty("报警邮件")
+	private String alarmEmail;
+
+	@ApiModelProperty("执行器路由策略")
+	private String executorRouteStrategy;
+
+	@ApiModelProperty("执行器,任务Handler名称")
+	private String executorHandler;
+
+	@ApiModelProperty("执行器,任务参数")
+	private String executorParam;
+
+	@ApiModelProperty("阻塞处理策略")
+	private String executorBlockStrategy;
+
+	@ApiModelProperty("任务执行超时时间,单位秒")
+	private int executorTimeout;
+
+	@ApiModelProperty("失败重试次数")
+	private int executorFailRetryCount;
+
+	@ApiModelProperty("GLUE类型\t#com.wugui.datatx.core.glue.GlueTypeEnum")
+	private String glueType;
+
+	@ApiModelProperty("GLUE源代码")
+	private String glueSource;
+
+	@ApiModelProperty("GLUE备注")
+	private String glueRemark;
+
+	@ApiModelProperty("GLUE更新时间")
+	private Date glueUpdatetime;
+
+	@ApiModelProperty("子任务ID")
+	private String childJobId;
+
+	@ApiModelProperty("上次调度时间")
+	private long triggerLastTime;
+
+	@ApiModelProperty("下次调度时间")
+	private long triggerNextTime;
+
+	@ApiModelProperty("datax运行json")
+	private String jobJson;
+
+	@ApiModelProperty("jvm参数")
+	private String jvmParam;
+
+    @ApiModelProperty("所属项目")
+	private int projectId;
+
+	@TableField(exist=false)
+	private String projectName;
+
+	@TableField(exist=false)
+	private String userName;
+}

+ 78 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JobUser.java

@@ -0,0 +1,78 @@
+package com.wugui.datax.admin.entity;
+
+import io.swagger.annotations.ApiModelProperty;
+import org.springframework.util.StringUtils;
+
+/**
+ * @author xuxueli 2019-05-04 16:43:12
+ */
+public class JobUser {
+
+    private int id;
+    @ApiModelProperty("账号")
+    private String username;
+    @ApiModelProperty("密码")
+    private String password;
+    @ApiModelProperty("角色:0-普通用户、1-管理员")
+    private String role;
+    @ApiModelProperty("权限:执行器ID列表,多个逗号分割")
+    private String permission;
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public String getRole() {
+        return role;
+    }
+
+    public void setRole(String role) {
+        this.role = role;
+    }
+
+    public String getPermission() {
+        return permission;
+    }
+
+    public void setPermission(String permission) {
+        this.permission = permission;
+    }
+
+    // plugin
+    public boolean validPermission(int jobGroup){
+        if ("1".equals(this.role)) {
+            return true;
+        } else {
+            if (StringUtils.hasText(this.permission)) {
+                for (String permissionItem : this.permission.split(",")) {
+                    if (String.valueOf(jobGroup).equals(permissionItem)) {
+                        return true;
+                    }
+                }
+            }
+            return false;
+        }
+
+    }
+
+}
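
Editor's note: validPermission() treats role "1" as administrator (every executor group is allowed) and otherwise checks whether the group id appears in the comma-separated permission string. A quick illustration using the class above:

public class PermissionDemo {
    public static void main(String[] args) {
        JobUser user = new JobUser();
        user.setRole("0");               // 普通用户
        user.setPermission("2,5,7");     // 允许访问的执行器ID列表
        System.out.println(user.validPermission(5));  // true  -> 在列表中
        System.out.println(user.validPermission(3));  // false -> 不在列表中
    }
}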

+ 84 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/JwtUser.java

@@ -0,0 +1,84 @@
+package com.wugui.datax.admin.entity;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.core.userdetails.UserDetails;
+
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+public class JwtUser implements UserDetails {
+
+    private Integer id;
+    private String username;
+    private String password;
+    private Collection<? extends GrantedAuthority> authorities;
+
+    public JwtUser() {
+    }
+
+    // 写一个能直接使用user创建jwtUser的构造器
+    public JwtUser(JobUser user) {
+        id = user.getId();
+        username = user.getUsername();
+        password = user.getPassword();
+        authorities = Collections.singleton(new SimpleGrantedAuthority(user.getRole()));
+    }
+
+    @Override
+    public Collection<? extends GrantedAuthority> getAuthorities() {
+        return authorities;
+    }
+
+    @Override
+    public String getPassword() {
+        return password;
+    }
+
+    @Override
+    public String getUsername() {
+        return username;
+    }
+
+    @Override
+    public boolean isAccountNonExpired() {
+        return true;
+    }
+
+    @Override
+    public boolean isAccountNonLocked() {
+        return true;
+    }
+
+    @Override
+    public boolean isCredentialsNonExpired() {
+        return true;
+    }
+
+    @Override
+    public boolean isEnabled() {
+        return true;
+    }
+
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Override
+    public String toString() {
+        return "JwtUser{" +
+                "id=" + id +
+                ", username='" + username + '\'' +
+                ", password='" + password + '\'' +
+                ", authorities=" + authorities +
+                '}';
+    }
+
+}

+ 15 - 0
datax-admin/src/main/java/com/wugui/datax/admin/entity/LoginUser.java

@@ -0,0 +1,15 @@
+package com.wugui.datax.admin.entity;
+
+import lombok.Data;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@Data
+public class LoginUser {
+
+    private String username;
+    private String password;
+    private Integer rememberMe;
+
+}

+ 23 - 0
datax-admin/src/main/java/com/wugui/datax/admin/exception/GlobalExceptionHandler.java

@@ -0,0 +1,23 @@
+package com.wugui.datax.admin.exception;
+
+import com.baomidou.mybatisplus.extension.api.R;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.bind.annotation.ExceptionHandler;
+import org.springframework.web.bind.annotation.RestControllerAdvice;
+
+/**
+ * Created by jwk on 2019/07/05.
+ * 全局异常处理
+ * @author Jing WenKai
+ * @date 2019/07/05 11:57
+ */
+@Slf4j
+@RestControllerAdvice
+public class GlobalExceptionHandler {
+
+    @ExceptionHandler(Exception.class)
+    public R handleException(Exception e){
+        log.error("系统异常", e);
+        return R.failed(e.getMessage());
+    }
+}

+ 27 - 0
datax-admin/src/main/java/com/wugui/datax/admin/exception/TokenIsExpiredException.java

@@ -0,0 +1,27 @@
+package com.wugui.datax.admin.exception;
+
+/**
+ * @description: 自定义异常
+ * @author: jingwk
+ * @date: 2019/11/17 17:21
+ */
+public class TokenIsExpiredException extends Exception{
+    public TokenIsExpiredException() {
+    }
+
+    public TokenIsExpiredException(String message) {
+        super(message);
+    }
+
+    public TokenIsExpiredException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public TokenIsExpiredException(Throwable cause) {
+        super(cause);
+    }
+
+    public TokenIsExpiredException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+        super(message, cause, enableSuppression, writableStackTrace);
+    }
+}

+ 92 - 0
datax-admin/src/main/java/com/wugui/datax/admin/filter/JWTAuthenticationFilter.java

@@ -0,0 +1,92 @@
+package com.wugui.datax.admin.filter;
+
+import com.alibaba.fastjson.JSON;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.wugui.datatx.core.biz.model.ReturnT;
+import com.wugui.datax.admin.core.util.I18nUtil;
+import com.wugui.datax.admin.entity.JwtUser;
+import com.wugui.datax.admin.entity.LoginUser;
+import com.wugui.datax.admin.util.JwtTokenUtils;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.security.authentication.AuthenticationManager;
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.AuthenticationException;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import static com.wugui.datatx.core.util.Constants.SPLIT_COMMA;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+@Slf4j
+public class JWTAuthenticationFilter extends UsernamePasswordAuthenticationFilter {
+
+    private ThreadLocal<Integer> rememberMe = new ThreadLocal<>();
+    private AuthenticationManager authenticationManager;
+
+    public JWTAuthenticationFilter(AuthenticationManager authenticationManager) {
+        this.authenticationManager = authenticationManager;
+        super.setFilterProcessesUrl("/api/auth/login");
+    }
+
+    @Override
+    public Authentication attemptAuthentication(HttpServletRequest request,
+                                                HttpServletResponse response) throws AuthenticationException {
+
+        // 从输入流中获取到登录的信息
+        try {
+            LoginUser loginUser = new ObjectMapper().readValue(request.getInputStream(), LoginUser.class);
+            rememberMe.set(loginUser.getRememberMe());
+            return authenticationManager.authenticate(
+                    new UsernamePasswordAuthenticationToken(loginUser.getUsername(), loginUser.getPassword(), new ArrayList<>())
+            );
+        } catch (IOException e) {
+            logger.error("attemptAuthentication error", e);
+            return null;
+        }
+    }
+
+    // 成功验证后调用的方法
+    // 如果验证成功,就生成token并返回
+    @Override
+    protected void successfulAuthentication(HttpServletRequest request,
+                                            HttpServletResponse response,
+                                            FilterChain chain,
+                                            Authentication authResult) throws IOException {
+
+        JwtUser jwtUser = (JwtUser) authResult.getPrincipal();
+        boolean isRemember = Integer.valueOf(1).equals(rememberMe.get()); // 防止rememberMe为null时出现空指针
+
+        String role = "";
+        Collection<? extends GrantedAuthority> authorities = jwtUser.getAuthorities();
+        for (GrantedAuthority authority : authorities){
+            role = authority.getAuthority();
+        }
+
+        String token = JwtTokenUtils.createToken(jwtUser.getId(),jwtUser.getUsername(), role, isRemember);
+        response.setHeader("token", JwtTokenUtils.TOKEN_PREFIX + token);
+        response.setCharacterEncoding("UTF-8");
+        Map<String, Object> maps = new HashMap<>();
+        maps.put("data", JwtTokenUtils.TOKEN_PREFIX + token);
+        maps.put("roles", role.split(SPLIT_COMMA));
+        response.getWriter().write(JSON.toJSONString(new ReturnT<>(maps)));
+    }
+
+    @Override
+    protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException, ServletException {
+        response.setCharacterEncoding("UTF-8");
+        response.getWriter().write(JSON.toJSON(new ReturnT<>(ReturnT.FAIL_CODE,I18nUtil.getString("login_param_invalid"))).toString());
+    }
+}
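
Editor's note: this filter intercepts POSTs to /api/auth/login, deserializes the body into LoginUser, and on success writes the JWT both into the "token" response header and into the JSON body. A minimal client-side sketch of that call follows; host, port and credentials are placeholders, not values taken from this repository.

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class LoginClientSketch {
    public static void main(String[] args) throws IOException {
        URL url = new URL("http://localhost:8080/api/auth/login");   // assumed address
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json;charset=UTF-8");
        conn.setDoOutput(true);

        // Body fields mirror LoginUser: username, password, rememberMe.
        String body = "{\"username\":\"admin\",\"password\":\"123456\",\"rememberMe\":1}";
        try (OutputStream os = conn.getOutputStream()) {
            os.write(body.getBytes(StandardCharsets.UTF_8));
        }

        // On success the filter puts the JWT into the "token" response header
        // (and into the JSON body under "data").
        System.out.println("HTTP " + conn.getResponseCode());
        System.out.println("token header: " + conn.getHeaderField("token"));
    }
}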

+ 73 - 0
datax-admin/src/main/java/com/wugui/datax/admin/filter/JWTAuthorizationFilter.java

@@ -0,0 +1,73 @@
+package com.wugui.datax.admin.filter;
+
+import com.alibaba.fastjson.JSON;
+import com.baomidou.mybatisplus.extension.api.R;
+import com.wugui.datax.admin.exception.TokenIsExpiredException;
+import com.wugui.datax.admin.util.JwtTokenUtils;
+import org.springframework.security.authentication.AuthenticationManager;
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.Collections;
+
+/**
+ * Created by jingwk on 2019/11/17
+ */
+public class JWTAuthorizationFilter extends BasicAuthenticationFilter {
+
+    public JWTAuthorizationFilter(AuthenticationManager authenticationManager) {
+        super(authenticationManager);
+    }
+
+    @Override
+    protected void doFilterInternal(HttpServletRequest request,
+                                    HttpServletResponse response,
+                                    FilterChain chain) throws IOException, ServletException {
+
+        String tokenHeader = request.getHeader(JwtTokenUtils.TOKEN_HEADER);
+        // 如果请求头中没有Authorization信息则直接放行
+        if (tokenHeader == null || !tokenHeader.startsWith(JwtTokenUtils.TOKEN_PREFIX)) {
+            chain.doFilter(request, response);
+            return;
+        }
+        // 如果请求头中有token,则进行解析,并且设置认证信息
+        try {
+            SecurityContextHolder.getContext().setAuthentication(getAuthentication(tokenHeader));
+        } catch (TokenIsExpiredException e) {
+            //返回json形式的错误信息
+            response.setCharacterEncoding("UTF-8");
+            response.setContentType("application/json; charset=utf-8");
+            response.getWriter().write(JSON.toJSONString(R.failed(e.getMessage())));
+            response.getWriter().flush();
+            return;
+        }
+        super.doFilterInternal(request, response, chain);
+    }
+
+    // 这里从token中获取用户信息并新建一个token
+    private UsernamePasswordAuthenticationToken getAuthentication(String tokenHeader) throws TokenIsExpiredException {
+        String token = tokenHeader.replace(JwtTokenUtils.TOKEN_PREFIX, "");
+        boolean expiration = JwtTokenUtils.isExpiration(token);
+        if (expiration) {
+            throw new TokenIsExpiredException("登录时间过长,请退出重新登录");
+        }
+        else {
+            String username = JwtTokenUtils.getUsername(token);
+            String role = JwtTokenUtils.getUserRole(token);
+            if (username != null) {
+                return new UsernamePasswordAuthenticationToken(username, null,
+                        Collections.singleton(new SimpleGrantedAuthority(role))
+                );
+            }
+        }
+        return null;
+    }
+
+}
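
Editor's note: subsequent requests must carry the token under the header named by JwtTokenUtils.TOKEN_HEADER with the JwtTokenUtils.TOKEN_PREFIX prefix. The sketch below assumes the conventional "Authorization" / "Bearer " pair and an arbitrary protected endpoint; both should be checked against JwtTokenUtils and the real controllers.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class AuthorizedCallSketch {
    public static void main(String[] args) throws IOException {
        String token = "Bearer <jwt-from-login>";                       // placeholder token
        URL url = new URL("http://localhost:8080/api/some/protected");  // placeholder endpoint
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Authorization", token);                 // read by JWTAuthorizationFilter
        System.out.println("HTTP " + conn.getResponseCode());
    }
}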

+ 18 - 0
datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobDatasourceMapper.java

@@ -0,0 +1,18 @@
+package com.wugui.datax.admin.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.wugui.datax.admin.entity.JobDatasource;
+import org.apache.ibatis.annotations.Mapper;
+
+/**
+ * jdbc数据源配置表数据库访问层
+ *
+ * @author zhouhongfa@gz-yibo.com
+ * @version v1.0
+ * @since 2019-07-30
+ */
+@Mapper
+public interface JobDatasourceMapper extends BaseMapper<JobDatasource> {
+    int update(JobDatasource datasource);
+
+}

+ 29 - 0
datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobGroupMapper.java

@@ -0,0 +1,29 @@
+package com.wugui.datax.admin.mapper;
+
+import com.wugui.datax.admin.entity.JobGroup;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * Created by xuxueli on 16/9/30.
+ */
+@Mapper
+public interface JobGroupMapper {
+
+    List<JobGroup> findAll();
+
+    List<JobGroup> find(@Param("appName") String appName,
+                        @Param("title") String title,
+                        @Param("addressList") String addressList);
+
+    int save(JobGroup jobGroup);
+    List<JobGroup> findByAddressType(@Param("addressType") int addressType);
+
+    int update(JobGroup jobGroup);
+
+    int remove(@Param("id") int id);
+
+    JobGroup load(@Param("id") int id);
+}

+ 60 - 0
datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobInfoMapper.java

@@ -0,0 +1,60 @@
+package com.wugui.datax.admin.mapper;
+
+import com.wugui.datax.admin.entity.JobInfo;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.Date;
+import java.util.List;
+
+
+/**
+ * job info
+ *
+ * @author xuxueli 2016-1-12 18:03:45
+ */
+@Mapper
+public interface JobInfoMapper {
+
+    List<JobInfo> pageList(@Param("offset") int offset,
+                           @Param("pagesize") int pagesize,
+                           @Param("jobGroup") int jobGroup,
+                           @Param("triggerStatus") int triggerStatus,
+                           @Param("jobDesc") String jobDesc,
+                           @Param("glueType") String glueType,
+                           @Param("userId") int userId,
+                           @Param("projectIds") Integer[] projectIds);
+
+    int pageListCount(@Param("offset") int offset,
+                      @Param("pagesize") int pagesize,
+                      @Param("jobGroup") int jobGroup,
+                      @Param("triggerStatus") int triggerStatus,
+                      @Param("jobDesc") String jobDesc,
+                      @Param("glueType") String glueType,
+                      @Param("userId") int userId,
+                      @Param("projectIds") Integer[] projectIds);
+
+    List<JobInfo> findAll();
+
+    int save(JobInfo info);
+
+    JobInfo loadById(@Param("id") int id);
+
+    int update(JobInfo jobInfo);
+
+    int delete(@Param("id") long id);
+
+    List<JobInfo> getJobsByGroup(@Param("jobGroup") int jobGroup);
+
+    int findAllCount();
+
+    List<JobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize);
+
+    int scheduleUpdate(JobInfo xxlJobInfo);
+
+    int incrementTimeUpdate(@Param("id") int id, @Param("incStartTime") Date incStartTime);
+
+    int updateLastHandleCode(@Param("id") int id, @Param("lastHandleCode") int lastHandleCode);
+
+    void incrementIdUpdate(@Param("id") int id, @Param("incStartId")Long incStartId);
+}

+ 25 - 0
datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobLogGlueMapper.java

@@ -0,0 +1,25 @@
+package com.wugui.datax.admin.mapper;
+
+import com.wugui.datax.admin.entity.JobLogGlue;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * job log for glue
+ *
+ * @author xuxueli 2016-5-19 18:04:56
+ */
+@Mapper
+public interface JobLogGlueMapper {
+
+    int save(JobLogGlue jobLogGlue);
+
+    List<JobLogGlue> findByJobId(@Param("jobId") int jobId);
+
+    int removeOld(@Param("jobId") int jobId, @Param("limit") int limit);
+
+    int deleteByJobId(@Param("jobId") int jobId);
+
+}

+ 0 - 0
datax-admin/src/main/java/com/wugui/datax/admin/mapper/JobLogMapper.java


Some files were not shown because too many files changed in this diff