
init from phab

xielq, 4 years ago
Parent
Commit
60156a8004
55 files changed, 3985 insertions(+), 0 deletions(-)
  1. README.md (+0 -0)
  2. pom.xml (+33 -0)
  3. ps-sync-consumer/README.md (+16 -0)
  4. ps-sync-consumer/pom.xml (+127 -0)
  5. ps-sync-consumer/src/main/docker/Dockerfile (+6 -0)
  6. ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/Application.java (+38 -0)
  7. ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/DruidDBConfiguration.java (+247 -0)
  8. ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/KafkaConsumer.java (+326 -0)
  9. ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/SyncMessageDeserializer.java (+15 -0)
  10. ps-sync-consumer/src/main/resources/application.yml (+70 -0)
  11. ps-sync-consumer/src/main/resources/config/application-b2b-mysql-pre.properties (+19 -0)
  12. ps-sync-consumer/src/main/resources/config/application-b2b-mysql.properties (+19 -0)
  13. ps-sync-consumer/src/main/resources/config/application-b2b-prod.properties (+19 -0)
  14. ps-sync-consumer/src/main/resources/config/application-b2b.properties (+19 -0)
  15. ps-sync-consumer/src/main/resources/config/application-b2c.properties (+19 -0)
  16. ps-sync-consumer/src/main/resources/config/application-copy.properties (+19 -0)
  17. ps-sync-consumer/src/main/resources/logback.xml (+44 -0)
  18. ps-sync-core/pom.xml (+43 -0)
  19. ps-sync-core/src/main/java/com/uas/ps/sync/core/MessageProducer.java (+117 -0)
  20. ps-sync-core/src/main/java/com/uas/ps/sync/core/SyncMessage.java (+100 -0)
  21. ps-sync-core/src/main/java/com/uas/ps/sync/core/annotation/Sync.java (+17 -0)
  22. ps-sync-core/src/main/java/com/uas/ps/sync/core/intercept/CRUDIntercept.java (+41 -0)
  23. ps-sync-core/src/main/java/com/uas/ps/sync/core/util/ReflectUtils.java (+71 -0)
  24. ps-sync-entity/pom.xml (+47 -0)
  25. ps-sync-entity/src/main/java/com/uas/ps/sync/converter/JpaJsonDocumentsConverter.java (+92 -0)
  26. ps-sync-entity/src/main/java/com/uas/ps/sync/entity/SyncMessage.java (+207 -0)
  27. ps-sync-producer/README.md (+14 -0)
  28. ps-sync-producer/pom.xml (+75 -0)
  29. ps-sync-producer/sql/product$storestaus.sql (+79 -0)
  30. ps-sync-producer/sql/product$users.sql (+81 -0)
  31. ps-sync-producer/sql/productmatchresults.sql (+101 -0)
  32. ps-sync-producer/sql/products.sql (+167 -0)
  33. ps-sync-producer/sql/trigger-mysql.sql (+96 -0)
  34. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/Application.java (+50 -0)
  35. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/DruidDBConfiguration.java (+261 -0)
  36. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/RepositoryConfiguration.java (+29 -0)
  37. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/WebAppConfiguration.java (+43 -0)
  38. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/JmsController.java (+67 -0)
  39. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/JmsListener.java (+173 -0)
  40. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SPage.java (+100 -0)
  41. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/ScheduleController.java (+52 -0)
  42. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageDao.java (+55 -0)
  43. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageService.java (+19 -0)
  44. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageServiceImpl.java (+66 -0)
  45. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/Executable.java (+17 -0)
  46. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/NumberGenerator.java (+36 -0)
  47. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskInformation.java (+154 -0)
  48. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskLog.java (+74 -0)
  49. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskService.java (+62 -0)
  50. ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskServiceImpl.java (+209 -0)
  51. ps-sync-producer/src/main/resources/application.yml (+33 -0)
  52. ps-sync-producer/src/main/resources/config/application-cloud.properties (+19 -0)
  53. ps-sync-producer/src/main/resources/config/application-dev.properties (+19 -0)
  54. ps-sync-producer/src/main/resources/config/application-test.properties (+19 -0)
  55. ps-sync-producer/src/main/resources/logback.xml (+44 -0)

+ 0 - 0
README.md


+ 33 - 0
pom.xml

@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.uas.ps</groupId>
+        <artifactId>ps-parent</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <artifactId>ps-sync-parent</artifactId>
+    <packaging>pom</packaging>
+
+    <modules>
+        <module>ps-sync-entity</module>
+        <module>ps-sync-core</module>
+        <module>ps-sync-producer</module>
+        <module>ps-sync-consumer</module>
+    </modules>
+
+    <properties>
+        <ps.sync.entity.version>0.0.1-SNAPSHOT</ps.sync.entity.version>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>com.uas.ps</groupId>
+                <artifactId>ps-sync-entity</artifactId>
+                <version>${ps.sync.entity.version}</version>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+</project>

+ 16 - 0
ps-sync-consumer/README.md

@@ -0,0 +1,16 @@
+# Description
+This is the Sync Consumer.
+
+# Deploy
+### Test
+##### B2B
+* Visit [Jenkins](http://10.10.100.200:5001/job/ps-sync-consumer.b2b/)
+* Click "立即构建"
+
+### Production
+##### B2B
+* Visit [Jenkins](http://10.10.100.200:5001/job/ps-sync-consumer.prod.package/)
+* Click "立即构建"
+* Visit [Jenkins](http://119.147.37.222:9091/job/ps-sync-consumer/)
+* Click "Build with Parameters"
+* Click "开始构建"

+ 127 - 0
ps-sync-consumer/pom.xml

@@ -0,0 +1,127 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.uas.ps</groupId>
+        <artifactId>ps-sync-parent</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <artifactId>ps-sync-consumer</artifactId>
+    <packaging>jar</packaging>
+
+    <properties>
+        <docker.plugin.version>0.4.13</docker.plugin.version>
+        <docker.registry>10.10.100.200:5000</docker.registry>
+        <docker.directory>${project.build.directory}/generated-docker</docker.directory>
+        <docker.pushImage>false</docker.pushImage>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-sync-entity</artifactId>
+        </dependency>
+
+        <!-- spring boot -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-actuator</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-jdbc</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.oracle</groupId>
+            <artifactId>ojdbc6</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <resources>
+            <!-- This must be included -->
+            <resource>
+                <directory>${project.basedir}/src/main/resources</directory>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+            </resource>
+            <!-- Filter the Dockerfile to replace its placeholders -->
+            <resource>
+                <directory>${project.basedir}/src/main/docker</directory>
+                <filtering>true</filtering>
+                <includes>
+                    <include>**/Dockerfile</include>
+                </includes>
+                <targetPath>${docker.directory}</targetPath>
+            </resource>
+        </resources>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>com.spotify</groupId>
+                <artifactId>docker-maven-plugin</artifactId>
+                <version>${docker.plugin.version}</version>
+                <executions>
+                    <execution>
+                        <id>build-image</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>build</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <registryUrl>${docker.registry}</registryUrl>
+                    <!-- <dockerHost>${docker.host}</dockerHost>
+                    <baseImage>${docker.registry}/java</baseImage>
+                    <entryPoint>["java", "-jar", "/${project.build.finalName}.jar"]</entryPoint>
+                     -->
+                    <dockerDirectory>${docker.directory}</dockerDirectory>
+                    <imageName>${docker.registry}/${project.artifactId}</imageName>
+                    <pushImage>${docker.pushImage}</pushImage>
+                    <resources>
+                        <resource>
+                            <targetPath>/</targetPath>
+                            <directory>${project.build.directory}</directory>
+                            <include>${project.build.finalName}.jar</include>
+                        </resource>
+                    </resources>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 6 - 0
ps-sync-consumer/src/main/docker/Dockerfile

@@ -0,0 +1,6 @@
+FROM @docker.registry@/java
+VOLUME /tmp
+ADD @project.build.finalName@.jar app.jar
+RUN bash -c 'touch /app.jar'
+EXPOSE 28001
+ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "app.jar"]

+ 38 - 0
ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/Application.java

@@ -0,0 +1,38 @@
+package com.uas.ps.sync.consumer;
+
+import com.uas.ps.core.util.ContextUtils;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.event.ApplicationPreparedEvent;
+import org.springframework.context.ApplicationListener;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+
+/**
+ * Application entry point
+ *
+ * @author sunyj
+ * @since 2017-08-16 16:00:03
+ */
+@SpringBootApplication
+public class Application {
+    public static void main(String[] args) throws FileNotFoundException {
+        File logFile = new File("logs/log.log");
+        if (!logFile.getParentFile().exists()) {
+            logFile.getParentFile().mkdir();
+        }
+        System.setErr(new PrintStream(new FileOutputStream(logFile, true)));
+        SpringApplication application = new SpringApplication(Application.class);
+        application.addListeners(new ApplicationListener<ApplicationPreparedEvent>() {
+            @Override
+            public void onApplicationEvent(ApplicationPreparedEvent event) {
+                ContextUtils.setApplicationContext(event.getApplicationContext());
+            }
+        });
+        application.run(args);
+    }
+}

+ 247 - 0
ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/DruidDBConfiguration.java

@@ -0,0 +1,247 @@
+package com.uas.ps.sync.consumer;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Primary;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.stereotype.Component;
+
+import java.sql.SQLException;
+
+@Component
+@ConfigurationProperties(prefix = "datasource")
+public class DruidDBConfiguration {
+
+    private Logger logger = LoggerFactory.getLogger(DruidDBConfiguration.class);
+
+    private String url;
+
+    private String username;
+
+    private String password;
+
+    private String driverClassName;
+
+    private int initialSize;
+
+    private int minIdle;
+
+    private int maxActive;
+
+    private int maxWait;
+
+    private int timeBetweenEvictionRunsMillis;
+
+    private int minEvictableIdleTimeMillis;
+
+    private String validationQuery;
+
+    private boolean testWhileIdle;
+
+    private boolean testOnBorrow;
+
+    private boolean testOnReturn;
+
+    private int timeBetweenLogStatsMillis;
+
+    private boolean poolPreparedStatements;
+
+    private int maxPoolPreparedStatementPerConnectionSize;
+
+    private String filters;
+
+    private String connectionProperties;
+
+    @Bean
+    @Primary
+    public DruidDataSource dataSource() {
+        DruidDataSource dataSource = new DruidDataSource();
+
+        dataSource.setUrl(url);
+        dataSource.setUsername(username);
+        dataSource.setPassword(password);
+        dataSource.setDriverClassName(driverClassName);
+
+        // configuration
+        dataSource.setInitialSize(initialSize);
+        dataSource.setMinIdle(minIdle);
+        dataSource.setMaxActive(maxActive);
+        dataSource.setMaxWait(maxWait);
+        dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
+        dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
+        dataSource.setValidationQuery(validationQuery);
+        dataSource.setTestWhileIdle(testWhileIdle);
+        dataSource.setTestOnBorrow(testOnBorrow);
+        dataSource.setTestOnReturn(testOnReturn);
+        dataSource.setTimeBetweenLogStatsMillis(timeBetweenLogStatsMillis);
+        dataSource.setPoolPreparedStatements(poolPreparedStatements);
+        dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
+        try {
+            dataSource.setFilters(filters);
+        } catch (SQLException e) {
+            logger.error("数据源初始化失败: setFilters", e);
+        }
+        dataSource.setConnectionProperties(connectionProperties);
+        return dataSource;
+    }
+
+    @Bean
+    public JdbcTemplate jdbcTemplate() {
+        return new JdbcTemplate(dataSource());
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public String getDriverClassName() {
+        return driverClassName;
+    }
+
+    public void setDriverClassName(String driverClassName) {
+        this.driverClassName = driverClassName;
+    }
+
+    public int getInitialSize() {
+        return initialSize;
+    }
+
+    public void setInitialSize(int initialSize) {
+        this.initialSize = initialSize;
+    }
+
+    public int getMinIdle() {
+        return minIdle;
+    }
+
+    public void setMinIdle(int minIdle) {
+        this.minIdle = minIdle;
+    }
+
+    public int getMaxActive() {
+        return maxActive;
+    }
+
+    public void setMaxActive(int maxActive) {
+        this.maxActive = maxActive;
+    }
+
+    public int getMaxWait() {
+        return maxWait;
+    }
+
+    public void setMaxWait(int maxWait) {
+        this.maxWait = maxWait;
+    }
+
+    public int getTimeBetweenEvictionRunsMillis() {
+        return timeBetweenEvictionRunsMillis;
+    }
+
+    public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) {
+        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
+    }
+
+    public int getMinEvictableIdleTimeMillis() {
+        return minEvictableIdleTimeMillis;
+    }
+
+    public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) {
+        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
+    }
+
+    public String getValidationQuery() {
+        return validationQuery;
+    }
+
+    public void setValidationQuery(String validationQuery) {
+        this.validationQuery = validationQuery;
+    }
+
+    public boolean isTestWhileIdle() {
+        return testWhileIdle;
+    }
+
+    public void setTestWhileIdle(boolean testWhileIdle) {
+        this.testWhileIdle = testWhileIdle;
+    }
+
+    public boolean isTestOnBorrow() {
+        return testOnBorrow;
+    }
+
+    public void setTestOnBorrow(boolean testOnBorrow) {
+        this.testOnBorrow = testOnBorrow;
+    }
+
+    public boolean isTestOnReturn() {
+        return testOnReturn;
+    }
+
+    public void setTestOnReturn(boolean testOnReturn) {
+        this.testOnReturn = testOnReturn;
+    }
+
+    public int getTimeBetweenLogStatsMillis() {
+        return timeBetweenLogStatsMillis;
+    }
+
+    public void setTimeBetweenLogStatsMillis(int timeBetweenLogStatsMillis) {
+        this.timeBetweenLogStatsMillis = timeBetweenLogStatsMillis;
+    }
+
+    public boolean isPoolPreparedStatements() {
+        return poolPreparedStatements;
+    }
+
+    public void setPoolPreparedStatements(boolean poolPreparedStatements) {
+        this.poolPreparedStatements = poolPreparedStatements;
+    }
+
+    public int getMaxPoolPreparedStatementPerConnectionSize() {
+        return maxPoolPreparedStatementPerConnectionSize;
+    }
+
+    public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) {
+        this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;
+    }
+
+    public String getFilters() {
+        return filters;
+    }
+
+    public void setFilters(String filters) {
+        this.filters = filters;
+    }
+
+    public String getConnectionProperties() {
+        return connectionProperties;
+    }
+
+    public void setConnectionProperties(String connectionProperties) {
+        this.connectionProperties = connectionProperties;
+    }
+}

+ 326 - 0
ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/KafkaConsumer.java

@@ -0,0 +1,326 @@
+package com.uas.ps.sync.consumer;
+
+import com.alibaba.fastjson.JSONObject;
+import com.uas.ps.core.util.ArrayUtils;
+import com.uas.ps.core.util.Assert;
+import com.uas.ps.core.util.DateFormatUtils;
+import com.uas.ps.core.util.StringUtils;
+import com.uas.ps.sync.entity.SyncMessage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.stereotype.Component;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * @author sunyj
+ * @since 2018/1/14 17:34
+ */
+@Component
+public class KafkaConsumer {
+
+    private static final String LOG_DIR = System.getProperty("java.io.tmpdir");
+    private static final String LOG_FILE_PATH = (LOG_DIR.endsWith(File.separator) ? LOG_DIR : LOG_DIR + "/") + "consumer-update-error.log";
+    private final Logger logger = LoggerFactory.getLogger(KafkaConsumer.class);
+    private final JdbcTemplate jdbcTemplate;
+    private final KafkaTemplate<String, String> kafkaTemplate;
+    private final KafkaProperties kafkaProperties;
+    private ConcurrentMap<String, Integer> batchCodeCount = new ConcurrentHashMap<>();
+
+    @Autowired
+    public KafkaConsumer(JdbcTemplate jdbcTemplate, KafkaTemplate<String, String> kafkaTemplate, ConcurrentKafkaListenerContainerFactory<String, SyncMessage> kafkaListenerContainerFactory, KafkaProperties kafkaProperties) {
+        this.jdbcTemplate = jdbcTemplate;
+        this.kafkaTemplate = kafkaTemplate;
+        kafkaListenerContainerFactory.setBatchListener(true);
+        this.kafkaProperties = kafkaProperties;
+    }
+
+    @KafkaListener(topics = "${kafka.consumer.topics}")
+    public void listen(List<SyncMessage> messages) {
+        try {
+            logger.info("records size: " + messages.size());
+            Map<String, ArgsMessage> operations = new HashMap<>();
+            for (SyncMessage message : messages) {
+                MessageResult messageResult;
+                try {
+                    messageResult = processMessage(message);
+                } catch (Exception e) {
+                    logger.error("处理失败", e);
+                    continue;
+                }
+                String sql = messageResult.getSql();
+                ArgsMessage argsMessage = operations.get(sql);
+                if (argsMessage == null) {
+                    argsMessage = new ArgsMessage();
+                    operations.put(sql, argsMessage);
+                }
+                argsMessage.add(messageResult.getArgs(), message);
+            }
+
+            Set<Map.Entry<String, ArgsMessage>> entries = operations.entrySet();
+            for (Map.Entry<String, ArgsMessage> entry : entries) {
+                String sql = entry.getKey();
+                ArgsMessage argsMessage = entry.getValue();
+                List<Object[]> argsList = argsMessage.getArgsList();
+                List<SyncMessage> messageList = argsMessage.getMessages();
+                logger.info("batchUpdate... " + argsList.size());
+                logger.info("sql... " + sql);
+                logger.info("argsList... " + JSONObject.toJSONString(argsList));
+                try {
+                    // Try the batch update first
+                    int[] results = jdbcTemplate.batchUpdate(sql, argsList);
+                    logger.info("results... " + Arrays.toString(results));
+                } catch (Exception e) {
+                    // The batch update failed; fall back to row-by-row updates
+                    logger.error("批量更新失败", e);
+                    logger.info("尝试单条更新");
+                    for (int i = 0; i < argsList.size(); i++) {
+                        Object[] args = argsList.get(i);
+                        try {
+                            int result = jdbcTemplate.update(sql, args);
+                            logger.info("results... " + result);
+                        } catch (Exception e1) {
+                            // The single-row update failed as well; record it in a separate log file
+                            logger.error("单条更新失败:" + messageList.get(i), e1);
+                            try (FileWriter fileWriter = new FileWriter(LOG_FILE_PATH, true)) {
+                                fileWriter.write(DateFormatUtils.DATETIME_FORMAT.format(new Date()) + " " + messageList.get(i) + "\n");
+                                fileWriter.flush();
+                            } catch (IOException e2) {
+                                logger.error("日志写入失败", e2);
+                            }
+                        }
+                    }
+                }
+            }
+        } catch (Exception e) {
+            logger.error("消息处理出错", e);
+        }
+    }
+
+    private MessageResult processMessage(SyncMessage syncMessage) {
+        logger.info("receiving message... " + syncMessage.toString());
+        String batchCode = syncMessage.getBatchCode();
+        String sourceApp = syncMessage.getSourceApp();
+        String groupId = kafkaProperties.getConsumer().getGroupId();
+        // When batchCode is non-empty and the source app matches this consumer's group id, produce a reply message to signal that the data sync has finished (with only one consumer left, that restriction is temporarily removed)
+        if (!StringUtils.isEmpty(batchCode)) {
+            Integer count = batchCodeCount.get(batchCode);
+            if (count == null) {
+                count = 1;
+            } else {
+                count++;
+            }
+            logger.info("consuming... " + count + " for batch " + batchCode);
+            if (count.intValue() == syncMessage.getBatchSize()) {
+                logger.info(String.format("batch %s processed: %d, sending message...", batchCode, count));
+                kafkaTemplate.sendDefault(batchCode);
+                logger.info(String.format("message sent: batchCode=%s, count=%d", batchCode, count));
+            } else {
+                batchCodeCount.put(batchCode, count);
+            }
+        }
+        String methodType = syncMessage.getMethodType();
+        Assert.hasText(methodType, "methodType is empty: " + methodType);
+        // Dispatch to a different operation depending on the change type
+        switch (methodType.toLowerCase()) {
+            case "delete":
+                return delete(syncMessage);
+            case "update":
+                return update(syncMessage);
+            case "insert":
+                return insert(syncMessage);
+            default:
+                throw new IllegalArgumentException("Unsupported method type: " + methodType);
+        }
+    }
+
+    /**
+     * Delete the corresponding record based on the message
+     */
+    private MessageResult delete(SyncMessage syncMessage) {
+        String tableName = syncMessage.getTableName();
+        JSONObject dataKey = syncMessage.getDataKey();
+        Assert.hasText(tableName, "tableName is empty: " + tableName);
+        Assert.notEmpty(dataKey, "dataKey is empty: " + dataKey);
+        logger.info("deleting... table=" + tableName + ", key=" + dataKey);
+
+        // Build the SQL
+        // delete from ${tableName} where key3 = ? and key4 = ?
+        StringBuilder sqlBuilder = new StringBuilder("delete from ");
+        sqlBuilder.append(tableName);
+
+        Object[] args = where(sqlBuilder, dataKey);
+        return new MessageResult(sqlBuilder.toString(), args);
+    }
+
+    /**
+     * Update the corresponding record based on the message
+     */
+    private MessageResult update(SyncMessage syncMessage) {
+        String tableName = syncMessage.getTableName();
+        JSONObject dataKey = syncMessage.getDataKey();
+        JSONObject data = syncMessage.getData();
+        Assert.hasText(tableName, "tableName is empty: " + tableName);
+        Assert.notEmpty(dataKey, "dataKey is empty: " + dataKey);
+        Assert.notEmpty(data, "data is empty: " + data);
+        logger.info("updating... table=" + tableName + ", key=" + dataKey);
+
+        // Build the SQL
+        // update ${tableName} set key1 = ?, key2 = ? where key3 = ? and key4 = ?
+        StringBuilder sqlBuilder = new StringBuilder("update ");
+        sqlBuilder.append(tableName).append(" set ");
+
+        Object[] updateArgs = connect(sqlBuilder, data, ", ", false);
+        Object[] whereArgs = where(sqlBuilder, dataKey);
+        return new MessageResult(sqlBuilder.toString(), ArrayUtils.concat(updateArgs, whereArgs));
+    }
+
+    /**
+     * Insert the corresponding record based on the message
+     */
+    private MessageResult insert(SyncMessage syncMessage) {
+        String tableName = syncMessage.getTableName();
+        JSONObject dataKey = syncMessage.getDataKey();
+        JSONObject data = syncMessage.getData();
+        Assert.hasText(tableName, "tableName is empty: " + tableName);
+        Assert.notEmpty(dataKey, "dataKey is empty: " + dataKey);
+        Assert.notEmpty(data, "data is empty: " + data);
+        logger.info("inserting... table=" + tableName);
+
+        // Build the SQL
+        // insert into ${tableName} (key1, key2) values(?, ?)
+        StringBuilder sqlBuilder = new StringBuilder("insert into ");
+        sqlBuilder.append(tableName).append(" (");
+        StringBuilder valuesBuilder = new StringBuilder(" values(");
+        JSONObject map = dataKey;
+        map.putAll(data);
+        Object[] args = new Object[map.size()];
+        Iterator<Map.Entry<String, Object>> iterator = map.entrySet().iterator();
+        for (int i = 0; i < map.size(); i++) {
+            Map.Entry<String, Object> entry = iterator.next();
+            Object value = entry.getValue();
+            if (i != 0) {
+                sqlBuilder.append(", ");
+                valuesBuilder.append(", ");
+            }
+            sqlBuilder.append(entry.getKey());
+            valuesBuilder.append("?");
+            args[i] = mayConvertToDate(value);
+        }
+        sqlBuilder.append(")");
+        valuesBuilder.append(")");
+        sqlBuilder.append(valuesBuilder);
+        return new MessageResult(sqlBuilder.toString(), args);
+    }
+
+    /**
+     * Build the where clause
+     *
+     * @param sqlBuilder the SQL being built
+     * @param map        the conditions
+     * @return the bind arguments for the modified SQL (${sqlBuilder} where key1 = ? and key2 = ?)
+     */
+    private Object[] where(StringBuilder sqlBuilder, JSONObject map) {
+        sqlBuilder.append(" where ");
+        // Values used in the where clause must not be null
+        return connect(sqlBuilder, map, " and ", true);
+    }
+
+    /**
+     * Join key-value pairs with the given separator
+     *
+     * @param sqlBuilder    the SQL being built
+     * @param map           the key-value pairs
+     * @param separator     the separator
+     * @param assertNotNull whether to require that no value in the map is null
+     * @return the bind arguments for the modified SQL (${sqlBuilder} key1 = ?${separator}key2 = ?)
+     */
+    private Object[] connect(StringBuilder sqlBuilder, JSONObject map, String separator, boolean assertNotNull) {
+        Object[] args = new Object[map.size()];
+        Iterator<Map.Entry<String, Object>> iterator = map.entrySet().iterator();
+        for (int i = 0; i < map.size(); i++) {
+            Map.Entry<String, Object> entry = iterator.next();
+            Object value = entry.getValue();
+            if (i != 0) {
+                sqlBuilder.append(separator);
+            }
+            sqlBuilder.append(entry.getKey()).append(" = ?");
+            if (assertNotNull) {
+                Assert.notNull(value, "value is null: " + entry);
+            }
+            args[i] = mayConvertToDate(value);
+        }
+        return args;
+    }
+
+
+    /**
+     * Convert date-typed values
+     */
+    private Object mayConvertToDate(Object value) {
+        // When the data source is Oracle, convert date strings to Date values
+        // TODO isOracle
+        // if (dataSource.isOracle() && value != null) {
+        if (value != null) {
+            try {
+                String str = value.toString();
+                if (DateFormatUtils.isDate(str)) {
+                    return DateFormatUtils.DATE_FORMAT.parse(str);
+                } else if (DateFormatUtils.isDateTime(str)) {
+                    return DateFormatUtils.DATETIME_FORMAT.parse(str);
+                }
+            } catch (ParseException e) {
+                throw new IllegalStateException("日期转换出错:" + value);
+            }
+        }
+        return value;
+    }
+
+    private class MessageResult {
+        private String sql;
+        private Object[] args;
+
+        private MessageResult(String sql, Object[] args) {
+            this.sql = sql;
+            this.args = args;
+        }
+
+        private String getSql() {
+            return sql;
+        }
+
+        private Object[] getArgs() {
+            return args;
+        }
+    }
+
+    private class ArgsMessage {
+        private List<Object[]> argsList = new ArrayList<>();
+        private List<SyncMessage> messages = new ArrayList<>();
+
+        private void add(Object[] args, SyncMessage message) {
+            argsList.add(args);
+            messages.add(message);
+        }
+
+        private List<Object[]> getArgsList() {
+            return argsList;
+        }
+
+        private List<SyncMessage> getMessages() {
+            return messages;
+        }
+    }
+}

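For illustration, the sketch below mirrors the parameterized statement that update() builds for an "update" message. The table and column names (products, pr_*) are hypothetical and not taken from this commit; in the real consumer, identical statements from one poll are grouped into a single jdbcTemplate.batchUpdate(sql, argsList) call.

    import com.alibaba.fastjson.JSONObject;

    // Minimal sketch (hypothetical table and columns) of the SQL shape built for an "update" message.
    public class UpdateSqlSketch {
        public static void main(String[] args) {
            JSONObject data = new JSONObject(true);      // ordered, like the incoming payload
            data.put("pr_title", "new title");
            data.put("pr_status", 1);
            JSONObject dataKey = new JSONObject(true);
            dataKey.put("pr_id", 42);

            StringBuilder sql = new StringBuilder("update products set ");
            int i = 0;
            for (String key : data.keySet()) {
                sql.append(i++ == 0 ? "" : ", ").append(key).append(" = ?");
            }
            sql.append(" where ");
            i = 0;
            for (String key : dataKey.keySet()) {
                sql.append(i++ == 0 ? "" : " and ").append(key).append(" = ?");
            }
            // Prints: update products set pr_title = ?, pr_status = ? where pr_id = ?
            // The bind arguments are the data values followed by the dataKey values.
            System.out.println(sql);
        }
    }
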
+ 15 - 0
ps-sync-consumer/src/main/java/com/uas/ps/sync/consumer/SyncMessageDeserializer.java

@@ -0,0 +1,15 @@
+package com.uas.ps.sync.consumer;
+
+import com.uas.ps.sync.entity.SyncMessage;
+import org.springframework.kafka.support.serializer.JsonDeserializer;
+
+/**
+ * @author sunyj
+ * @since 2018/1/14 18:23
+ */
+public class SyncMessageDeserializer extends JsonDeserializer<SyncMessage> {
+
+    public SyncMessageDeserializer() {
+        super(SyncMessage.class);
+    }
+}

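A small usage sketch of the deserializer follows; the JSON payload and its field values are invented for illustration and assume the SyncMessage entity (added later in this commit) exposes matching properties.

    import java.nio.charset.StandardCharsets;

    import com.uas.ps.sync.consumer.SyncMessageDeserializer;
    import com.uas.ps.sync.entity.SyncMessage;

    public class SyncMessageDeserializerExample {
        public static void main(String[] args) {
            // Hypothetical record value; in production the bytes come from the Kafka record.
            byte[] payload = ("{\"tableName\":\"products\",\"methodType\":\"update\","
                    + "\"dataKey\":{\"pr_id\":1},\"data\":{\"pr_status\":2}}")
                    .getBytes(StandardCharsets.UTF_8);
            // deserialize(topic, data) is inherited from spring-kafka's JsonDeserializer.
            SyncMessage message = new SyncMessageDeserializer().deserialize("PUBLIC_DEV", payload);
            System.out.println(message);
        }
    }
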
+ 70 - 0
ps-sync-consumer/src/main/resources/application.yml

@@ -0,0 +1,70 @@
+spring:
+ kafka:
+  bootstrap-servers: 10.10.100.11:9292,10.10.100.12:9292,10.10.100.13:9292,10.10.100.14:9292,10.10.100.15:9292,10.10.100.16:9292
+#  consumer
+  consumer:
+   key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+   value-deserializer: com.uas.ps.sync.consumer.SyncMessageDeserializer
+   max-poll-records: 500
+   auto-offset-reset: latest
+#  producer
+  producer:
+   key-serializer: org.apache.kafka.common.serialization.StringSerializer
+   value-serializer: org.apache.kafka.common.serialization.StringSerializer
+  template:
+    default-topic: RESPONSE
+  listener:
+    poll-timeout: 3000
+ profiles:
+   active: copy
+
+kafka:
+ consumer:
+  topics: PUBLIC
+
+---
+spring:
+ profiles: b2b
+ kafka:
+  consumer:
+   group-id: b2b
+
+---
+spring:
+ profiles: b2b-mysql
+ kafka:
+  consumer:
+   group-id: b2b-mysql
+
+---
+spring:
+ profiles: b2b-prod
+ kafka:
+  bootstrap-servers: 10.10.0.69:9291,10.10.0.148:9292,10.10.0.98:9293
+  consumer:
+   group-id: b2b
+
+---
+spring:
+ profiles: b2b-mysql-pre
+ kafka:
+  bootstrap-servers: 10.10.0.69:9291,10.10.0.148:9292,10.10.0.98:9293
+  consumer:
+   group-id: b2b-mysql
+
+---
+spring:
+ profiles: b2c
+ kafka:
+  consumer:
+   group-id: b2c
+
+---
+spring:
+ profiles: copy
+ kafka:
+  consumer:
+   group-id: copy
+kafka:
+ consumer:
+  topics: PUBLIC_DEV

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-b2b-mysql-pre.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://10.10.0.76:3306/b2b_prod?characterEncoding=UTF-8&allowMultiQueries=true&rewriteBatchedStatements=true
+datasource.username=root
+datasource.password=select
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-b2b-mysql.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://192.168.253.12:3306/b2b_mysql_test?characterEncoding=UTF-8&allowMultiQueries=true&rewriteBatchedStatements=true
+datasource.username=root
+datasource.password=select111***
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-b2b-prod.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:oracle:thin:@ubtob.com:1521:orcl
+datasource.username=platform$b2b
+datasource.password=select*fromuu
+datasource.driverClassName=oracle.jdbc.driver.OracleDriver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-b2b.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:oracle:thin:@192.168.253.6:1521:orcl
+datasource.username=uuplatformdemo
+datasource.password=selectuuplatform
+datasource.driverClassName=oracle.jdbc.driver.OracleDriver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-b2c.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://192.168.253.6:3306/mall_test_dev?characterEncoding=utf-8&useSSL=false
+datasource.username=root
+datasource.password=select111***
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=100
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=300000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-consumer/src/main/resources/config/application-copy.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://192.168.253.6:3306/public_resources_copy?characterEncoding=utf-8&useSSL=false
+datasource.username=root
+datasource.password=select111***
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 44 - 0
ps-sync-consumer/src/main/resources/logback.xml

@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+	<appender name="FILE"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<File>logs/log.log</File>
+		<encoder>
+			<pattern>
+				%date{yyyy-MM-dd HH:mm:ss:SSS} [%relative ms] %-5level [%50.50(%logger{36}.%method:%line)] ---- %msg%n
+			</pattern>
+			<charset>UTF-8</charset> <!-- set the charset here -->
+		</encoder>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<!-- daily rollover -->
+			<FileNamePattern>logs/log.%d{yyyy-MM-dd}.log</FileNamePattern>
+			<!-- keep 10 days' worth of history -->
+			<maxHistory>10</maxHistory>
+		</rollingPolicy>
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>INFO</level>
+		</filter>
+	</appender>
+
+	<!-- Console output -->
+	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+		<!-- encoder defaults to ch.qos.logback.classic.encoder.PatternLayoutEncoder -->
+		<encoder>
+			<pattern>
+				%date{yyyy-MM-dd HH:mm:ss:SSS} [%relative ms] %-5level [%50.50(%logger{36}.%method:%line)] ---- %msg%n
+			</pattern>
+			<charset>UTF-8</charset> <!-- set the charset here -->
+		</encoder>
+		<!-- Only log level WARN and above -->
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>INFO</level>
+		</filter>
+	</appender>
+
+	<!-- Enable FILE and STDOUT appenders for all log messages. By default, 
+		only log at level INFO and above. -->
+	<root level="INFO">
+		<appender-ref ref="FILE" />
+		<appender-ref ref="STDOUT" />
+	</root>
+</configuration>

+ 43 - 0
ps-sync-core/pom.xml

@@ -0,0 +1,43 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.uas.ps</groupId>
+        <artifactId>ps-sync-parent</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <artifactId>ps-sync-core</artifactId>
+    <packaging>jar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjweaver</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-jpa</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-core</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-source-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 117 - 0
ps-sync-core/src/main/java/com/uas/ps/sync/core/MessageProducer.java

@@ -0,0 +1,117 @@
+package com.uas.ps.sync.core;
+
+import com.uas.ps.core.util.Assert;
+import com.uas.ps.sync.core.annotation.Sync;
+import com.uas.ps.sync.core.util.ReflectUtils;
+import org.aspectj.lang.ProceedingJoinPoint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.jpa.repository.support.JpaEntityInformation;
+import org.springframework.data.jpa.repository.support.SimpleJpaRepository;
+import org.springframework.data.repository.CrudRepository;
+import org.springframework.kafka.core.KafkaTemplate;
+
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import java.io.Serializable;
+import java.util.Arrays;
+
+/**
+ * Message producer
+ *
+ * @author sunyj
+ * @since 2018/1/26 10:48
+ */
+public class MessageProducer {
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    private KafkaTemplate kafkaTemplate;
+
+    @PersistenceContext
+    private EntityManager entityManager;
+
+    public MessageProducer(KafkaTemplate kafkaTemplate) {
+        Assert.notNull(kafkaTemplate, "kafkaTemplate is null");
+        this.kafkaTemplate = kafkaTemplate;
+    }
+
+    public void addSaveMessage(Object result) {
+        if (result != null && result.getClass().isAnnotationPresent(Sync.class)) {
+            if (result instanceof Iterable) {
+                Iterable iterable = (Iterable) result;
+                for (Object data : iterable) {
+                    save(data);
+                }
+            } else {
+                save(result);
+            }
+        }
+    }
+
+    public void addDeleteMessage(ProceedingJoinPoint proceedingJoinPoint) {
+        logger.info("aroundDeleting: " + proceedingJoinPoint);
+        Object proxy = proceedingJoinPoint.getThis();
+        Object targetObject = null;
+        try {
+            targetObject = ReflectUtils.getTargetObject(proxy);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        System.out.println(targetObject instanceof CrudRepository);
+        if (targetObject instanceof SimpleJpaRepository) {
+            SimpleJpaRepository<?, ? extends Serializable> simpleJpaRepository = (SimpleJpaRepository<?, ? extends Serializable>) targetObject;
+            JpaEntityInformation<?, ? extends Serializable> entityInformation = (JpaEntityInformation<?, ? extends Serializable>) ReflectUtils.getValue("entityInformation", simpleJpaRepository);
+
+            Class<?> domainClass = entityInformation.getJavaType();
+            Class<? extends Serializable> idType = entityInformation.getIdType();
+
+            if (domainClass.isAnnotationPresent(Sync.class)) {
+                Object[] args = proceedingJoinPoint.getArgs();
+                if (args != null) {
+                    System.out.println(Arrays.toString(args));
+                    if (args.length == 0) {
+                        System.out.println("deleteAll ...");
+                    } else if (args.length == 1) {
+                        Object arg = args[0];
+                        if (arg.getClass() == idType) {
+                            // deleteById can't cascade the publication of delete events,
+                            // so first findOne(id), then delete the entity so the delete event cascades
+                            // Object e = simpleJpaRepository.findOne((Serializable)arg); // CrudMethodMetadata is advised, so calling simpleJpaRepository's method directly skips the normal setter step and causes a NullPointerException
+                            Object e = findOne(domainClass, arg);
+                            if (e != null) {
+                                delete(e);
+                            }
+                        } else if (arg.getClass() == domainClass) {
+                            delete(arg);
+                        } else if (arg instanceof Iterable) {
+                            Iterable iterable = (Iterable) arg;
+                            for (Object data : iterable) {
+                                delete(data);
+                            }
+                        }
+                    } else {
+                        throw new IllegalStateException("The intercepted delete method in a repository can take at most one parameter. Currently we only support intercepting methods in " + CrudRepository.class);
+                    }
+                }
+            }
+        }
+    }
+
+    private Object findOne(Class<?> domainClass, Object id) {
+        return entityManager.find(domainClass, id);
+    }
+
+    private void save(Object data) {
+        logger.info("afterSaved: " + data);
+        SyncMessage message = new SyncMessage(data.getClass(), SyncMessage.Type.SAVE, data);
+        kafkaTemplate.sendDefault(message);
+        logger.info("sent: " + message);
+    }
+
+    private void delete(Object data) {
+        SyncMessage message = new SyncMessage(data.getClass(), SyncMessage.Type.DELETE, data);
+        kafkaTemplate.sendDefault(message);
+        logger.info("sent: " + message);
+    }
+}

+ 100 - 0
ps-sync-core/src/main/java/com/uas/ps/sync/core/SyncMessage.java

@@ -0,0 +1,100 @@
+package com.uas.ps.sync.core;
+
+import java.io.Serializable;
+import java.util.Date;
+
+/**
+ * A message recording a data change
+ *
+ * @author sunyj
+ * @since 2018/1/14 16:35
+ */
+public class SyncMessage implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Entity class
+     */
+    private Class clazz;
+
+    /**
+     * Change type
+     */
+    private Type type;
+
+    /**
+     * Changed data
+     */
+    private Object data;
+
+    /**
+     * Creation time
+     */
+    private Date time;
+
+    public SyncMessage() {
+        this.time = new Date();
+    }
+
+    public SyncMessage(Class clazz, Type type, Object data) {
+        this.clazz = clazz;
+        this.type = type;
+        this.data = data;
+        this.time = new Date();
+    }
+
+    public Class getClazz() {
+        return clazz;
+    }
+
+    public void setClazz(Class clazz) {
+        this.clazz = clazz;
+    }
+
+    public Type getType() {
+        return type;
+    }
+
+    public void setType(Type type) {
+        this.type = type;
+    }
+
+    public Object getData() {
+        return data;
+    }
+
+    public void setData(Object data) {
+        this.data = data;
+    }
+
+    public Date getTime() {
+        return time;
+    }
+
+    @Override
+    public String toString() {
+        return "SyncMessage{" +
+                "clazz=" + clazz +
+                ", type=" + type +
+                ", data=" + data +
+                ", time=" + time +
+                '}';
+    }
+
+    /**
+     * Change type
+     */
+    public enum Type {
+        /**
+         * Save
+         */
+        SAVE,
+
+        /**
+         * Delete
+         */
+        DELETE
+    }
+
+}

+ 17 - 0
ps-sync-core/src/main/java/com/uas/ps/sync/core/annotation/Sync.java

@@ -0,0 +1,17 @@
+package com.uas.ps.sync.core.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks an entity class whose JPA CRUD operations should be synchronized to the private database
+ *
+ * @author sunyj
+ * @since 2018/1/24 15:36
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Sync {
+}

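For illustration, a minimal sketch of how an entity would opt in to synchronization; the Product entity, its package, and its columns are hypothetical.

    package com.uas.ps.sync.example;   // hypothetical package

    import com.uas.ps.sync.core.annotation.Sync;

    import javax.persistence.Entity;
    import javax.persistence.Id;
    import javax.persistence.Table;

    // Saves and deletes of this entity through a CrudRepository are intercepted
    // by CRUDIntercept (added below) and published to Kafka as SyncMessages.
    @Sync
    @Entity
    @Table(name = "products")
    public class Product {
        @Id
        private Long id;
        private String title;
        // getters and setters omitted
    }
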
+ 41 - 0
ps-sync-core/src/main/java/com/uas/ps/sync/core/intercept/CRUDIntercept.java

@@ -0,0 +1,41 @@
+package com.uas.ps.sync.core.intercept;
+
+import com.uas.ps.sync.core.MessageProducer;
+import org.aspectj.lang.ProceedingJoinPoint;
+import org.aspectj.lang.annotation.AfterReturning;
+import org.aspectj.lang.annotation.Around;
+import org.aspectj.lang.annotation.Aspect;
+import org.aspectj.lang.annotation.Pointcut;
+import org.springframework.kafka.core.KafkaTemplate;
+
+/**
+ * Intercepts CRUD operations
+ *
+ * @author sunyj
+ * @since 2018/1/24 11:42
+ */
+@Aspect
+public class CRUDIntercept {
+
+    private MessageProducer producer;
+
+    public CRUDIntercept(KafkaTemplate kafkaTemplate) {
+        producer = new MessageProducer(kafkaTemplate);
+    }
+
+    @Pointcut("target(org.springframework.data.repository.CrudRepository)")
+    public void crud() {
+    }
+
+    @AfterReturning(pointcut = "(execution(* save(*)) || execution(* saveAndFlush(*))) && crud()", returning = "result")
+    public void afterSaved(Object result) {
+        producer.addSaveMessage(result);
+    }
+
+    @Around("execution(* delete*(..)) && crud()")
+    public Object aroundDeleting(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
+        producer.addDeleteMessage(proceedingJoinPoint);
+        return proceedingJoinPoint.proceed();
+    }
+
+}

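Since CRUDIntercept is not annotated as a Spring component, the producing application has to register it itself. The configuration below is a minimal sketch under that assumption; the class name and the @EnableAspectJAutoProxy wiring are not taken from this commit.

    package com.uas.ps.sync.example;   // hypothetical package

    import com.uas.ps.sync.core.intercept.CRUDIntercept;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.EnableAspectJAutoProxy;
    import org.springframework.kafka.core.KafkaTemplate;

    @Configuration
    @EnableAspectJAutoProxy
    public class SyncInterceptConfiguration {

        // The aspect publishes SyncMessages through the injected KafkaTemplate.
        @Bean
        public CRUDIntercept crudIntercept(KafkaTemplate<String, Object> kafkaTemplate) {
            return new CRUDIntercept(kafkaTemplate);
        }
    }
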
+ 71 - 0
ps-sync-core/src/main/java/com/uas/ps/sync/core/util/ReflectUtils.java

@@ -0,0 +1,71 @@
+package com.uas.ps.sync.core.util;
+
+import org.springframework.aop.framework.Advised;
+import org.springframework.aop.support.AopUtils;
+
+import java.lang.reflect.Field;
+
+/**
+ * @author sunyj
+ * @since 2018/1/26 9:31
+ */
+public class ReflectUtils {
+
+    /**
+     * Get the named field of the given class
+     *
+     * @param field the field name
+     * @param clazz the class to inspect
+     * @return the declared field
+     * @throws IllegalArgumentException if the field does not exist
+     */
+    public static Field getDeclaredField(String field, Class<?> clazz) throws IllegalArgumentException {
+        try {
+            return clazz.getDeclaredField(field);
+        } catch (NoSuchFieldException e) {
+            throw new IllegalArgumentException(clazz + "中不存在字段:" + field);
+        }
+    }
+
+    /**
+     * Read the value of the given field from the given object via reflection
+     *
+     * @param field the field
+     * @param k     the target object
+     * @return the field's value
+     */
+    public static <K> Object getValue(Field field, K k) throws IllegalStateException {
+        try {
+            Object value;
+            if (!field.isAccessible()) {
+                field.setAccessible(true);
+                value = field.get(k);
+                field.setAccessible(false);
+            } else {
+                value = field.get(k);
+            }
+            return value;
+        } catch (SecurityException | IllegalArgumentException | IllegalAccessException e) {
+            throw new IllegalStateException("通过反射取值失败", e);
+        }
+    }
+
+    /**
+     * Read the value of the named field from the given object via reflection
+     *
+     * @param field the field name
+     * @param k     the target object
+     * @return the field's value
+     */
+    public static <K> Object getValue(String field, K k) throws IllegalStateException {
+        Field declaredField = getDeclaredField(field, k.getClass());
+        return getValue(declaredField, k);
+    }
+
+    public static <T> T getTargetObject(Object proxy) throws Exception {
+        if (AopUtils.isJdkDynamicProxy(proxy)) {
+            return (T) getTargetObject(((Advised) proxy).getTargetSource().getTarget());
+        }
+        return (T) proxy; // expected to be cglib proxy then, which is simply a specialized class
+    }
+}

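A short usage sketch of ReflectUtils; the Holder class exists only for this example.

    import com.uas.ps.sync.core.util.ReflectUtils;

    public class ReflectUtilsExample {

        // Hypothetical class used only for this illustration.
        static class Holder {
            private String secret = "value";
        }

        public static void main(String[] args) throws Exception {
            Holder holder = new Holder();
            // Read a private field by name, as MessageProducer does for
            // SimpleJpaRepository's "entityInformation" field.
            Object value = ReflectUtils.getValue("secret", holder);
            System.out.println(value);            // value

            // Unwrap JDK dynamic proxies; a plain object comes back unchanged.
            Object target = ReflectUtils.getTargetObject(holder);
            System.out.println(target == holder); // true
        }
    }
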
+ 47 - 0
ps-sync-entity/pom.xml

@@ -0,0 +1,47 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.uas.ps</groupId>
+        <artifactId>ps-sync-parent</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <artifactId>ps-sync-entity</artifactId>
+    <packaging>jar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.hibernate.javax.persistence</groupId>
+            <artifactId>hibernate-jpa-2.1-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-core</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-source-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 92 - 0
ps-sync-entity/src/main/java/com/uas/ps/sync/converter/JpaJsonDocumentsConverter.java

@@ -0,0 +1,92 @@
+package com.uas.ps.sync.converter;
+
+import com.alibaba.fastjson.JSONObject;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.uas.ps.core.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.persistence.AttributeConverter;
+import java.io.IOException;
+
+/**
+ * JSON 转换
+ *
+ * @author sunyj
+ * @since 2018/1/15 15:10
+ */
+public class JpaJsonDocumentsConverter implements
+        AttributeConverter<JSONObject, String> {
+
+    // ObjectMapper is thread safe
+    private final static ObjectMapper objectMapper = new ObjectMapper();
+
+    private Logger logger = LoggerFactory.getLogger(getClass());
+
+    static {
+        objectMapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true);
+        objectMapper.configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true);
+    }
+
+    @Override
+    public String convertToDatabaseColumn(JSONObject meta) {
+        String jsonString = "";
+        try {
+            logger.debug("Start convertToDatabaseColumn");
+
+            // convert list of POJO to json
+            jsonString = objectMapper.writeValueAsString(meta);
+            logger.debug("convertToDatabaseColumn: " + jsonString);
+
+        } catch (JsonProcessingException ex) {
+            logger.error(ex.getMessage(), ex);
+        }
+        // 在 convertToEntityAttribute 中,对反斜杠进行了转换,因此这里进行还原
+        jsonString = jsonString.replace("\\\\", "\\");
+        return jsonString;
+    }
+
+    @Override
+    public JSONObject convertToEntityAttribute(String dbData) {
+        JSONObject jsonObject = new JSONObject();
+        try {
+            logger.debug("Start convertToEntityAttribute");
+
+            // convert json to list of POJO
+            if (StringUtils.isEmpty(dbData)) {
+                return null;
+            }
+            // 转换反斜杠,防止 '\C', '\中' 等无法正常解析
+            String originDbData = dbData;
+            int i = 0;
+            while (i < dbData.length()) {
+                // 遍历到反斜杠
+                if (dbData.charAt(i) == '\\') {
+                    // 不能以反斜杠结尾
+                    if (i >= dbData.length() - 1) {
+                        throw new IllegalArgumentException("Illegal JSON string(ends with \\): " + originDbData);
+                    }
+                    // 三种情况下需在当前反斜杠之后添加一个反斜杠
+                    // 1. 反斜杠之后并非双引号,如 "key1" : "\a"
+                    // 2. 反斜杠之后是双引号,并且是当前键值对的结尾,之后还有其他键值对,如 "key1" : "\", "key2" : "value2"
+                    // 3. 反斜杠之后是双引号,并且是当前 json 的结尾,之后没有其他键值对,如 "key1" : "\" }
+                    if (dbData.substring(i + 1).matches("^[^\"]+?[\\s\\S]+?$|^\"[\\s]*?,[\\s]*?\"[\\s\\S]+?$|^\"[\\s]*?}[\\s]*?$")) {
+                        dbData = dbData.substring(0, i + 1) + "\\" + dbData.substring(i + 1);
+                        i++;
+                    }
+                }
+                i++;
+
+            }
+            jsonObject = objectMapper.readValue(dbData, JSONObject.class);
+            logger.debug("JsonDocumentsConverter.convertToEntityAttribute: " + jsonObject);
+
+        } catch (IOException ex) {
+            logger.error(ex.getMessage(), ex);
+        }
+        return jsonObject;
+    }
+}
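
A standalone usage sketch of the converter above (invented values, outside of any JPA context), exercising both conversion directions that the backslash handling in the comments is concerned with:

    import com.alibaba.fastjson.JSONObject;
    import com.uas.ps.sync.converter.JpaJsonDocumentsConverter;

    public class ConverterRoundTripExample {
        public static void main(String[] args) {
            JpaJsonDocumentsConverter converter = new JpaJsonDocumentsConverter();

            // Invented attribute value containing a backslash.
            JSONObject meta = new JSONObject();
            meta.put("pr_code", "ABC\\123");
            meta.put("pr_title", "sample");

            // Entity attribute -> database column (JSON string).
            String column = converter.convertToDatabaseColumn(meta);
            System.out.println("stored as: " + column);

            // Database column -> entity attribute.
            JSONObject restored = converter.convertToEntityAttribute(column);
            System.out.println("restored : " + restored);
        }
    }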

+ 207 - 0
ps-sync-entity/src/main/java/com/uas/ps/sync/entity/SyncMessage.java

@@ -0,0 +1,207 @@
+package com.uas.ps.sync.entity;
+
+import com.alibaba.fastjson.JSONObject;
+import com.uas.ps.sync.converter.JpaJsonDocumentsConverter;
+
+import javax.persistence.*;
+import java.io.Serializable;
+import java.util.Date;
+
+/**
+ * 记录变化的消息
+ *
+ * @author sunyj
+ * @since 2018/1/14 16:35
+ */
+@Entity
+@Table(name = "sync$message")
+public class SyncMessage implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    @Id
+    @Column(name = "me_id")
+    private Long id;
+
+    /**
+     * 表名
+     */
+    @Column(name = "me_table_name")
+    private String tableName;
+
+    /**
+     * 更改类型
+     */
+    @Column(name = "me_method_type")
+    private String methodType;
+
+    /**
+     * 决定数据唯一的条件(JSON 格式)
+     */
+    @Convert(converter = JpaJsonDocumentsConverter.class)
+    @Column(name = "me_data_key")
+    private JSONObject dataKey;
+
+    /**
+     * 更改的数据(JSON 格式),delete 情况下可为空
+     */
+    @Convert(converter = JpaJsonDocumentsConverter.class)
+    @Column(name = "me_data")
+    private JSONObject data;
+
+    /**
+     * 优先级
+     */
+    @Column(name = "me_priority")
+    private Long priority;
+
+    /**
+     * 尝试次数
+     */
+    @Column(name = "me_retry_count")
+    private Long retryCount;
+
+    /**
+     * 请求来自哪个应用
+     */
+    @Column(name = "me_source_app")
+    private String sourceApp;
+
+    /**
+     * 批次 code
+     */
+    @Column(name = "me_batch_code")
+    private String batchCode;
+
+    /**
+     * 批次数量
+     */
+    @Column(name = "me_batch_size")
+    private Integer batchSize;
+
+    /**
+     * 批次序号
+     */
+    @Column(name = "me_batch_detno")
+    private Integer batchDetno;
+
+    /**
+     * 时间
+     */
+    @Column(name = "me_create_time")
+    private Date createTime;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getTableName() {
+        return tableName;
+    }
+
+    public void setTableName(String tableName) {
+        this.tableName = tableName;
+    }
+
+    public String getMethodType() {
+        return methodType;
+    }
+
+    public void setMethodType(String methodType) {
+        this.methodType = methodType;
+    }
+
+    public JSONObject getDataKey() {
+        return dataKey;
+    }
+
+    public void setDataKey(JSONObject dataKey) {
+        this.dataKey = dataKey;
+    }
+
+    public JSONObject getData() {
+        return data;
+    }
+
+    public void setData(JSONObject data) {
+        this.data = data;
+    }
+
+    public Long getPriority() {
+        return priority;
+    }
+
+    public void setPriority(Long priority) {
+        this.priority = priority;
+    }
+
+    public Long getRetryCount() {
+        return retryCount;
+    }
+
+    public void setRetryCount(Long retryCount) {
+        this.retryCount = retryCount;
+    }
+
+    public Date getCreateTime() {
+        return createTime;
+    }
+
+    public void setCreateTime(Date createTime) {
+        this.createTime = createTime;
+    }
+
+    public String getSourceApp() {
+        return sourceApp;
+    }
+
+    public void setSourceApp(String sourceApp) {
+        this.sourceApp = sourceApp;
+    }
+
+    public String getBatchCode() {
+        return batchCode;
+    }
+
+    public void setBatchCode(String batchCode) {
+        this.batchCode = batchCode;
+    }
+
+    public Integer getBatchSize() {
+        return batchSize;
+    }
+
+    public void setBatchSize(Integer batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    public Integer getBatchDetno() {
+        return batchDetno;
+    }
+
+    public void setBatchDetno(Integer batchDetno) {
+        this.batchDetno = batchDetno;
+    }
+
+    @Override
+    public String toString() {
+        return "SyncMessage{" +
+                "id=" + id +
+                ", tableName='" + tableName + '\'' +
+                ", methodType='" + methodType + '\'' +
+                ", dataKey=" + dataKey +
+                ", data=" + data +
+                ", priority=" + priority +
+                ", retryCount=" + retryCount +
+                ", sourceApp='" + sourceApp + '\'' +
+                ", batchCode='" + batchCode + '\'' +
+                ", batchSize=" + batchSize +
+                ", batchDetno=" + batchDetno +
+                ", createTime=" + createTime +
+                '}';
+    }
+}
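
For orientation, a minimal sketch of populating one of these records in code; the field values below are invented, and in this commit such rows are normally written by the database triggers further down and then forwarded to Kafka by the producer's JmsListener.

    import com.alibaba.fastjson.JSONObject;
    import com.uas.ps.sync.entity.SyncMessage;

    import java.util.Date;

    public class SyncMessageExample {
        public static void main(String[] args) {
            SyncMessage message = new SyncMessage();
            message.setId(1L);                    // me_id
            message.setTableName("products");     // source table
            message.setMethodType("update");      // insert / update / delete

            JSONObject key = new JSONObject();    // unique row key (me_data_key)
            key.put("pr_id", 1001L);
            message.setDataKey(key);

            JSONObject data = new JSONObject();   // changed columns (me_data)
            data.put("pr_code", "IC-0001");
            data.put("pr_price", 3.5);
            message.setData(data);

            message.setPriority(1L);
            message.setRetryCount(0L);
            message.setSourceApp("b2b");          // invented value
            message.setCreateTime(new Date());

            System.out.println(message);
        }
    }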

+ 14 - 0

ps-sync-producer/README.md

@@ -0,0 +1,14 @@
+# Description
+This is Sync Producer.
+
+# Deploy
+### Test
+* Visit [Jenkins](http://10.10.100.200:5001/job/ps-sync-producer.test/)
+* Click "立即构建" (Build Now)
+
+### Production
+* Visit [Jenkins](http://10.10.100.200:5001/job/ps-sync-producer.prod.package/)
+* Click "立即构建" (Build Now)
+* Visit [Jenkins](http://119.147.37.222:9091/job/ps-sync-producer/)
+* Click "Build with Parameters"
+* Click "开始构建" (Start Build)

+ 75 - 0
ps-sync-producer/pom.xml

@@ -0,0 +1,75 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.uas.ps</groupId>
+        <artifactId>ps-sync-parent</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <artifactId>ps-sync-producer</artifactId>
+    <packaging>jar</packaging>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.uas.ps</groupId>
+            <artifactId>ps-sync-entity</artifactId>
+        </dependency>
+
+        <!-- spring boot -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-actuator</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-jpa</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-security</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 79 - 0
ps-sync-producer/sql/product$storestaus.sql

@@ -0,0 +1,79 @@
+-- ----------------------------
+-- Triggers structure for table product$storestaus
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_storestaus_i`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_storestaus_i` AFTER INSERT ON `product$storestaus` FOR EACH ROW begin
+	  declare v_table_name varchar(64) default 'product$storestaus';
+    declare v_method_type varchar(6) default 'insert';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', new.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat('"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_status": ', case when new.pr_status is null then 'null' else new.pr_status end),
+      concat(',"pr_useruu": ', case when new.pr_useruu is null then 'null' else new.pr_useruu end),
+      -- datetime
+      concat(',"pr_date": ', case when new.pr_date is null then 'null' else concat('"', replace(new.pr_date, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table product$storestaus
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_storestaus_u`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_storestaus_u` AFTER UPDATE ON `product$storestaus` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'product$storestaus';
+    declare v_method_type varchar(6) default 'update';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat('"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_status": ', case when new.pr_status is null then 'null' else new.pr_status end),
+      concat(',"pr_useruu": ', case when new.pr_useruu is null then 'null' else new.pr_useruu end),
+      -- datetime
+      concat(',"pr_date": ', case when new.pr_date is null then 'null' else concat('"', replace(new.pr_date, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table product$storestaus
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_storestaus_d`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_storestaus_d` AFTER DELETE ON `product$storestaus` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'product$storestaus';
+    declare v_method_type varchar(6) default 'delete';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
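
To make the enqueued payload concrete: for this table the triggers above produce a v_data_key like {"pr_id": 123} and a v_data object with the columns listed in the concat. A hedged fastjson sketch of parsing such a payload on the consuming side (the literal values are invented):

    import com.alibaba.fastjson.JSONObject;

    public class TriggerPayloadExample {
        public static void main(String[] args) {
            // Invented examples of what the insert/update triggers above would enqueue.
            String dataKey = "{\"pr_id\": 123}";
            String data = "{\"pr_enuu\": 10001, \"pr_status\": 1, \"pr_useruu\": null,"
                    + " \"pr_date\": \"2018-01-15 12:00:00\"}";

            JSONObject key = JSONObject.parseObject(dataKey);
            JSONObject row = JSONObject.parseObject(data);

            System.out.println("row id : " + key.getLong("pr_id"));
            System.out.println("status : " + row.getInteger("pr_status"));
            System.out.println("useruu : " + row.get("pr_useruu")); // null columns arrive as JSON null
        }
    }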

+ 81 - 0
ps-sync-producer/sql/product$users.sql

@@ -0,0 +1,81 @@
+-- ----------------------------
+-- Triggers structure for table product$users
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_users_i`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_users_i` AFTER INSERT ON `product$users` FOR EACH ROW begin
+	  declare v_table_name varchar(64) default 'product$users';
+    declare v_method_type varchar(6) default 'insert';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"PU_ID": ', new.PU_ID, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat('"PU_USERUU": ', case when new.PU_USERUU is null then 'null' else new.PU_USERUU end),
+      concat(',"PU_ENUU": ', case when new.PU_ENUU is null then 'null' else new.PU_ENUU end),
+      concat(',"PU_PRID": ', case when new.PU_PRID is null then 'null' else new.PU_PRID end),
+      concat(',"PU_ERPID": ', case when new.PU_ERPID is null then 'null' else new.PU_ERPID end),
+      -- datetime
+      concat(',"PU_DATE": ', case when new.PU_DATE is null then 'null' else concat('"', replace(new.PU_DATE, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table product$users
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_users_u`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_users_u` AFTER UPDATE ON `product$users` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'product$users';
+    declare v_method_type varchar(6) default 'update';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"PU_ID": ', old.PU_ID, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat('"PU_USERUU": ', case when new.PU_USERUU is null then 'null' else new.PU_USERUU end),
+      concat(',"PU_ENUU": ', case when new.PU_ENUU is null then 'null' else new.PU_ENUU end),
+      concat(',"PU_PRID": ', case when new.PU_PRID is null then 'null' else new.PU_PRID end),
+      concat(',"PU_ERPID": ', case when new.PU_ERPID is null then 'null' else new.PU_ERPID end),
+      -- datetime
+      concat(',"PU_DATE": ', case when new.PU_DATE is null then 'null' else concat('"', replace(new.PU_DATE, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table product$users
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$product_users_d`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$product_users_d` AFTER DELETE ON `product$users` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'product$users';
+    declare v_method_type varchar(6) default 'delete';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"PU_ID": ', old.PU_ID, '}');
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;

+ 101 - 0
ps-sync-producer/sql/productmatchresults.sql

@@ -0,0 +1,101 @@
+-- ----------------------------
+-- Triggers structure for table productmatchresults
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$productmatchresults_i`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$productmatchresults_i` AFTER INSERT ON `productmatchresults` FOR EACH ROW begin
+	  declare v_table_name varchar(64) default 'productmatchresults';
+    declare v_method_type varchar(6) default 'insert';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', new.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      concat('"pr_brandcn": ', case when new.pr_brandcn is null then 'null' else concat('"', replace(new.pr_brandcn, '"', '\\"'), '"') end),
+      concat(',"pr_branden": ', case when new.pr_branden is null then 'null' else concat('"', replace(new.pr_branden, '"', '\\"'), '"') end),
+      concat(',"pr_cmpcode": ', case when new.pr_cmpcode is null then 'null' else concat('"', replace(new.pr_cmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_uuid": ', case when new.pr_uuid is null then 'null' else concat('"', replace(new.pr_uuid, '"', '\\"'), '"') end),
+      concat(',"pr_encapsulation": ', case when new.pr_encapsulation is null then 'null' else concat('"', replace(new.pr_encapsulation, '"', '\\"'), '"') end),
+      concat(',"pr_kindcn": ', case when new.pr_kindcn is null then 'null' else concat('"', replace(new.pr_kindcn, '"', '\\"'), '"') end),
+      concat(',"pr_kinden": ', case when new.pr_kinden is null then 'null' else concat('"', replace(new.pr_kinden, '"', '\\"'), '"') end),
+      concat(',"pr_pbranduuid": ', case when new.pr_pbranduuid is null then 'null' else concat('"', replace(new.pr_pbranduuid, '"', '\\"'), '"') end),
+      concat(',"pr_uuid": ', case when new.pr_uuid is null then 'null' else concat('"', replace(new.pr_uuid, '"', '\\"'), '"') end),
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat(',"pr_brid": ', case when new.pr_brid is null then 'null' else new.pr_brid end),
+      concat(',"pr_cmpid": ', case when new.pr_cmpid is null then 'null' else new.pr_cmpid end),
+      concat(',"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_kindid": ', case when new.pr_kindid is null then 'null' else new.pr_kindid end),
+      concat(',"pr_number": ', case when new.pr_number is null then 'null' else new.pr_number end),
+      concat(',"pr_prid": ', case when new.pr_prid is null then 'null' else new.pr_prid end),
+      -- datetime
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table productmatchresults
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$productmatchresults_u`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$productmatchresults_u` AFTER UPDATE ON `productmatchresults` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'productmatchresults';
+    declare v_method_type varchar(6) default 'update';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      concat('"pr_brandcn": ', case when new.pr_brandcn is null then 'null' else concat('"', replace(new.pr_brandcn, '"', '\\"'), '"') end),
+      concat(',"pr_branden": ', case when new.pr_branden is null then 'null' else concat('"', replace(new.pr_branden, '"', '\\"'), '"') end),
+      concat(',"pr_cmpcode": ', case when new.pr_cmpcode is null then 'null' else concat('"', replace(new.pr_cmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_uuid": ', case when new.pr_uuid is null then 'null' else concat('"', replace(new.pr_uuid, '"', '\\"'), '"') end),
+      concat(',"pr_encapsulation": ', case when new.pr_encapsulation is null then 'null' else concat('"', replace(new.pr_encapsulation, '"', '\\"'), '"') end),
+      concat(',"pr_kindcn": ', case when new.pr_kindcn is null then 'null' else concat('"', replace(new.pr_kindcn, '"', '\\"'), '"') end),
+      concat(',"pr_kinden": ', case when new.pr_kinden is null then 'null' else concat('"', replace(new.pr_kinden, '"', '\\"'), '"') end),
+      concat(',"pr_pbranduuid": ', case when new.pr_pbranduuid is null then 'null' else concat('"', replace(new.pr_pbranduuid, '"', '\\"'), '"') end),
+      concat(',"pr_uuid": ', case when new.pr_uuid is null then 'null' else concat('"', replace(new.pr_uuid, '"', '\\"'), '"') end),
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat(',"pr_brid": ', case when new.pr_brid is null then 'null' else new.pr_brid end),
+      concat(',"pr_cmpid": ', case when new.pr_cmpid is null then 'null' else new.pr_cmpid end),
+      concat(',"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_kindid": ', case when new.pr_kindid is null then 'null' else new.pr_kindid end),
+      concat(',"pr_number": ', case when new.pr_number is null then 'null' else new.pr_number end),
+      concat(',"pr_prid": ', case when new.pr_prid is null then 'null' else new.pr_prid end),
+      -- datetime
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table productmatchresults
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$productmatchresults_d`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$productmatchresults_d` AFTER DELETE ON `productmatchresults` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'productmatchresults';
+    declare v_method_type varchar(6) default 'delete';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;

+ 167 - 0
ps-sync-producer/sql/products.sql

@@ -0,0 +1,167 @@
+-- ----------------------------
+-- Triggers structure for table products
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$products_i`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$products_i` AFTER INSERT ON `products` FOR EACH ROW begin
+	  declare v_table_name varchar(64) default 'products';
+    declare v_method_type varchar(6) default 'insert';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', new.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      concat('"pr_title": ', case when new.pr_title is null then 'null' else concat('"', replace(new.pr_title, '"', '\\"'), '"') end),
+      concat(',"pr_code": ', case when new.pr_code is null then 'null' else concat('"', replace(new.pr_code, '"', '\\"'), '"') end),
+      concat(',"pr_spec": ', case when new.pr_spec is null then 'null' else concat('"', replace(new.pr_spec, '"', '\\"'), '"') end),
+      concat(',"pr_unit": ', case when new.pr_unit is null then 'null' else concat('"', replace(new.pr_unit, '"', '\\"'), '"') end),
+      concat(',"pr_brand": ', case when new.pr_brand is null then 'null' else concat('"', replace(new.pr_brand, '"', '\\"'), '"') end),
+      concat(',"pr_cmpcode": ', case when new.pr_cmpcode is null then 'null' else concat('"', replace(new.pr_cmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_cmpuuid": ', case when new.pr_cmpuuid is null then 'null' else concat('"', replace(new.pr_cmpuuid, '"', '\\"'), '"') end),
+      concat(',"pr_sourceapp": ', case when new.pr_sourceapp is null then 'null' else concat('"', replace(new.pr_sourceapp, '"', '\\"'), '"') end),
+      concat(',"pr_kind": ', case when new.pr_kind is null then 'null' else concat('"', replace(new.pr_kind, '"', '\\"'), '"') end),
+      concat(',"pr_kinden": ', case when new.pr_kinden is null then 'null' else concat('"', replace(new.pr_kinden, '"', '\\"'), '"') end),
+      concat(',"pr_pbrand": ', case when new.pr_pbrand is null then 'null' else concat('"', replace(new.pr_pbrand, '"', '\\"'), '"') end),
+      concat(',"pr_pbranden": ', case when new.pr_pbranden is null then 'null' else concat('"', replace(new.pr_pbranden, '"', '\\"'), '"') end),
+      concat(',"pr_pbranduuid": ', case when new.pr_pbranduuid is null then 'null' else concat('"', replace(new.pr_pbranduuid, '"', '\\"'), '"') end),
+      concat(',"pr_pcmpcode": ', case when new.pr_pcmpcode is null then 'null' else concat('"', replace(new.pr_pcmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_attachment": ', case when new.pr_attachment is null then 'null' else concat('"', replace(new.pr_attachment, '"', '\\"'), '"') end),
+      concat(',"pr_encapsulation": ', case when new.pr_encapsulation is null then 'null' else concat('"', replace(new.pr_encapsulation, '"', '\\"'), '"') end),
+      concat(',"pr_packaging": ', case when new.pr_packaging is null then 'null' else concat('"', replace(new.pr_packaging, '"', '\\"'), '"') end),
+      concat(',"pr_cmpimg": ', case when new.pr_cmpimg is null then 'null' else concat('"', replace(new.pr_cmpimg, '"', '\\"'), '"') end),
+      concat(',"pr_goodsnover": ', case when new.pr_goodsnover is null then 'null' else concat('"', replace(new.pr_goodsnover, '"', '\\"'), '"') end),
+      concat(',"pr_goodstaxno": ', case when new.pr_goodstaxno is null then 'null' else concat('"', replace(new.pr_goodstaxno, '"', '\\"'), '"') end),
+      concat(',"pr_taxpre": ', case when new.pr_taxpre is null then 'null' else concat('"', replace(new.pr_taxpre, '"', '\\"'), '"') end),
+      concat(',"pr_taxprecon": ', case when new.pr_taxprecon is null then 'null' else concat('"', replace(new.pr_taxprecon, '"', '\\"'), '"') end),
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat(',"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_useruu": ', case when new.pr_useruu is null then 'null' else new.pr_useruu end),
+      concat(',"pr_minpack": ', case when new.pr_minpack is null then 'null' else new.pr_minpack end),
+      concat(',"pr_minorder": ', case when new.pr_minorder is null then 'null' else new.pr_minorder end),
+      concat(',"pr_leadtime": ', case when new.pr_leadtime is null then 'null' else new.pr_leadtime end),
+      concat(',"pr_ltinstock": ', case when new.pr_ltinstock is null then 'null' else new.pr_ltinstock end),
+      concat(',"pr_reserve": ', case when new.pr_reserve is null then 'null' else new.pr_reserve end),
+      concat(',"pr_price": ', case when new.pr_price is null then 'null' else new.pr_price end),
+      concat(',"pr_sourceid": ', case when new.pr_sourceid is null then 'null' else new.pr_sourceid end),
+      concat(',"pr_kindid": ', case when new.pr_kindid is null then 'null' else new.pr_kindid end),
+      concat(',"pr_pbrandid": ', case when new.pr_pbrandid is null then 'null' else new.pr_pbrandid end),
+      concat(',"pr_issale": ', case when new.pr_issale is null then 'null' else new.pr_issale end),
+      concat(',"pr_ispurchase": ', case when new.pr_ispurchase is null then 'null' else new.pr_ispurchase end),
+      concat(',"pr_isshow": ', case when new.pr_isshow is null then 'null' else new.pr_isshow end),
+      concat(',"pr_ispubsale": ', case when new.pr_ispubsale is null then 'null' else new.pr_ispubsale end),
+      concat(',"pr_standard": ', case when new.pr_standard is null then 'null' else new.pr_standard end),
+      concat(',"pr_matchstatus": ', case when new.pr_matchstatus is null then 'null' else new.pr_matchstatus end),
+      concat(',"pr_matchsize": ', case when new.pr_matchsize is null then 'null' else new.pr_matchsize end),
+      concat(',"pr_downloadstatus": ', case when new.pr_downloadstatus is null then 'null' else new.pr_downloadstatus end),
+      concat(',"pr_maxdelivery": ', case when new.pr_maxdelivery is null then 'null' else new.pr_maxdelivery end),
+      concat(',"pr_mindelivery": ', case when new.pr_mindelivery is null then 'null' else new.pr_mindelivery end),
+      concat(',"pr_isbreakup": ', case when new.pr_isbreakup is null then 'null' else new.pr_isbreakup end),
+      -- datetime
+      concat(',"pr_erpdate": ', case when new.pr_erpdate is null then 'null' else concat('"', replace(new.pr_erpdate, '"', '\\"'), '"') end),
+      concat(',"pr_create_time": ', case when new.pr_create_time is null then 'null' else concat('"', replace(new.pr_create_time, '"', '\\"'), '"') end),
+      concat(',"pr_tostandard": ', case when new.pr_tostandard is null then 'null' else concat('"', replace(new.pr_tostandard, '"', '\\"'), '"') end),
+      concat(',"pr_manufacturedate": ', case when new.pr_manufacturedate is null then 'null' else concat('"', replace(new.pr_manufacturedate, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table products
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$products_u`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$products_u` AFTER UPDATE ON `products` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'products';
+    declare v_method_type varchar(6) default 'update';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    select concat(
+      concat('{'),
+      -- varchar
+      concat('"pr_title": ', case when new.pr_title is null then 'null' else concat('"', replace(new.pr_title, '"', '\\"'), '"') end),
+      concat(',"pr_code": ', case when new.pr_code is null then 'null' else concat('"', replace(new.pr_code, '"', '\\"'), '"') end),
+      concat(',"pr_spec": ', case when new.pr_spec is null then 'null' else concat('"', replace(new.pr_spec, '"', '\\"'), '"') end),
+      concat(',"pr_unit": ', case when new.pr_unit is null then 'null' else concat('"', replace(new.pr_unit, '"', '\\"'), '"') end),
+      concat(',"pr_brand": ', case when new.pr_brand is null then 'null' else concat('"', replace(new.pr_brand, '"', '\\"'), '"') end),
+      concat(',"pr_cmpcode": ', case when new.pr_cmpcode is null then 'null' else concat('"', replace(new.pr_cmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_cmpuuid": ', case when new.pr_cmpuuid is null then 'null' else concat('"', replace(new.pr_cmpuuid, '"', '\\"'), '"') end),
+      concat(',"pr_sourceapp": ', case when new.pr_sourceapp is null then 'null' else concat('"', replace(new.pr_sourceapp, '"', '\\"'), '"') end),
+      concat(',"pr_kind": ', case when new.pr_kind is null then 'null' else concat('"', replace(new.pr_kind, '"', '\\"'), '"') end),
+      concat(',"pr_kinden": ', case when new.pr_kinden is null then 'null' else concat('"', replace(new.pr_kinden, '"', '\\"'), '"') end),
+      concat(',"pr_pbrand": ', case when new.pr_pbrand is null then 'null' else concat('"', replace(new.pr_pbrand, '"', '\\"'), '"') end),
+      concat(',"pr_pbranden": ', case when new.pr_pbranden is null then 'null' else concat('"', replace(new.pr_pbranden, '"', '\\"'), '"') end),
+      concat(',"pr_pbranduuid": ', case when new.pr_pbranduuid is null then 'null' else concat('"', replace(new.pr_pbranduuid, '"', '\\"'), '"') end),
+      concat(',"pr_pcmpcode": ', case when new.pr_pcmpcode is null then 'null' else concat('"', replace(new.pr_pcmpcode, '"', '\\"'), '"') end),
+      concat(',"pr_attachment": ', case when new.pr_attachment is null then 'null' else concat('"', replace(new.pr_attachment, '"', '\\"'), '"') end),
+      concat(',"pr_encapsulation": ', case when new.pr_encapsulation is null then 'null' else concat('"', replace(new.pr_encapsulation, '"', '\\"'), '"') end),
+      concat(',"pr_packaging": ', case when new.pr_packaging is null then 'null' else concat('"', replace(new.pr_packaging, '"', '\\"'), '"') end),
+      concat(',"pr_cmpimg": ', case when new.pr_cmpimg is null then 'null' else concat('"', replace(new.pr_cmpimg, '"', '\\"'), '"') end),
+      concat(',"pr_goodsnover": ', case when new.pr_goodsnover is null then 'null' else concat('"', replace(new.pr_goodsnover, '"', '\\"'), '"') end),
+      concat(',"pr_goodstaxno": ', case when new.pr_goodstaxno is null then 'null' else concat('"', replace(new.pr_goodstaxno, '"', '\\"'), '"') end),
+      concat(',"pr_taxpre": ', case when new.pr_taxpre is null then 'null' else concat('"', replace(new.pr_taxpre, '"', '\\"'), '"') end),
+      concat(',"pr_taxprecon": ', case when new.pr_taxprecon is null then 'null' else concat('"', replace(new.pr_taxprecon, '"', '\\"'), '"') end),
+      -- bit(1), smallint(6), int(11), bigint(20), double
+      concat(',"pr_enuu": ', case when new.pr_enuu is null then 'null' else new.pr_enuu end),
+      concat(',"pr_useruu": ', case when new.pr_useruu is null then 'null' else new.pr_useruu end),
+      concat(',"pr_minpack": ', case when new.pr_minpack is null then 'null' else new.pr_minpack end),
+      concat(',"pr_minorder": ', case when new.pr_minorder is null then 'null' else new.pr_minorder end),
+      concat(',"pr_leadtime": ', case when new.pr_leadtime is null then 'null' else new.pr_leadtime end),
+      concat(',"pr_ltinstock": ', case when new.pr_ltinstock is null then 'null' else new.pr_ltinstock end),
+      concat(',"pr_reserve": ', case when new.pr_reserve is null then 'null' else new.pr_reserve end),
+      concat(',"pr_price": ', case when new.pr_price is null then 'null' else new.pr_price end),
+      concat(',"pr_sourceid": ', case when new.pr_sourceid is null then 'null' else new.pr_sourceid end),
+      concat(',"pr_kindid": ', case when new.pr_kindid is null then 'null' else new.pr_kindid end),
+      concat(',"pr_pbrandid": ', case when new.pr_pbrandid is null then 'null' else new.pr_pbrandid end),
+      concat(',"pr_issale": ', case when new.pr_issale is null then 'null' else new.pr_issale end),
+      concat(',"pr_ispurchase": ', case when new.pr_ispurchase is null then 'null' else new.pr_ispurchase end),
+      concat(',"pr_isshow": ', case when new.pr_isshow is null then 'null' else new.pr_isshow end),
+      concat(',"pr_ispubsale": ', case when new.pr_ispubsale is null then 'null' else new.pr_ispubsale end),
+      concat(',"pr_standard": ', case when new.pr_standard is null then 'null' else new.pr_standard end),
+      concat(',"pr_matchstatus": ', case when new.pr_matchstatus is null then 'null' else new.pr_matchstatus end),
+      concat(',"pr_matchsize": ', case when new.pr_matchsize is null then 'null' else new.pr_matchsize end),
+      concat(',"pr_downloadstatus": ', case when new.pr_downloadstatus is null then 'null' else new.pr_downloadstatus end),
+      concat(',"pr_maxdelivery": ', case when new.pr_maxdelivery is null then 'null' else new.pr_maxdelivery end),
+      concat(',"pr_mindelivery": ', case when new.pr_mindelivery is null then 'null' else new.pr_mindelivery end),
+      concat(',"pr_isbreakup": ', case when new.pr_isbreakup is null then 'null' else new.pr_isbreakup end),
+      -- datetime
+      concat(',"pr_erpdate": ', case when new.pr_erpdate is null then 'null' else concat('"', replace(new.pr_erpdate, '"', '\\"'), '"') end),
+      concat(',"pr_create_time": ', case when new.pr_create_time is null then 'null' else concat('"', replace(new.pr_create_time, '"', '\\"'), '"') end),
+      concat(',"pr_tostandard": ', case when new.pr_tostandard is null then 'null' else concat('"', replace(new.pr_tostandard, '"', '\\"'), '"') end),
+      concat(',"pr_manufacturedate": ', case when new.pr_manufacturedate is null then 'null' else concat('"', replace(new.pr_manufacturedate, '"', '\\"'), '"') end),
+      -- text
+      -- json
+      concat('}')
+    ) into v_data;
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;
+
+-- ----------------------------
+-- Triggers structure for table products
+-- ----------------------------
+DROP TRIGGER IF EXISTS `sync$products_d`;
+delimiter ;;
+CREATE DEFINER = `root`@`%` TRIGGER `sync$products_d` AFTER DELETE ON `products` FOR EACH ROW begin
+	declare v_table_name varchar(64) default 'products';
+    declare v_method_type varchar(6) default 'delete';
+    declare v_data_key text;
+    declare v_data text;
+    declare v_priority int default 1;
+
+    set v_data_key=concat('{"pr_id": ', old.pr_id, '}');
+    call sync$enqueue_message(v_table_name, v_method_type, v_data_key, v_data, v_priority);
+end
+;;
+delimiter ;

+ 96 - 0
ps-sync-producer/sql/trigger-mysql.sql

@@ -0,0 +1,96 @@
+-- 创建消息表,用于记录变化,同步到私有库里
+drop table if exists sync$message;
+create table sync$message(
+    me_id bigint not null primary key auto_increment,
+    me_table_name varchar(64) not null,
+    me_method_type varchar(6) not null,
+    me_data_key text not null,
+    me_data text,
+    me_priority int default 0,
+    me_retry_count int default 0,
+    me_source_app varchar(64),
+    me_batch_code varchar(64),
+    me_batch_size int,
+    me_batch_detno int,
+    me_create_time datetime not null
+);
+
+-- 创建消息历史表
+drop table if exists sync$message_history;
+create table sync$message_history(
+    mh_id bigint not null primary key auto_increment,
+    mh_dequeue_time datetime not null,
+    me_id bigint not null,
+    me_table_name varchar(64) not null,
+    me_method_type varchar(6) not null,
+    me_data_key text not null,
+    me_data text,
+    me_priority int default 0,
+    me_source_app varchar(64),
+    me_batch_code varchar(64),
+    me_batch_size int,
+    me_batch_detno int,
+    me_create_time datetime not null
+);
+
+-- 创建存储过程 入队消息
+drop procedure if exists sync$enqueue_message;
+delimiter $$
+create procedure sync$enqueue_message(p_table_name varchar(64), p_method_type varchar(6), p_data_key text, p_data text, p_priority int)
+begin
+  if @source_app is not null then
+    if @batch_detno is null then
+      set @batch_detno = 0;
+    end if;
+    set @batch_detno = @batch_detno + 1;
+    insert into sync$message (me_table_name, me_method_type, me_data_key, me_data, me_priority, me_create_time, me_source_app, me_batch_code, me_batch_size, me_batch_detno) values(p_table_name, p_method_type, p_data_key, p_data, p_priority, sysdate(), @source_app, @batch_code, @batch_detno, @batch_detno);
+    -- 动态计算批次数量,在本次 session 中每累加一次,就更新一次数量
+    update sync$message set me_batch_size = @batch_detno where me_batch_code = @batch_code;
+  else
+    insert into sync$message (me_table_name, me_method_type, me_data_key, me_data, me_priority, me_create_time) values(p_table_name, p_method_type, p_data_key, p_data, p_priority, sysdate());
+  end if;
+end;$$
+delimiter ;
+
+-- 创建存储过程 出队消息
+drop procedure if exists sync$dequeue_message;
+delimiter $$
+create procedure sync$dequeue_message(p_id bigint)
+begin
+	insert into sync$message_history (mh_dequeue_time, me_id, me_table_name, me_method_type, me_data_key, me_data, me_priority, me_create_time, me_source_app, me_batch_code, me_batch_size, me_batch_detno) select sysdate(), me_id, me_table_name, me_method_type, me_data_key, me_data, me_priority, me_create_time, me_source_app, me_batch_code, me_batch_size, me_batch_detno from sync$message where me_id = p_id;
+	delete from sync$message where me_id = p_id;
+end;$$
+delimiter ;
+
+
+-- 创建存储过程 设置 session variable
+drop procedure if exists sync$set_session_variable;
+delimiter $$
+create procedure sync$set_session_variable(p_source_app varchar(64), p_batch_code varchar(64))
+begin
+  set @source_app = p_source_app;
+  set @batch_code = p_batch_code;
+  set @batch_detno = 0;
+end;$$
+delimiter ;
+
+
+-- 创建存储过程 取消设置 session variable
+drop procedure if exists sync$unset_session_variable;
+delimiter $$
+create procedure sync$unset_session_variable()
+begin
+  set @source_app = null;
+  set @batch_code = null;
+  set @batch_detno = null;
+end;$$
+delimiter ;
+
+-- 创建存储过程 获取批次大小
+drop procedure if exists sync$get_batch_size;
+delimiter $$
+create procedure sync$get_batch_size(out p_batch_size int)
+begin
+  set p_batch_size = @batch_detno;
+end;$$
+delimiter ;
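
A rough JDBC sketch of how a writer could use the session-variable procedures above so that the row triggers tag its changes as one batch; the connection settings, source app name and the sample update statement are placeholders for illustration only.

    import java.sql.CallableStatement;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;
    import java.util.UUID;

    public class BatchedWriteExample {
        public static void main(String[] args) throws Exception {
            // Placeholder connection settings.
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:mysql://localhost:3306/test", "user", "password")) {

                // Tag the session: triggers now record source app / batch code / batch detno.
                try (CallableStatement set = conn.prepareCall("{call sync$set_session_variable(?, ?)}")) {
                    set.setString(1, "b2b");                          // p_source_app (placeholder)
                    set.setString(2, UUID.randomUUID().toString());   // p_batch_code
                    set.execute();
                }

                // Any DML on the synchronized tables now enqueues tagged messages.
                try (Statement stmt = conn.createStatement()) {
                    stmt.executeUpdate("update products set pr_price = pr_price where pr_id = 1");
                }

                // Clear the session variables once the batch is complete.
                try (CallableStatement unset = conn.prepareCall("{call sync$unset_session_variable()}")) {
                    unset.execute();
                }
            }
        }
    }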

+ 50 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/Application.java

@@ -0,0 +1,50 @@
+package com.uas.ps.sync.producer;
+
+import com.uas.ps.core.util.ContextUtils;
+import com.uas.ps.sync.producer.jms.JmsListener;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.event.ApplicationPreparedEvent;
+import org.springframework.context.ApplicationListener;
+import org.springframework.web.servlet.config.annotation.EnableWebMvc;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+
+/**
+ * 应用入口
+ *
+ * @author sunyj
+ * @since 2017年8月16日 下午4:00:03
+ */
+@SpringBootApplication
+@EnableWebMvc
+public class Application {
+    public static void main(String[] args) throws FileNotFoundException {
+        File logFile = new File("logs/log.log");
+        if (!logFile.getParentFile().exists()) {
+            logFile.getParentFile().mkdir();
+        }
+        System.setErr(new PrintStream(new FileOutputStream(logFile, true)));
+        SpringApplication application = new SpringApplication(Application.class);
+        application.addListeners(new ApplicationListener<ApplicationPreparedEvent>() {
+            @Override
+            public void onApplicationEvent(ApplicationPreparedEvent event) {
+                ContextUtils.setApplicationContext(event.getApplicationContext());
+            }
+        });
+        application.run(args);
+        startTask();
+    }
+
+    /**
+     * 开启定时任务
+     */
+    public static void startTask() {
+        JmsListener jmsListener = ContextUtils.getBean(JmsListener.class);
+        jmsListener.start(null);
+    }
+}

+ 261 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/DruidDBConfiguration.java

@@ -0,0 +1,261 @@
+package com.uas.ps.sync.producer;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.alibaba.druid.support.http.StatViewServlet;
+import com.alibaba.druid.support.http.WebStatFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.web.servlet.FilterRegistrationBean;
+import org.springframework.boot.web.servlet.ServletRegistrationBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Primary;
+import org.springframework.stereotype.Component;
+
+import javax.sql.DataSource;
+import java.sql.SQLException;
+
+@Component
+@ConfigurationProperties(prefix = "datasource")
+public class DruidDBConfiguration {
+
+    private Logger logger = LoggerFactory.getLogger(DruidDBConfiguration.class);
+
+    private String url;
+
+    private String username;
+
+    private String password;
+
+    private String driverClassName;
+
+    private int initialSize;
+
+    private int minIdle;
+
+    private int maxActive;
+
+    private int maxWait;
+
+    private int timeBetweenEvictionRunsMillis;
+
+    private int minEvictableIdleTimeMillis;
+
+    private String validationQuery;
+
+    private boolean testWhileIdle;
+
+    private boolean testOnBorrow;
+
+    private boolean testOnReturn;
+
+    private int timeBetweenLogStatsMillis;
+
+    private boolean poolPreparedStatements;
+
+    private int maxPoolPreparedStatementPerConnectionSize;
+
+    private String filters;
+
+    private String connectionProperties;
+
+    @Bean
+    @Primary
+    public DataSource dataSource() {
+        DruidDataSource dataSource = new DruidDataSource();
+
+        dataSource.setUrl(url);
+        dataSource.setUsername(username);
+        dataSource.setPassword(password);
+        dataSource.setDriverClassName(driverClassName);
+
+        // configuration
+        dataSource.setInitialSize(initialSize);
+        dataSource.setMinIdle(minIdle);
+        dataSource.setMaxActive(maxActive);
+        dataSource.setMaxWait(maxWait);
+        dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
+        dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
+        dataSource.setValidationQuery(validationQuery);
+        dataSource.setTestWhileIdle(testWhileIdle);
+        dataSource.setTestOnBorrow(testOnBorrow);
+        dataSource.setTestOnReturn(testOnReturn);
+        dataSource.setTimeBetweenLogStatsMillis(timeBetweenLogStatsMillis);
+        dataSource.setPoolPreparedStatements(poolPreparedStatements);
+        dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
+        try {
+            dataSource.setFilters(filters);
+        } catch (SQLException e) {
+            logger.error("数据源初始化失败: setFilters", e);
+        }
+        dataSource.setConnectionProperties(connectionProperties);
+        return dataSource;
+    }
+
+    @Bean
+    public ServletRegistrationBean servletRegistrationBean() {
+        return new ServletRegistrationBean(new StatViewServlet(), "/druid/*");
+    }
+
+    @Bean
+    public FilterRegistrationBean filterRegistrationBean() {
+        FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
+        filterRegistrationBean.setFilter(new WebStatFilter());
+        filterRegistrationBean.addUrlPatterns("/*");
+        filterRegistrationBean.addInitParameter("exclusions",
+                "*.js,*.gif,*.jpg,*.png,*.bmp,*.css,*.ico,*.html,/druid/*");
+        return filterRegistrationBean;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public String getDriverClassName() {
+        return driverClassName;
+    }
+
+    public void setDriverClassName(String driverClassName) {
+        this.driverClassName = driverClassName;
+    }
+
+    public int getInitialSize() {
+        return initialSize;
+    }
+
+    public void setInitialSize(int initialSize) {
+        this.initialSize = initialSize;
+    }
+
+    public int getMinIdle() {
+        return minIdle;
+    }
+
+    public void setMinIdle(int minIdle) {
+        this.minIdle = minIdle;
+    }
+
+    public int getMaxActive() {
+        return maxActive;
+    }
+
+    public void setMaxActive(int maxActive) {
+        this.maxActive = maxActive;
+    }
+
+    public int getMaxWait() {
+        return maxWait;
+    }
+
+    public void setMaxWait(int maxWait) {
+        this.maxWait = maxWait;
+    }
+
+    public int getTimeBetweenEvictionRunsMillis() {
+        return timeBetweenEvictionRunsMillis;
+    }
+
+    public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) {
+        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
+    }
+
+    public int getMinEvictableIdleTimeMillis() {
+        return minEvictableIdleTimeMillis;
+    }
+
+    public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) {
+        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
+    }
+
+    public String getValidationQuery() {
+        return validationQuery;
+    }
+
+    public void setValidationQuery(String validationQuery) {
+        this.validationQuery = validationQuery;
+    }
+
+    public boolean isTestWhileIdle() {
+        return testWhileIdle;
+    }
+
+    public void setTestWhileIdle(boolean testWhileIdle) {
+        this.testWhileIdle = testWhileIdle;
+    }
+
+    public boolean isTestOnBorrow() {
+        return testOnBorrow;
+    }
+
+    public void setTestOnBorrow(boolean testOnBorrow) {
+        this.testOnBorrow = testOnBorrow;
+    }
+
+    public boolean isTestOnReturn() {
+        return testOnReturn;
+    }
+
+    public void setTestOnReturn(boolean testOnReturn) {
+        this.testOnReturn = testOnReturn;
+    }
+
+    public int getTimeBetweenLogStatsMillis() {
+        return timeBetweenLogStatsMillis;
+    }
+
+    public void setTimeBetweenLogStatsMillis(int timeBetweenLogStatsMillis) {
+        this.timeBetweenLogStatsMillis = timeBetweenLogStatsMillis;
+    }
+
+    public boolean isPoolPreparedStatements() {
+        return poolPreparedStatements;
+    }
+
+    public void setPoolPreparedStatements(boolean poolPreparedStatements) {
+        this.poolPreparedStatements = poolPreparedStatements;
+    }
+
+    public int getMaxPoolPreparedStatementPerConnectionSize() {
+        return maxPoolPreparedStatementPerConnectionSize;
+    }
+
+    public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) {
+        this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;
+    }
+
+    public String getFilters() {
+        return filters;
+    }
+
+    public void setFilters(String filters) {
+        this.filters = filters;
+    }
+
+    public String getConnectionProperties() {
+        return connectionProperties;
+    }
+
+    public void setConnectionProperties(String connectionProperties) {
+        this.connectionProperties = connectionProperties;
+    }
+}

+ 29 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/RepositoryConfiguration.java

@@ -0,0 +1,29 @@
+package com.uas.ps.sync.producer;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.orm.jpa.EntityManagerFactoryBuilder;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
+import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
+
+import javax.sql.DataSource;
+
+/**
+ * @author sunyj
+ * @since 2018/1/13 16:26
+ */
+@Configuration
+@EnableJpaRepositories
+public class RepositoryConfiguration {
+
+    @Autowired
+    private DataSource dataSource;
+
+    @Bean
+    @Primary
+    public LocalContainerEntityManagerFactoryBean entityManagerFactory(EntityManagerFactoryBuilder builder) {
+        return builder.dataSource(dataSource).packages("com.uas.ps.sync.entity").build();
+    }
+}

+ 43 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/WebAppConfiguration.java

@@ -0,0 +1,43 @@
+package com.uas.ps.sync.producer;
+
+import com.alibaba.fastjson.serializer.SerializerFeature;
+import com.alibaba.fastjson.support.config.FastJsonConfig;
+import com.alibaba.fastjson.support.spring.FastJsonHttpMessageConverter;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.MediaType;
+import org.springframework.http.converter.HttpMessageConverter;
+import org.springframework.http.converter.StringHttpMessageConverter;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
+
+import java.nio.charset.Charset;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Web相关配置
+ *
+ * @author sunyj
+ * @since 2017年2月17日 下午5:45:38
+ */
+@Configuration
+@ComponentScan
+public class WebAppConfiguration extends WebMvcConfigurerAdapter {
+
+    @Override
+    public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
+        FastJsonHttpMessageConverter fastJsonHttpMessageConverter = new FastJsonHttpMessageConverter();
+        fastJsonHttpMessageConverter.setSupportedMediaTypes(Collections.singletonList(MediaType.APPLICATION_JSON_UTF8));
+        FastJsonConfig fastJsonConfig = new FastJsonConfig();
+        fastJsonConfig.setSerializerFeatures(SerializerFeature.DisableCircularReferenceDetect);
+        fastJsonHttpMessageConverter.setFastJsonConfig(fastJsonConfig);
+        converters.add(fastJsonHttpMessageConverter);
+
+        StringHttpMessageConverter stringHttpMessageConverter = new StringHttpMessageConverter(
+                Charset.forName("UTF-8"));
+        stringHttpMessageConverter.setSupportedMediaTypes(Collections.singletonList(MediaType.TEXT_HTML));
+        converters.add(stringHttpMessageConverter);
+        super.configureMessageConverters(converters);
+    }
+
+}

+ 67 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/JmsController.java

@@ -0,0 +1,67 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.uas.ps.sync.entity.SyncMessage;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * 消息队列监听相关请求
+ *
+ * @author sunyj
+ * @since 2016年8月5日 上午11:42:54
+ */
+@Controller
+@RequestMapping("/jms")
+public class JmsController {
+
+    @Autowired
+    private SyncMessageDao syncMessageDao;
+
+    @Autowired
+    private SyncMessageService syncMessageService;
+
+    @Autowired
+    private JmsListener jmsListener;
+
+
+    @RequestMapping("/listen/start")
+    @ResponseBody
+    public String startListen(Long interval, HttpServletRequest request) {
+        jmsListener.start(interval);
+        return "开启成功";
+    }
+
+    @RequestMapping("/listen/stop")
+    @ResponseBody
+    public String stopListen(HttpServletRequest request) {
+        jmsListener.stop();
+        return "关闭成功";
+    }
+
+    @RequestMapping("/listen/restart")
+    @ResponseBody
+    public String restartListen(Long interval, HttpServletRequest request) {
+        if (jmsListener.isRunning()) {
+            jmsListener.stop();
+        }
+        jmsListener.start(interval);
+        return "重启成功";
+    }
+
+    @RequestMapping("/listen/details")
+    @ResponseBody
+    public SPage<SyncMessage> listenDetails(Integer page, Integer size, HttpServletRequest request) {
+        return syncMessageService.findAll(page, size);
+    }
+
+    @RequestMapping("/dequeue")
+    @ResponseBody
+    public boolean dequeueLuceneQueueMessage(Long id, HttpServletRequest request) {
+        syncMessageDao.dequeueMessage(id);
+        return true;
+    }
+}
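
For completeness, a hedged sketch of driving these endpoints from Java; the host and port are placeholders, any HTTP client would do, and because the producer pulls in spring-boot-starter-security the requests may additionally need credentials.

    import org.springframework.web.client.RestTemplate;

    public class JmsControllerClientExample {
        public static void main(String[] args) {
            RestTemplate rest = new RestTemplate();
            String base = "http://localhost:8080";   // placeholder host/port

            // Start the listener with a 5 second polling interval.
            System.out.println(rest.getForObject(base + "/jms/listen/start?interval=5", String.class));

            // ... later, stop it again.
            System.out.println(rest.getForObject(base + "/jms/listen/stop", String.class));
        }
    }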

+ 173 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/JmsListener.java

@@ -0,0 +1,173 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.alibaba.fastjson.JSONObject;
+import com.uas.ps.core.util.Assert;
+import com.uas.ps.core.util.CollectionUtils;
+import com.uas.ps.core.util.StringUtils;
+import com.uas.ps.sync.entity.SyncMessage;
+import com.uas.ps.sync.producer.schedule.Executable;
+import com.uas.ps.sync.producer.schedule.TaskInformation;
+import com.uas.ps.sync.producer.schedule.TaskService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+
+/**
+ * Polls the message queue in the database in (near) real time
+ *
+ * @author sunyj
+ * @since 2018/1/14 16:29
+ */
+@Service
+public class JmsListener {
+
+    /**
+     * Delay the first execution by 3 seconds, so that startup logs do not interleave and become hard to read
+     */
+    private static final long INITIAL_DELAY = 3000;
+    /**
+     * Default wait of 1 second between two consecutive runs
+     */
+    private static final long INTERVAL = 1000;
+
+    @Autowired
+    private SyncMessageDao syncMessageDao;
+
+    @Autowired
+    private TaskService taskService;
+
+    @Autowired
+    private SyncMessageService syncMessageService;
+
+    @Autowired
+    private KafkaTemplate<String, SyncMessage> kafkaTemplate;
+
+    private TaskInformation taskInformation;
+
+    private Logger logger = LoggerFactory.getLogger(getClass());
+
+    /**
+     * Starts listening.
+     *
+     * @param interval time to wait after each batch of jms messages has been processed, in seconds;
+     *                 falls back to the 1-second default when null
+     */
+    public void start(Long interval) {
+        if (interval == null) {
+            interval = INTERVAL;
+        } else {
+            interval *= 1000;
+        }
+        if (interval <= 0) {
+            throw new IllegalArgumentException("interval 不合法:" + interval);
+        }
+
+        if (isRunning()) {
+            throw new IllegalStateException("已存在运行的监听服务");
+        }
+
+        try {
+            String title = "监听消息";
+            Executable command = new Executable() {
+                @Override
+                public String execute() {
+                    SPage<SyncMessage> sPage = syncMessageService.findAll(1, 100);
+                    List<SyncMessage> syncMessages = sPage.getContent();
+                    if (CollectionUtils.isEmpty(syncMessages)) {
+                        return "无消息";
+                    }
+                    for (SyncMessage syncMessage : syncMessages) {
+                        try {
+                            process(syncMessage);
+                        } catch (Exception e) {
+                            logger.error("消息处理失败", e);
+                        }
+                    }
+                    // If the messages span more than one page, consume the next page immediately
+                    if (sPage.getTotalPage() > 1) {
+                        execute();
+                    }
+                    return "正常";
+                }
+            };
+            taskInformation = new TaskInformation(title, command, INITIAL_DELAY, interval, TaskInformation.ScheduleType.FixedDelay);
+            taskService.newTask(taskInformation);
+            if (!taskService.isStopped()) {
+                taskService.stop();
+            }
+            taskService.start();
+        } catch (Exception e) {
+            if (taskInformation != null) {
+                taskInformation = null;
+            }
+            throw new IllegalStateException("开启失败", e);
+        }
+    }
+
+    /**
+     * Stops listening.
+     */
+    public void stop() {
+        if (!isRunning()) {
+            throw new IllegalStateException("监听服务未开启或已关闭");
+        } else {
+            taskService.remove(taskInformation.getCode());
+            taskInformation = null;
+            if (!taskService.isStopped()) {
+                taskService.stop();
+            }
+            taskService.start();
+        }
+    }
+
+    /**
+     * @return whether the listener is currently running
+     */
+    public boolean isRunning() {
+        return taskInformation != null && taskService.exist(taskInformation.getCode());
+    }
+
+    /**
+     * Pushes a message to Kafka.
+     *
+     * @param syncMessage the message to send
+     */
+    private void process(SyncMessage syncMessage) {
+        logger.info("Sending message... " + syncMessage.toString());
+        Long id = syncMessage.getId();
+        syncMessageDao.markRetryCount(id);
+
+        JSONObject dataKey = syncMessage.getDataKey();
+        JSONObject data = syncMessage.getData();
+        String methodType = syncMessage.getMethodType();
+        // Validate the fields according to the type of change
+        switch (methodType.toLowerCase()) {
+            case "delete":
+                Assert.notEmpty(dataKey, "dataKey is empty: " + dataKey);
+                break;
+            case "update":
+            case "insert":
+                Assert.notEmpty(dataKey, "dataKey is empty: " + dataKey);
+                Assert.notEmpty(data, "data is empty: " + data);
+                break;
+            default:
+                throw new IllegalArgumentException("Unsupported method type: " + methodType);
+        }
+
+        // Send the message
+        String batchCode = syncMessage.getBatchCode();
+        // Messages that share a batchCode are sent with that key so they land on the same partition
+        if (!StringUtils.isEmpty(batchCode)) {
+            kafkaTemplate.sendDefault(batchCode, syncMessage);
+        } else {
+            kafkaTemplate.sendDefault(syncMessage);
+        }
+        syncMessageDao.dequeueMessage(id);
+    }
+}
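Note on units: start() takes the interval in seconds and converts it to milliseconds internally, falling back to the 1-second default when the argument is null. A minimal sketch of driving the listener programmatically, assuming a bean where jmsListener has been injected:

    // Poll the sync$message queue, waiting 5 seconds between runs (fixed delay)
    jmsListener.start(5L);
    // ...later, stop the background task again
    if (jmsListener.isRunning()) {
        jmsListener.stop();
    }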

+ 100 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SPage.java

@@ -0,0 +1,100 @@
+package com.uas.ps.sync.producer.jms;
+
+import java.io.Serializable;
+import java.util.List;
+
+public class SPage<T> implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private int totalPage;
+
+    private long totalElement;
+
+    private int page;
+
+    private int size;
+
+    private boolean first;
+
+    private boolean last;
+
+    private List<T> content;
+
+    public SPage() {
+        super();
+    }
+
+    public SPage(int totalPage, long totalElement, int page, int size, boolean first, boolean last) {
+        super();
+        this.totalPage = totalPage;
+        this.totalElement = totalElement;
+        this.page = page;
+        this.size = size;
+        this.first = first;
+        this.last = last;
+    }
+
+    public int getTotalPage() {
+        return totalPage;
+    }
+
+    public void setTotalPage(int totalPage) {
+        this.totalPage = totalPage;
+    }
+
+    public long getTotalElement() {
+        return totalElement;
+    }
+
+    public void setTotalElement(long totalElement) {
+        this.totalElement = totalElement;
+    }
+
+    public int getPage() {
+        return page;
+    }
+
+    public void setPage(int page) {
+        this.page = page;
+    }
+
+    public int getSize() {
+        return size;
+    }
+
+    public void setSize(int size) {
+        this.size = size;
+    }
+
+    public boolean isFirst() {
+        return first;
+    }
+
+    public void setFirst(boolean first) {
+        this.first = first;
+    }
+
+    public boolean isLast() {
+        return last;
+    }
+
+    public void setLast(boolean last) {
+        this.last = last;
+    }
+
+    public List<T> getContent() {
+        return content;
+    }
+
+    public void setContent(List<T> content) {
+        this.content = content;
+    }
+
+    @Override
+    public String toString() {
+        return "SPage [totalPage=" + totalPage + ", totalElement=" + totalElement + ", page=" + page + ", size=" + size
+                + ", first=" + first + ", last=" + last + ", content=" + content + "]";
+    }
+
+}

+ 52 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/ScheduleController.java

@@ -0,0 +1,52 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.uas.ps.sync.producer.schedule.TaskInformation;
+import com.uas.ps.sync.producer.schedule.TaskService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.List;
+
+@Controller
+@RequestMapping("/schedule")
+public class ScheduleController {
+
+    @Autowired
+    private TaskService taskService;
+
+    @RequestMapping("/tasks")
+    @ResponseBody
+    public List<TaskInformation> allTaskInformations(HttpServletRequest request) {
+        return taskService.allTaskInformations();
+    }
+
+    @RequestMapping("/start")
+    @ResponseBody
+    public String start(HttpServletRequest request) {
+        return taskService.start();
+    }
+
+    @RequestMapping("/stop")
+    @ResponseBody
+    public String stop(HttpServletRequest request) {
+        return taskService.stop();
+    }
+
+    @RequestMapping("/restart")
+    @ResponseBody
+    public String restart(HttpServletRequest request) {
+        if (!taskService.isStopped()) {
+            taskService.stop();
+        }
+        return taskService.start();
+    }
+
+    @RequestMapping("/isStopped")
+    @ResponseBody
+    public boolean isStopped(HttpServletRequest request) {
+        return taskService.isStopped();
+    }
+}

+ 55 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageDao.java

@@ -0,0 +1,55 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.uas.ps.sync.entity.SyncMessage;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+import org.springframework.data.jpa.repository.Modifying;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.jpa.repository.query.Procedure;
+import org.springframework.stereotype.Repository;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.util.List;
+
+/**
+ * @author sunyj
+ * @since 2018/1/14 16:39
+ */
+@Repository
+public interface SyncMessageDao extends JpaSpecificationExecutor<SyncMessage>, JpaRepository<SyncMessage, Long> {
+
+    /**
+     * Fetches a slice of pending messages, ordered by priority (descending) and id.
+     *
+     * @param start offset of the first record
+     * @param size  maximum number of records
+     * @return the matching messages
+     */
+    @Query(value = "select * from sync$message where me_retry_count < 5 order by me_priority desc, me_id limit ?1, ?2", nativeQuery = true)
+    List<SyncMessage> findList(Integer start, Integer size);
+
+    /**
+     * Counts the pending messages (retry count below 5).
+     *
+     * @return the row count
+     */
+    @Query(value = "select count(1) from sync$message where me_retry_count < 5", nativeQuery = true)
+    long count();
+
+    /**
+     * Increments a message's retry count by 1
+     */
+    @Transactional
+    @Modifying
+    @Query(value = "update sync$message set me_retry_count = me_retry_count + 1 where me_id = ?1", nativeQuery = true)
+    void markRetryCount(Long id);
+
+    /**
+     * Dequeues a message.
+     *
+     * @param id the message id
+     */
+    @Transactional
+    @Procedure(procedureName = "sync$dequeue_message")
+    void dequeueMessage(Long id);
+}
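A minimal sketch of how the producer drains the queue with this repository, assuming an injected SyncMessageDao named syncMessageDao (the retry guard of 5 attempts and the ordering come from the native queries above):

    // First page of up to 100 pending messages, highest priority first
    List<SyncMessage> pending = syncMessageDao.findList(0, 100);
    for (SyncMessage message : pending) {
        // Record the attempt before sending, so a crash still counts against the retry limit
        syncMessageDao.markRetryCount(message.getId());
        // ... push the message to Kafka here ...
        // Remove the row via the sync$dequeue_message stored procedure once sent
        syncMessageDao.dequeueMessage(message.getId());
    }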

+ 19 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageService.java

@@ -0,0 +1,19 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.uas.ps.sync.entity.SyncMessage;
+
+/**
+ * @author sunyj
+ * @since 2018/1/14 16:41
+ */
+public interface SyncMessageService {
+
+    /**
+     * Fetches a page of messages.
+     *
+     * @param page page number (1-based)
+     * @param size page size
+     * @return the page of messages
+     */
+    SPage<SyncMessage> findAll(Integer page, Integer size);
+}

+ 66 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/jms/SyncMessageServiceImpl.java

@@ -0,0 +1,66 @@
+package com.uas.ps.sync.producer.jms;
+
+import com.uas.ps.sync.entity.SyncMessage;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+
+/**
+ * @author sunyj
+ * @since 2018/1/14 16:43
+ */
+@Service
+public class SyncMessageServiceImpl implements SyncMessageService {
+
+    /**
+     * Default page number
+     */
+    private static final int PAGE_INDEX = 1;
+
+    /**
+     * Default page size
+     */
+    private static final int PAGE_SIZE = 20;
+
+    @Autowired
+    private SyncMessageDao syncMessageDao;
+
+    @Override
+    public SPage<SyncMessage> findAll(Integer page, Integer size) {
+        // Normalize the paging parameters
+        if (page == null || page <= 0) {
+            page = PAGE_INDEX;
+        }
+        if (size == null || size <= 0) {
+            size = PAGE_SIZE;
+        }
+
+        SPage<SyncMessage> sPage = new SPage<>();
+        long totalElement = syncMessageDao.count();
+        sPage.setTotalElement(totalElement);
+        // No messages at all, return the empty page
+        if (totalElement == 0) {
+            return sPage;
+        }
+
+        int totalPage = (int) Math.ceil(totalElement / (1.0 * size));
+        sPage.setTotalPage(totalPage);
+        // Clamp the page number if it exceeds the last page
+        if (page > totalPage) {
+            page = totalPage;
+        }
+        int start = (page - 1) * size;
+        sPage.setPage(page);
+        sPage.setSize(size);
+        if (page == 1) {
+            sPage.setFirst(true);
+        }
+        if (page == totalPage) {
+            sPage.setLast(true);
+        }
+        List<SyncMessage> messages = syncMessageDao.findList(start, size);
+        sPage.setContent(messages);
+        return sPage;
+    }
+}
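A worked example of the paging math above: with totalElement = 45 and size = 20, totalPage = ceil(45 / 20.0) = 3; a request for page = 5 is clamped to 3, giving start = (3 - 1) * 20 = 40 and a page marked last (but not first). Requesting page = 1 yields start = 0 and first = true.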

+ 17 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/Executable.java

@@ -0,0 +1,17 @@
+package com.uas.ps.sync.producer.schedule;
+
+/**
+ * An executable unit of work, used to run scheduled tasks.
+ *
+ * @author sunyj
+ * @since 2016-12-19 11:31:39
+ */
+public interface Executable {
+
+    /**
+     * Executes the scheduled task.
+     *
+     * @return the result of the run
+     */
+    public String execute();
+}

+ 36 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/NumberGenerator.java

@@ -0,0 +1,36 @@
+package com.uas.ps.sync.producer.schedule;
+
+import java.util.Calendar;
+import java.util.Date;
+
+/**
+ * Utility for generating numeric identifiers.
+ *
+ * @author sunyj
+ * @since 2017-08-22 11:34:44
+ */
+public class NumberGenerator {
+
+    /**
+     * Offset used when generating ids
+     */
+    private static int offset = 0;
+
+    /**
+     * @return a unique id (unique as long as no more than 100 ids are generated within a single millisecond)
+     */
+    public static String generateId() {
+        // Offset, cycles within 100
+        offset = (offset + 1) % 100;
+        // Current time in milliseconds
+        long now = new Date().getTime();
+        // Epoch start: 2017-01-01 00:00:00.000
+        Calendar startCalendar = Calendar.getInstance();
+        startCalendar.set(2017, 0, 1, 0, 0, 0);
+        startCalendar.set(Calendar.MILLISECOND, 0);
+        // Add 16 to the offset so that it always renders as exactly two hex characters
+        String hex = Long.toHexString(now - startCalendar.getTimeInMillis()) + Integer.toHexString(offset + 16);
+        // Convert to uppercase
+        return hex.toUpperCase();
+    }
+}
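For illustration, assume a call made exactly 365 days after the 2017-01-01 epoch: the elapsed time is 31,536,000,000 ms, Long.toHexString gives "757b12c00", the first offset maps to 1 + 16 = 17 → "11", so the returned id is "757B12C0011". The id length grows slowly with time, and uniqueness only holds within a single JVM for at most 100 calls per millisecond; the static offset field is also not updated atomically, so concurrent callers could in principle collide.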

+ 154 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskInformation.java

@@ -0,0 +1,154 @@
+package com.uas.ps.sync.producer.schedule;
+
+import java.util.Objects;
+
+/**
+ * Information describing a scheduled task.
+ *
+ * @author sunyj
+ * @since 2016-12-19 10:21:28
+ */
+public class TaskInformation {
+
+    /**
+     * Task code
+     */
+    private String code;
+
+    /**
+     * Task title
+     */
+    private String title;
+
+    /**
+     * The task to be executed
+     */
+    private Executable command;
+
+    /**
+     * Delay before the first execution (milliseconds)
+     */
+    private long initialDelay;
+
+    /**
+     * Wait between two consecutive executions (milliseconds)
+     */
+    private long interval;
+
+    private ScheduleType scheduleType;
+
+    public TaskInformation() {
+        super();
+    }
+
+    public TaskInformation(String title, Executable command, long initialDelay, long interval, ScheduleType scheduleType) {
+        init();
+        this.title = title;
+        this.command = command;
+        this.initialDelay = initialDelay;
+        this.interval = interval;
+        this.scheduleType = scheduleType;
+    }
+
+    public void init() {
+        code = NumberGenerator.generateId();
+    }
+
+    public String getCode() {
+        return code;
+    }
+
+    public void setCode(String code) {
+        this.code = code;
+    }
+
+    public String getTitle() {
+        return title;
+    }
+
+    public void setTitle(String title) {
+        this.title = title;
+    }
+
+    public Executable getCommand() {
+        return command;
+    }
+
+    public void setCommand(Executable command) {
+        this.command = command;
+    }
+
+    public long getInitialDelay() {
+        return initialDelay;
+    }
+
+    public void setInitialDelay(long initialDelay) {
+        this.initialDelay = initialDelay;
+    }
+
+    public long getInterval() {
+        return interval;
+    }
+
+    public void setInterval(long interval) {
+        this.interval = interval;
+    }
+
+    public ScheduleType getScheduleType() {
+        return scheduleType;
+    }
+
+    public void setScheduleType(ScheduleType scheduleType) {
+        this.scheduleType = scheduleType;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        TaskInformation other = (TaskInformation) obj;
+        // command is hard to compare, so it is skipped; code is intentionally not compared either
+        return Objects.equals(title, other.getTitle()) && initialDelay == other.getInitialDelay()
+                && interval == other.getInterval() && scheduleType == other.getScheduleType();
+    }
+
+    @Override
+    public String toString() {
+        return "TaskInformation [code=" + code + ", title=" + title + ", initialDelay=" + initialDelay + ", interval=" + interval + ", scheduleType=" + scheduleType + "]";
+    }
+
+    /**
+     * How the interval between executions is interpreted
+     */
+    public enum ScheduleType {
+        /**
+         * Creates and executes a periodic action that becomes enabled first
+         * after the given initial delay, and subsequently with the given
+         * period; that is executions will commence after
+         * <tt>initialDelay</tt> then <tt>initialDelay+period</tt>, then
+         * <tt>initialDelay + 2 * period</tt>, and so on.
+         * If any execution of the task
+         * encounters an exception, subsequent executions are suppressed.
+         * Otherwise, the task will only terminate via cancellation or
+         * termination of the executor.  If any execution of this task
+         * takes longer than its period, then subsequent executions
+         * may start late, but will not concurrently execute.
+         */
+        FixedRate,
+
+        /**
+         * Creates and executes a periodic action that becomes enabled first
+         * after the given initial delay, and subsequently with the
+         * given delay between the termination of one execution and the
+         * commencement of the next.  If any execution of the task
+         * encounters an exception, subsequent executions are suppressed.
+         * Otherwise, the task will only terminate via cancellation or
+         * termination of the executor.
+         */
+        FixedDelay
+    }
+}

+ 74 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskLog.java

@@ -0,0 +1,74 @@
+package com.uas.ps.sync.producer.schedule;
+
+import com.alibaba.fastjson.JSON;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * Execution log entry of a scheduled task.
+ *
+ * @author sunyj
+ * @since 2016-12-19 10:25:50
+ */
+public class TaskLog {
+
+    /**
+     * Information about the scheduled task
+     */
+    private TaskInformation taskInformation;
+
+    /**
+     * Time the task was executed
+     */
+    private Date executeTime;
+
+    /**
+     * Result of the run
+     */
+    private String result;
+
+    public TaskLog(TaskInformation taskInformation, Date executeTime, String result) {
+        super();
+        this.taskInformation = taskInformation;
+        this.executeTime = executeTime;
+        this.result = result;
+    }
+
+    public TaskInformation getTaskInformation() {
+        return taskInformation;
+    }
+
+    public void setTaskInformation(TaskInformation taskInformation) {
+        this.taskInformation = taskInformation;
+    }
+
+    public Date getExecuteTime() {
+        return executeTime;
+    }
+
+    public void setExecuteTime(Date executeTime) {
+        this.executeTime = executeTime;
+    }
+
+    public String getResult() {
+        return result;
+    }
+
+    public void setResult(String result) {
+        this.result = result;
+    }
+
+    public String toJSONString() {
+        return "{\"taskInformation\": " + JSON.toJSONString(taskInformation) + ", \"executeTime\": \""
+                + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(executeTime) + "\", \"result\": \"" + result
+                + "\"}";
+    }
+
+    @Override
+    public String toString() {
+        return "TaskLog [taskInformation=" + taskInformation + ", executeTime=" + executeTime + ", result=" + result
+                + "]";
+    }
+
+}

+ 62 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskService.java

@@ -0,0 +1,62 @@
+package com.uas.ps.sync.producer.schedule;
+
+import java.util.List;
+
+/**
+ * Manages scheduled tasks.
+ *
+ * @author sunyj
+ * @since 2016-12-19 10:52:29
+ */
+public interface TaskService {
+    /**
+     * Registers a new scheduled task.
+     *
+     * @param taskInformation information describing the task
+     */
+    public void newTask(TaskInformation taskInformation);
+
+    /**
+     * Removes the scheduled task with the given code.
+     *
+     * @param code the task code
+     */
+    void remove(String code);
+
+    /**
+     * Checks whether a scheduled task with the given code exists.
+     *
+     * @param code the task code
+     * @return true if the task exists
+     */
+    boolean exist(String code);
+
+    /**
+     * Lists the registered scheduled tasks.
+     *
+     * @return information about every registered task
+     */
+    public List<TaskInformation> allTaskInformations();
+
+    /**
+     * Starts the scheduled tasks.
+     *
+     * @return a human-readable result message
+     */
+    public String start();
+
+    /**
+     * Stops the scheduled tasks.
+     *
+     * @return a human-readable result message
+     */
+    public String stop();
+
+    /**
+     * Whether scheduling is currently stopped.
+     *
+     * @return true if no scheduler is running
+     */
+    public boolean isStopped();
+
+}

+ 209 - 0
ps-sync-producer/src/main/java/com/uas/ps/sync/producer/schedule/TaskServiceImpl.java

@@ -0,0 +1,209 @@
+package com.uas.ps.sync.producer.schedule;
+
+import com.uas.ps.core.util.CollectionUtils;
+import com.uas.ps.core.util.DateFormatUtils;
+import com.uas.ps.core.util.ExceptionUtils;
+import com.uas.ps.core.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+import org.springframework.util.Assert;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Manages scheduled tasks with a single-threaded ScheduledExecutorService.
+ *
+ * @author sunyj
+ * @since 2016-12-19 10:54:43
+ */
+@Service
+public class TaskServiceImpl implements TaskService {
+
+    /**
+     * Directory in which the task execution log file is written
+     */
+    private static final String LOG_DIR = System.getProperty("java.io.tmpdir");
+    /**
+     * Registered task information
+     */
+    private List<TaskInformation> taskInformations = new ArrayList<>();
+    /**
+     * Scheduler that runs the tasks
+     */
+    private ScheduledExecutorService scheduledExecutorService;
+    private Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class);
+
+    @Override
+    public void newTask(TaskInformation taskInformation) {
+        if (taskInformation == null || StringUtils.isEmpty(taskInformation.getTitle())
+                || taskInformation.getCommand() == null) {
+            throw new IllegalArgumentException("定时任务的 title 和 command 不可为空");
+        }
+        if (contain(taskInformation)) {
+            throw new IllegalStateException("任务已存在:" + taskInformation);
+        }
+        taskInformations.add(taskInformation);
+    }
+
+    @Override
+    public void remove(String code) {
+        Assert.hasText(code, "code 为空");
+        for (TaskInformation d : taskInformations) {
+            if (Objects.equals(code, d.getCode())) {
+                taskInformations.remove(d);
+                return;
+            }
+        }
+        throw new IllegalArgumentException("定时任务不存在:" + code);
+    }
+
+    @Override
+    public boolean exist(String code) {
+        Assert.hasText(code, "code 为空");
+        for (TaskInformation d : taskInformations) {
+            if (Objects.equals(code, d.getCode())) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Checks whether an equivalent scheduled task has already been registered.
+     *
+     * @param taskInformation the task to look for
+     * @return true if an equal task already exists
+     */
+    private boolean contain(TaskInformation taskInformation) {
+        if (CollectionUtils.isEmpty(taskInformations)) {
+            return false;
+        }
+        for (TaskInformation d : taskInformations) {
+            if (taskInformation.equals(d)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Appends a log entry to the local log file.
+     *
+     * @param log the entry to persist
+     */
+    private void saveLog(TaskLog log) {
+        if (log == null) {
+            throw new NullPointerException();
+        }
+        FileWriter fileWriter = null;
+        try {
+            String logFilePath = (LOG_DIR.endsWith(File.separator) ? LOG_DIR : LOG_DIR + "/") + "sync-producer-task-log.log";
+            fileWriter = new FileWriter(logFilePath, true);
+            fileWriter.write(String.format("%s [Task: %s] %s\n", DateFormatUtils.DATETIME_FORMAT.format(log.getExecuteTime()),
+                    log.getTaskInformation().getTitle(), log.getResult()));
+            fileWriter.flush();
+        } catch (IOException e) {
+            logger.error("", e);
+        } finally {
+            if (fileWriter != null) {
+                try {
+                    fileWriter.close();
+                } catch (IOException e) {
+                    logger.error("", e);
+                }
+            }
+        }
+    }
+
+    @Override
+    public List<TaskInformation> allTaskInformations() {
+        return taskInformations;
+    }
+
+    @Override
+    public String start() {
+        String message;
+        if (!isStopped()) {
+            message = "已存在运行的定时任务";
+            logger.error(message);
+            return message;
+        }
+        if (!CollectionUtils.isEmpty(taskInformations)) {
+            // Use a single worker thread, so that concurrent tasks do not interfere with each other and cause failures
+            scheduledExecutorService = Executors.newScheduledThreadPool(1);
+            for (TaskInformation taskInformation : taskInformations) {
+                logger.info("New task: " + taskInformation);
+                switch (taskInformation.getScheduleType()) {
+                    case FixedRate:
+                        scheduledExecutorService.scheduleAtFixedRate(getCommand(taskInformation),
+                                taskInformation.getInitialDelay(), taskInformation.getInterval(), TimeUnit.MILLISECONDS);
+                        break;
+                    case FixedDelay:
+                        scheduledExecutorService.scheduleWithFixedDelay(getCommand(taskInformation),
+                                taskInformation.getInitialDelay(), taskInformation.getInterval(), TimeUnit.MILLISECONDS);
+                        break;
+                }
+            }
+            message = "已开启定时任务:" + taskInformations;
+            logger.info(message + "\n");
+            return message;
+        } else {
+            message = "定时任务为空";
+            logger.error(message + "\n");
+            return message;
+        }
+    }
+
+    /**
+     * Wraps the task's command in a Runnable that logs the outcome of every run.
+     *
+     * @param taskInformation the task whose command should be executed
+     * @return the runnable handed to the scheduler
+     */
+    private Runnable getCommand(final TaskInformation taskInformation) {
+        return new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    Executable command = taskInformation.getCommand();
+                    String result = command.execute();
+                    saveLog(new TaskLog(taskInformation, new Date(), result));
+                } catch (Exception e) {
+                    logger.error("定时任务出错", e);
+                    saveLog(new TaskLog(taskInformation, new Date(),
+                            "定时任务出错" + ExceptionUtils.getDetailedMessage(e)));
+                }
+            }
+        };
+    }
+
+    @Override
+    public String stop() {
+        String message;
+        if (isStopped()) {
+            message = "定时任务已经停止或者未开启过";
+            logger.error(message);
+            return message;
+        }
+        logger.info("Remove old tasks...");
+        scheduledExecutorService.shutdownNow();
+        message = "已关闭定时任务";
+        logger.info(message + "\n");
+        return message;
+    }
+
+    @Override
+    public boolean isStopped() {
+        return scheduledExecutorService == null || scheduledExecutorService.isShutdown() || scheduledExecutorService.isTerminated();
+    }
+}
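Put together, registering and running a task looks roughly like the sketch below. The service would normally be injected by Spring rather than instantiated directly; the title and timings are arbitrary example values.

    import com.uas.ps.sync.producer.schedule.Executable;
    import com.uas.ps.sync.producer.schedule.TaskInformation;
    import com.uas.ps.sync.producer.schedule.TaskService;
    import com.uas.ps.sync.producer.schedule.TaskServiceImpl;

    public class TaskServiceSketch {
        public static void main(String[] args) throws InterruptedException {
            // Normally injected as a @Service bean; constructed directly here for illustration
            TaskService taskService = new TaskServiceImpl();
            Executable heartbeat = new Executable() {
                @Override
                public String execute() {
                    return "ok";   // the work performed on every run
                }
            };
            // 3 s initial delay, then 1 s between the end of one run and the start of the next
            TaskInformation info = new TaskInformation("heartbeat", heartbeat,
                    3000, 1000, TaskInformation.ScheduleType.FixedDelay);
            taskService.newTask(info);
            System.out.println(taskService.start());   // schedules every registered task on one thread
            Thread.sleep(10_000);
            System.out.println(taskService.stop());    // shuts the executor down; start() can be called again
        }
    }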

+ 33 - 0
ps-sync-producer/src/main/resources/application.yml

@@ -0,0 +1,33 @@
+spring:
+ kafka:
+  bootstrap-servers: 10.10.100.11:9292,10.10.100.12:9292,10.10.100.13:9292,10.10.100.14:9292,10.10.100.15:9292,10.10.100.16:9292
+  producer:
+   key-serializer: org.apache.kafka.common.serialization.StringSerializer
+   value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
+  template:
+    default-topic: PUBLIC
+ profiles:
+   active: dev
+
+security:
+ basic:
+  enabled: true
+  path: /**
+ user:
+  name: sync-producer-admin
+  password: select111***
+  role: ADMIN
+ ignored: false
+
+---
+spring:
+ profiles: cloud
+ kafka:
+  bootstrap-servers: 10.10.0.69:9291,10.10.0.148:9292,10.10.0.98:9293
+
+---
+spring:
+ profiles: dev
+ kafka:
+  template:
+    default-topic: PUBLIC_DEV

+ 19 - 0
ps-sync-producer/src/main/resources/config/application-cloud.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://10.10.0.208:3306/mall_prod?characterEncoding=utf-8&useSSL=false
+datasource.username=sa
+datasource.password=Select123!#%*(
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=100
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=300000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-producer/src/main/resources/config/application-dev.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://192.168.253.6:3306/public_resources?characterEncoding=utf-8&useSSL=false
+datasource.username=root
+datasource.password=select111***
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 19 - 0
ps-sync-producer/src/main/resources/config/application-test.properties

@@ -0,0 +1,19 @@
+datasource.url=jdbc:mysql://192.168.253.6:3306/mall_test_dev?characterEncoding=utf-8&useSSL=false
+datasource.username=root
+datasource.password=select111***
+datasource.driverClassName=com.mysql.jdbc.Driver
+datasource.initialSize=1
+datasource.minIdle=1
+datasource.maxActive=20
+datasource.maxWait=60000
+datasource.timeBetweenEvictionRunsMillis=60000
+datasource.minEvictableIdleTimeMillis=300000
+datasource.validationQuery=SELECT 1 FROM DUAL
+datasource.testWhileIdle=true
+datasource.testOnBorrow=true
+datasource.testOnReturn=false
+datasource.poolPreparedStatements=true
+datasource.timeBetweenLogStatsMillis=60000
+datasource.maxPoolPreparedStatementPerConnectionSize=20
+datasource.filters=stat,slf4j
+datasource.connectionProperties=druid.stat.mergeSql=false;druid.stat.slowSqlMillis=5000

+ 44 - 0
ps-sync-producer/src/main/resources/logback.xml

@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+	<appender name="FILE"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<File>logs/log.log</File>
+		<encoder>
+			<pattern>
+				%date{yyyy-MM-dd HH:mm:ss:SSS} [%relative ms] %-5level [%50.50(%logger{36}.%method:%line)] ---- %msg%n
+			</pattern>
+			<charset>UTF-8</charset> <!-- set the charset here -->
+		</encoder>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<!-- daily rollover -->
+			<FileNamePattern>logs/log.%d{yyyy-MM-dd}.log</FileNamePattern>
+			<!-- keep 10 days' worth of history -->
+			<maxHistory>10</maxHistory>
+		</rollingPolicy>
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>INFO</level>
+		</filter>
+	</appender>
+
+	<!-- Console output -->
+	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+		<!-- encoder defaults to ch.qos.logback.classic.encoder.PatternLayoutEncoder -->
+		<encoder>
+			<pattern>
+				%date{yyyy-MM-dd HH:mm:ss:SSS} [%relative ms] %-5level [%50.50(%logger{36}.%method:%line)] ---- %msg%n
+			</pattern>
+			<charset>UTF-8</charset> <!-- set the charset here -->
+		</encoder>
+		<!-- Only log level WARN and above -->
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>INFO</level>
+		</filter>
+	</appender>
+
+	<!-- Enable FILE and STDOUT appenders for all log messages. By default, 
+		only log at level INFO and above. -->
+	<root level="INFO">
+		<appender-ref ref="FILE" />
+		<appender-ref ref="STDOUT" />
+	</root>
+</configuration>