Browse Source

Wait for the sync response in /product/update

sunyj 7 years ago
parent
commit
7e1048aa3a

+ 114 - 0
src/main/java/com/uas/ps/product/MessageConsumer.java

@@ -0,0 +1,114 @@
+package com.uas.ps.product;
+
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * @author sunyj
+ * @since 2018/1/17 19:27
+ */
+@Service
+@Configuration
+public class MessageConsumer {
+
+    private static final List<String> topics = Collections.singletonList("RESPONSE");
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    @Autowired
+    private KafkaProperties kafkaProperties;
+
+    @Value("${kafka.consumer.max-poll-interval-millis}")
+    private Long maxPollIntervalMillis;
+
+    private List<String> batchCodes = new ArrayList<>();
+
+    @Bean
+    public Properties kafkaConfig() {
+        Properties config = new Properties();
+        // Kafka cluster bootstrap servers
+        setProperty(config, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
+        KafkaProperties.Consumer kafkaPropertiesConsumer = kafkaProperties.getConsumer();
+        // consumer group id
+        setProperty(config, ConsumerConfig.GROUP_ID_CONFIG, kafkaPropertiesConsumer.getGroupId());
+        setProperty(config, ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, kafkaPropertiesConsumer.getKeyDeserializer());
+        setProperty(config, ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, kafkaPropertiesConsumer.getValueDeserializer());
+
+        // whether the consumer auto-commits the offsets of consumed messages
+        setProperty(config, ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, kafkaPropertiesConsumer.getEnableAutoCommit());
+        // auto-commit interval
+        setProperty(config, ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, kafkaPropertiesConsumer.getAutoCommitInterval());
+        // maximum number of records returned per poll
+        setProperty(config, ConsumerConfig.MAX_POLL_RECORDS_CONFIG, kafkaPropertiesConsumer.getMaxPollRecords());
+        // auto.offset.reset decides where this group.id starts when no committed offset exists
+        // if not set, it defaults to latest, i.e. the offset of the newest message on the topic
+        // with latest, the consumer only sees messages produced after it has started
+        // usually configured as either earliest or latest
+        setProperty(config, ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, kafkaPropertiesConsumer.getAutoOffsetReset());
+        return config;
+    }
+
+    private void setProperty(Properties properties, Object key, Object value) {
+        if (value != null) {
+            properties.put(key, value);
+        }
+    }
+
+    /**
+     * If the groupId already exists, consumption resumes from the last committed offset.
+     * If the groupId does not exist yet, consumption starts from the current end of the partition.
+     * <p>
+     * Note: if enable.auto.commit is false and the consumed offsets are never committed,
+     * the same records will be consumed again.
+     */
+    public void waitResponse(String batchCode) {
+        logger.info("waiting batchCode: " + batchCode);
+        if(batchCodes.contains(batchCode)){
+            batchCodes.remove(batchCode);
+            return;
+        }
+        final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(kafkaConfig());
+        // subscribe to the RESPONSE topic
+        consumer.subscribe(topics);
+        boolean processed = false;
+        while (!processed) {
+            try {
+                ConsumerRecords<String, String> records = consumer.poll(maxPollIntervalMillis);
+                logger.info("topic: " + topics + " poll returned records, count: " + records.count());
+                for (ConsumerRecord<String, String> record : records) {
+                    String value = record.value();
+                    logger.info("received value: " + value + ", waiting for batchCode: " + batchCode);
+                    if (value.equals(batchCode)) {
+                        processed = true;
+                    } else {
+                        // stash the other batch's code so the request waiting for it can return early
+                        batchCodes.add(value);
+                    }
+                    // commit the consumed offsets manually when auto-commit is disabled
+                    Boolean enableAutoCommit = kafkaProperties.getConsumer().getEnableAutoCommit();
+                    if (enableAutoCommit != null && !enableAutoCommit) {
+                        consumer.commitSync();
+                    }
+                }
+            } catch (Exception e) {
+                logger.error("error while processing message", e);
+            }
+        }
+        consumer.unsubscribe();
+        consumer.close();
+    }
+}
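
For context on how the blocking call works: waitResponse(batchCode) returns only after a record whose value equals batchCode shows up on the RESPONSE topic. The publisher of that record (presumably the sync pipeline on the receiving side) is not part of this commit; the snippet below is only a minimal sketch of that side, assuming a plain KafkaProducer with String serializers and a hypothetical publishBatchDone helper.

package com.uas.ps.product;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Hypothetical producer-side sketch: once the batch identified by batchCode has been
 * fully synchronized, publish the batchCode to the RESPONSE topic so that
 * MessageConsumer.waitResponse(batchCode) can unblock.
 */
public class ResponsePublisherSketch {

    public static void publishBatchDone(String bootstrapServers, String batchCode) {
        Properties config = new Properties();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(config)) {
            // MessageConsumer matches on the record value, so the batchCode goes into the value
            producer.send(new ProducerRecord<>("RESPONSE", batchCode));
            producer.flush();
        }
    }
}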

+ 20 - 7
src/main/java/com/uas/ps/product/controller/ProductController.java

@@ -2,25 +2,22 @@ package com.uas.ps.product.controller;
 
 import com.alibaba.fastjson.JSONObject;
 import com.uas.ps.core.util.CollectionUtils;
-import com.uas.ps.core.util.StringUtils;
 import com.uas.ps.entity.Product;
 import com.uas.ps.entity.Status;
+import com.uas.ps.product.MessageConsumer;
 import com.uas.ps.product.entity.Prod;
 import com.uas.ps.product.entity.ProductSaler;
+import com.uas.ps.product.repository.ProductDao;
 import com.uas.ps.product.service.ProductService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.ui.ModelMap;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.ResponseBody;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
 
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.UUID;
 
 /**
  * 物料处理
@@ -35,6 +32,12 @@ public class ProductController {
     @Autowired
     private ProductService productService;
 
+    @Autowired
+    private ProductDao productDao;
+
+    @Autowired
+    private MessageConsumer consumer;
+
 //    @Autowired
 //    private UserDao userDao;
 
@@ -99,6 +102,12 @@ public class ProductController {
         List<Product> productInfo = JSONObject.parseArray(data, Product.class);
         List<Long> resultProducts = new ArrayList<>();
 
+        boolean fromB2B = sourceApp != null && sourceApp.equalsIgnoreCase("B2B");
+        String batchCode = UUID.randomUUID().toString().replace("-", "");
+        if (fromB2B) {
+            productDao.setSessionVariable(sourceApp, batchCode, productInfo.size());
+        }
+
         for (Product product : productInfo) {
             List<Product> products = productService.findByEnUUAndPCmpCodeAndPBrandEn(product.getEnUU(),product.getpCmpCode(),product.getpBrandEn());
             if (org.apache.commons.collections.CollectionUtils.isEmpty(products)) {
@@ -108,6 +117,10 @@ public class ProductController {
                 resultProducts.add(products.get(0).getId());
             }
         }
+        productDao.unsetSessionVariable();
+        if (fromB2B) {
+            consumer.waitResponse(batchCode);
+        }
 //        logger.log("更新物料", "[" + productInfo.getUserUU() + "]更新了id为" + productInfo.getId() + "的["
 //                + productInfo.getTitle() + "]");
         return resultProducts;
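
Taken together, the new controller code tags the DB session before the updates, clears it afterwards, and then blocks until the sync side acknowledges the batch. Below is a condensed sketch of that flow, not verbatim controller code; moving unsetSessionVariable into a finally block is one possible hardening, whereas the commit itself calls it unconditionally after the loop.

    // Hypothetical condensed view of the B2B branch of /product/update, using the fields added above.
    private List<Long> updateWithSyncGate(String sourceApp, List<Product> productInfo) {
        boolean fromB2B = sourceApp != null && sourceApp.equalsIgnoreCase("B2B");
        String batchCode = UUID.randomUUID().toString().replace("-", "");
        List<Long> resultProducts = new ArrayList<>();
        if (fromB2B) {
            // Tag the DB session so the sync pipeline can attribute the changes to this batch.
            productDao.setSessionVariable(sourceApp, batchCode, productInfo.size());
        }
        try {
            for (Product product : productInfo) {
                // save or update each product and collect its id, as in the commit
            }
        } finally {
            // Clear the session variables even if one of the updates throws.
            productDao.unsetSessionVariable();
        }
        if (fromB2B) {
            // Block until the sync side publishes this batchCode on the RESPONSE topic.
            consumer.waitResponse(batchCode);
        }
        return resultProducts;
    }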

+ 13 - 0
src/main/java/com/uas/ps/product/repository/ProductDao.java

@@ -5,6 +5,7 @@ import org.springframework.data.jpa.repository.JpaRepository;
 import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
 import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.jpa.repository.query.Procedure;
 import org.springframework.data.repository.query.Param;
 import org.springframework.stereotype.Repository;
 
@@ -138,6 +139,18 @@ public interface ProductDao extends JpaSpecificationExecutor<Product>, JpaReposi
     @Query("update Product p set p.isPurchase = :switchStatus where p.id = :id")
     void updatePurchaseStatusById(@Param("id") Long id, @Param("switchStatus") Integer switchStatus);
 
+    /**
+     * Set the session variables
+     */
+    @Procedure(procedureName = "sync$set_session_variable")
+    void setSessionVariable(String sourceApp, String batchCode, Integer batchSize);
+
+    /**
+     * Unset the session variables
+     */
+    @Procedure(procedureName = "sync$unset_session_variable")
+    void unsetSessionVariable();
+
 //    /**
 //     * 通过uu查询非标准器件进行存储
 //     *
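
For readers unfamiliar with @Procedure: Spring Data JPA turns the two methods above into stored procedure calls. Roughly the JDBC equivalent is sketched below, purely as an illustration; the procedures' bodies and parameter names are not part of this commit, and the argument order simply follows the Java method signature.

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.SQLException;

public class SessionVariableCallsSketch {

    public static void setSessionVariable(Connection conn, String sourceApp,
                                          String batchCode, Integer batchSize) throws SQLException {
        try (CallableStatement call = conn.prepareCall("{call sync$set_session_variable(?, ?, ?)}")) {
            call.setString(1, sourceApp);
            call.setString(2, batchCode);
            call.setInt(3, batchSize);
            call.execute();
        }
    }

    public static void unsetSessionVariable(Connection conn) throws SQLException {
        try (CallableStatement call = conn.prepareCall("{call sync$unset_session_variable()}")) {
            call.execute();
        }
    }
}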

+ 13 - 0
src/main/resources/application.yml

@@ -10,6 +10,19 @@ spring:
       factory_class: org.hibernate.cache.ehcache.SingletonEhCacheRegionFactory
     hbm2ddl:
      auto: update
+ kafka:
+  bootstrap-servers: 10.10.100.11:9292,10.10.100.12:9292,10.10.100.13:9292,10.10.100.14:9292,10.10.100.15:9292,10.10.100.16:9292
+#  consumer
+  consumer:
+   key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+   value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+   max-poll-records: 2000
+   auto-offset-reset: latest
+   group-id: PUBLIC
+
+kafka:
+ consumer:
+  max-poll-interval-millis: 5000
 
 security:
  basic: