Sfoglia il codice sorgente

新版合并逻辑

xufei 3 anni fa
parent
commit
f2535a79d6

+ 16 - 0
pom.xml

@@ -151,6 +151,22 @@
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-data-mongodb</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.bbossgroups.plugins</groupId>
+            <artifactId>bboss-elasticsearch-rest-jdbc</artifactId>
+            <version>6.3.9</version>
+        </dependency>
+        <dependency>
+            <groupId>com.bbossgroups.plugins</groupId>
+            <artifactId>bboss-elasticsearch-spring-boot-starter</artifactId>
+            <version>6.3.9</version>
+        </dependency>
     </dependencies>
 
     <dependencyManagement>

+ 0 - 2
src/main/java/com/winhc/Application.java

@@ -5,11 +5,9 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.builder.SpringApplicationBuilder;
 import org.springframework.boot.web.servlet.ServletComponentScan;
 import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
-//import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
 import org.springframework.scheduling.annotation.EnableScheduling;
 
 @SpringBootApplication
-//@EnableDiscoveryClient
 @ServletComponentScan
 @EnableScheduling
 public class Application extends SpringBootServletInitializer {

+ 15 - 0
src/main/java/com/winhc/common/constant/Base.java

@@ -0,0 +1,15 @@
+package com.winhc.common.constant;
+
+
+import java.util.concurrent.ForkJoinPool;
+
+/**
+ * @author π
+ * @Description:
+ * @date 2022/1/10 11:43
+ */
public class Base {
    /**
     * Shared pool for the ES update-by-query tasks (see KafkaConsumerPersonIdUpdate).
     * Sized at 3x cores because the tasks are HTTP-bound rather than CPU-bound.
     * Declared final: these are global constants and must never be reassigned.
     */
    public static final ForkJoinPool TASK_FJ_POOL = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * 3);
    /** Mongo collection name buffering person ids that are waiting to be merged. */
    public static final String PID_WAIT_MERGE_V9 = "pid_wait_merge_v9";

}

+ 4 - 0
src/main/java/com/winhc/config/ConfigConstant.java

@@ -16,5 +16,9 @@ public class ConfigConstant {
     @Value("${spring.kafka.topic_node_relation_union}")
     public String topic_node_relation_union;
 
+    @Value("${spring.kafka.topic_pid_update_v1}")
+    public String topic_pid_update_v1;
 
+    @Value("${spring.kafka.topic_pid_update_v2}")
+    public String topic_pid_update_v2;
 }

+ 35 - 0
src/main/java/com/winhc/config/MultiESSTartConfigurer.java

@@ -0,0 +1,35 @@
+package com.winhc.config;
+
+import org.frameworkset.elasticsearch.boot.BBossESStarter;
+import org.frameworkset.elasticsearch.client.ClientInterface;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+
/**
 * Registers two bboss Elasticsearch starters as Spring beans — one for an ES6
 * cluster and one for an ES5 cluster — each configured from its own property
 * prefix (spring.elasticsearch.bboss.es6 / .es5 in application-dev.properties).
 */
@Configuration
public class MultiESSTartConfigurer {
    // Primary starter (ES6): injected wherever an unqualified BBossESStarter is requested.
    @Primary
    @Bean(initMethod = "start")
    @ConfigurationProperties("spring.elasticsearch.bboss.es6")
    public BBossESStarter bbossESStarterEs6(){
        return new BBossESStarter();
    }

    // ES5 starter: consumers select it explicitly via @Qualifier("bbossESStarterEs5").
    @Bean(initMethod = "start")
    @ConfigurationProperties("spring.elasticsearch.bboss.es5")
    public BBossESStarter bbossESStarterEs5(){
        return new BBossESStarter();
    }


    // Convenience REST client; the parameter resolves to the @Primary (ES6) starter.
    @Bean
    public ClientInterface bbossESClient(BBossESStarter bBossESStarter) {
        return bBossESStarter.getRestClient();
    }

}

+ 2 - 4
src/main/java/com/winhc/kafka/KafkaProduce.java

@@ -31,17 +31,15 @@ import java.util.Optional;
 @Service
 @AllArgsConstructor
 public class KafkaProduce {
-//    @Autowired
-//    KafkaTemplate kafkaTemplate;
 
     private final KafkaTemplate<String, String> kafkaTemplate;
 
     public void produce(String topic,String message) {
         ListenableFuture<SendResult<String, String>> send = kafkaTemplate.send(topic, message);
         send.addCallback(o -> {
-            System.out.println("send-消息发送成功:" + message);
+            log.info("send success:" + message);
         }, throwable -> {
-            System.out.println("消息发送失败:" + message);
+            log.error("send error:" + message);
         });
     }
 

+ 22 - 1
src/main/java/com/winhc/kafka/consumer/KafkaConsumerNeo4jV2.java

@@ -1,19 +1,25 @@
 package com.winhc.kafka.consumer;
 
 import com.winhc.common.enums.CompanyEnum;
+import com.winhc.config.ConfigConstant;
 import com.winhc.db.mongodb.dataobject.NodeRelationError;
 import com.winhc.db.mongodb.repository.NodeRelatonErrorRepository;
+import com.winhc.kafka.KafkaProduce;
+import com.winhc.service.PidToMongoService;
 import com.winhc.service.RelationService;
 import com.winhc.service.SendMergeService;
 import com.winhc.utils.CompanyUtils;
 import lombok.AllArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
+import org.springframework.data.mongodb.core.MongoTemplate;
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.listener.ConsumerAwareListenerErrorHandler;
 import org.springframework.stereotype.Service;
+
 import java.util.List;
 import java.util.Map;
 
@@ -32,6 +38,13 @@ public class KafkaConsumerNeo4jV2 {
     NodeRelatonErrorRepository nodeRelatonErrorRepository;
     @Autowired
     SendMergeService sendMergeService;
+    @Autowired
+    PidToMongoService pidToMongoService;
+    @Autowired
+    KafkaProduce kafkaProduce;
+    @Autowired
+    ConfigConstant configConstant;
+
 
     @KafkaListener(id = "${spring.kafka.topic_node_relation_union}"
             , topics = "${spring.kafka.topic_node_relation_union}"
@@ -46,8 +59,11 @@ public class KafkaConsumerNeo4jV2 {
         this.map.get(CompanyEnum.TopicType.STAFF_RELATION.VALUE).save(CompanyUtils.filterList(listMap, CompanyEnum.TopicType.STAFF_RELATION.CODE));
         this.map.get(CompanyEnum.TopicType.PERSON_NODE_LABEL.VALUE).save(CompanyUtils.filterList(listMap, CompanyEnum.TopicType.PERSON_NODE_LABEL.CODE));
         this.map.get(CompanyEnum.TopicType.PERSON_MERGE_V2.VALUE).save(CompanyUtils.filterList(listMap, CompanyEnum.TopicType.PERSON_MERGE_V2.CODE));
+
+        //TODO: buffer for ~2 minutes (via the Mongo wait-merge collection) before triggering the merge job
+        this.pidToMongoService.save(CompanyUtils.getMergeIds2(listMap));
         //发送合并人员kafka
-        this.sendMergeService.save(CompanyUtils.getMergeIds(listMap));
+        //this.sendMergeService.save(CompanyUtils.getMergeIds(listMap));
         this.map.get(CompanyEnum.TopicType.NODE_RELATION_SUCCESS_STATUS.VALUE).save(CompanyUtils.filterList(listMap, CompanyEnum.TopicType.NODE_RELATION_SUCCESS_STATUS.CODE));
 
     }
@@ -59,9 +75,14 @@ public class KafkaConsumerNeo4jV2 {
     @Bean("consumerAwareListenerErrorHandlerV2")
     public ConsumerAwareListenerErrorHandler dealError() {
         return (message, e, consumer) -> {
+            List<String> list = CompanyUtils.toMessage((List<ConsumerRecord>) message.getPayload());
+            for (String msg : list) {
+                kafkaProduce.produce(configConstant.topic_node_relation_union, msg);
+            }
             List<NodeRelationError> nodeRelationErrors = CompanyUtils.toMessage((List<ConsumerRecord>) message.getPayload(), e.getMessage());
             nodeRelatonErrorRepository.saveAll(nodeRelationErrors);
             log.error("consumer error: save mongo size: {} , message: {}", nodeRelationErrors.size(), e.getMessage());
+
             return null;
         };
     }

+ 166 - 0
src/main/java/com/winhc/kafka/consumer/KafkaConsumerPersonIdUpdate.java

@@ -0,0 +1,166 @@
+package com.winhc.kafka.consumer;
+
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONArray;
+import com.alibaba.fastjson.JSONAware;
+import com.alibaba.fastjson.JSONObject;
+import com.mongodb.client.MongoCollection;
+import com.winhc.common.constant.Base;
+import com.winhc.config.ConfigConstant;
+import com.winhc.kafka.KafkaProduce;
+import com.winhc.utils.CompanyUtils;
+import com.winhc.utils.ESUtils;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.bson.Document;
+import org.elasticsearch.common.collect.Tuple;
+import org.frameworkset.elasticsearch.boot.BBossESStarter;
+import org.frameworkset.elasticsearch.client.ClientInterface;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.kafka.listener.ConsumerAwareListenerErrorHandler;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.util.*;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * @author π
+ * @Description:pid变化更新映射
+ * @date 2021/1/8 16:04
+ */
+@Slf4j
+@Service
+@AllArgsConstructor
+public class KafkaConsumerPersonIdUpdate {
+
+
+    @Autowired
+    @Qualifier("bbossESStarterEs5")
+    private BBossESStarter bbossESStarterEs5;
+    private ClientInterface restClient;
+    @Autowired
+    KafkaProduce kafkaProduce;
+    private final MongoTemplate mongoTemplate;
+    @Autowired
+    ConfigConstant configConstant;
+
+    @PostConstruct
+    public void init() {
+        restClient = bbossESStarterEs5.getRestClient("es5");
+    }
+
+    @KafkaListener(id = "${spring.kafka.topic_pid_update_v1}"
+            , topics = "${spring.kafka.topic_pid_update_v1}"
+            , groupId = "${spring.kafka.consumer.group-id}", containerFactory = "containerFactory", errorHandler = "handlerV1")
+    public void updatePid(List<String> messages) {
+        List<Tuple<Map<String, String>, JSONObject>> list = messages.stream()
+                .flatMap(m ->
+                        JSONArray.parseArray(m).stream()
+                                .map(x -> {
+                                    JSONObject j = (JSONObject) x;
+                                    Map<String, String> map = new HashMap<>();
+                                    map.put("old_human_pid", j.getOrDefault("old_human_pid", "").toString());
+                                    map.put("new_human_pid", j.getOrDefault("new_human_pid", "").toString());
+                                    return Tuple.tuple(map, j);
+                                })).filter(distinctByKey(m -> m.v1().get("old_human_pid")))
+                .collect(Collectors.toList());
+        try {
+            CompletableFuture.allOf(list.stream().map(this::update).toArray(CompletableFuture[]::new)).get();
+        } catch (Exception e) {
+            list.stream().map(Tuple::v2).forEach(d -> {
+                sendMessage(JSON.toJSONString(Collections.singletonList(d)), configConstant.topic_pid_update_v1);
+            });
+        }
+    }
+
+    private CompletableFuture<Void> update(Tuple<Map<String, String>, JSONObject> update) {
+        String old_human_pid = update.v1().get("old_human_pid");
+        String new_human_pid = update.v1().get("new_human_pid");
+        if (StringUtils.isBlank(old_human_pid) || StringUtils.isBlank(new_human_pid)) {
+            return CompletableFuture.completedFuture(null);
+        }
+
+        CompletableFuture<Boolean> v1 = updateByQuery(old_human_pid, new_human_pid, "company-human-pid-mapping-v1", ESUtils.updateHumanMappingId(old_human_pid, new_human_pid));
+        CompletableFuture<Boolean> v2 = updateByQuery(old_human_pid, new_human_pid, "company-human-relation-v1", ESUtils.updateBossId(old_human_pid));
+
+        return CompletableFuture.allOf(v1, v2).thenApplyAsync(x -> {
+            try {
+                if (v1.get() && v2.get()) {
+                    sendMessage(update.v2().toJSONString(), configConstant.topic_pid_update_v2);
+                } else {
+                    sendMessage(JSON.toJSONString(Collections.singletonList(update.v2())), configConstant.topic_pid_update_v1);
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log.error("update es error old_human_pid :{} ,new_human_pid:{} ,message:{}", old_human_pid, new_human_pid, e.getMessage());
+                sendMessage(JSON.toJSONString(Collections.singletonList(update.v2())), configConstant.topic_pid_update_v1);
+            }
+            return x;
+        });
+
+    }
+
+    private CompletableFuture<Boolean> updateByQuery(String old_human_pid, String new_human_pid, String index, String query) {
+        return CompletableFuture.supplyAsync(() -> {
+            try {
+                String res = restClient.updateByQuery(index + "/_update_by_query?conflicts=proceed&refresh=true", query);
+                if (res.contains("\"version_conflicts\":0")) {
+                    log.info("update info old_human_pid : {} ,new_human_pid : {} , res : {}", old_human_pid, new_human_pid, res);
+                    return true;
+                } else {
+                    return false;
+                }
+            } catch (Exception e) {
+                return false;
+            }
+        }, Base.TASK_FJ_POOL);
+    }
+
+    private void sendMessage(String message, String topic) {
+        kafkaProduce.produce(topic, message);
+
+    }
+
+    public static <T> Predicate<T> distinctByKey(Function<? super T, ?> keyExtractor) {
+        Set<Object> seen = ConcurrentHashMap.newKeySet();
+        return t -> seen.add(keyExtractor.apply(t));
+    }
+
+
+    /**
+     * 因为手动确认消费,若消费失败,记录重刷
+     */
+    @Bean("handlerV1")
+    public ConsumerAwareListenerErrorHandler dealError() {
+        return (message, e, consumer) -> {
+            List<String> list = CompanyUtils.toMessage((List<ConsumerRecord>) message.getPayload());
+            MongoCollection<Document> col = mongoTemplate.getCollection("xf_ng_rt_pid_change_error");
+            for (String msg : list) {
+                Document document = new Document();
+                document.put("message", msg);
+                try {
+                    col.insertOne(document);
+                } catch (Exception e1) {
+                    log.error(e1.getMessage(), e1);
+                }
+            }
+            log.error("kafkaConsumerPersonIdUpdate error: {}", e.toString());
+            return null;
+        };
+    }
+
+}

+ 43 - 0
src/main/java/com/winhc/pojo/MergePerson.java

@@ -0,0 +1,43 @@
+package com.winhc.pojo;
+
+import com.alibaba.fastjson.annotation.JSONField;
+import com.winhc.utils.DateUtil;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.util.Date;
+
/**
 * Change record produced when two person nodes are merged in Neo4j; consumed by
 * the pid-update pipeline, which replaces old_human_pid with new_human_pid.
 *
 * @author π
 * @date 2022/1/4 15:45
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class MergePerson {
    private String new_human_pid;
    private String new_human_name;
    private String old_human_pid;
    private String old_human_name;

    // De-duplication key (new pid + old pid); excluded from fastjson output.
    @JSONField(serialize = false)
    public String getId() {
        return new_human_pid + old_human_pid;
    }

    // Serialized as "time": stamps the record with the moment of serialization.
    public String getTime() {
        return DateUtil.formatDate_YYYY_MM_DD_HH_MM_SS(new Date());
    }

    @Override
    public String toString() {
        return "MergePerson{" +
                "new_human_pid='" + new_human_pid + '\'' +
                ", new_human_name='" + new_human_name + '\'' +
                ", old_human_pid='" + old_human_pid + '\'' +
                ", old_human_name='" + old_human_name + '\'' +
                '}';
    }
}

+ 13 - 0
src/main/java/com/winhc/service/PidToMongoService.java

@@ -0,0 +1,13 @@
+package com.winhc.service;
+
+import java.util.List;
+import java.util.Map;
+
/**
 * Buffers person-id records into Mongo (staging area for the delayed merge flow).
 *
 * @author π
 * @date 2021/11/19 15:06
 */
public interface PidToMongoService {
    /**
     * Persists the given documents.
     *
     * @param messages one map per record to store
     * @return number of records written
     */
    Integer save(List<Map<String,Object>> messages);
}

+ 16 - 2
src/main/java/com/winhc/service/impl/PersonMergeV2Impl.java

@@ -1,16 +1,21 @@
 package com.winhc.service.impl;
 
 import com.winhc.common.enums.CompanyEnum;
+import com.winhc.config.ConfigConstant;
+import com.winhc.kafka.KafkaProduce;
+import com.winhc.pojo.MergePerson;
 import com.winhc.service.RelationService;
 import com.winhc.utils.CompanyUtils;
 import lombok.AllArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
 import org.neo4j.driver.Driver;
 import org.neo4j.driver.Session;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Service;
 
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -28,6 +33,10 @@ public class PersonMergeV2Impl implements RelationService {
     @Autowired
     @Qualifier("DriverV1")
     Driver driver;
+    @Autowired
+    KafkaProduce kafkaProduce;
+    @Autowired
+    ConfigConstant configConstant;
 
     @Override
     public String save(List<Map<String, Object>> batch_list) {
@@ -48,13 +57,18 @@ public class PersonMergeV2Impl implements RelationService {
                         "CALL apoc.merge.relationship.eager(first_node, TYPE(r), properties(r),{}, x,{}) YIELD rel\n" +
                         "SET first_node:" + CompanyUtils.getIncrPersonLabelV2("新增", CompanyEnum.SPLIT_HOUR) + "\n" +
                         "SET other_node:" + CompanyUtils.getIncrPersonLabelV2("删除", CompanyEnum.SPLIT_HOUR) + "\n" +
-                        "DELETE r";
+                        "DELETE r" + "\n" +
+                        "RETURN first_node.person_id as new_human_pid,first_node.name as new_human_name,other_node.person_id as old_human_pid,other_node.name as old_human_name" + "\n";
         log.info("consumer size: {}, cql:{}", batch_list.size(), cql);
-        String data = CompanyUtils.writeNeo4j(session, cql, new HashMap<String, Object>() {{
+        String data = CompanyUtils.writeNeo4j2(session, cql, new HashMap<String, Object>() {{
             put("batch_list", batch_list);
         }});
         session.close();
         log.info("class:{} | save size:{} | cost:{}", PersonMergeV2Impl.class.getSimpleName(), batch_list.size(), (System.currentTimeMillis() - start));
+        //发送变更人员记录
+        if (StringUtils.isNotBlank(data)) {
+            kafkaProduce.produce(configConstant.topic_pid_update_v1, data);
+        }
         return data;
     }
 }

+ 62 - 0
src/main/java/com/winhc/service/impl/PidToMongoServiceImpl.java

@@ -0,0 +1,62 @@
+package com.winhc.service.impl;
+
+import com.alibaba.fastjson.JSON;
+import com.mongodb.BulkWriteError;
+import com.mongodb.BulkWriteException;
+import com.mongodb.bulk.BulkWriteResult;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.model.*;
+import com.winhc.common.constant.Base;
+import com.winhc.service.PidToMongoService;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.stream.Collectors;
+
+/**
+ * @author π
+ * @Description:案件-新增节点
+ * @date 2021/5/17 10:03
+ */
+@Slf4j
+@Service("pidToMongoServiceImpl")
+@AllArgsConstructor
+public class PidToMongoServiceImpl implements PidToMongoService {
+
+    private final MongoTemplate mongoTemplate;
+
+
+    @Override
+    public Integer save(List<Map<String, Object>> ids) {
+        BulkWriteResult bulkWriteResult = null;
+        if (ids.size() > 0) {
+            MongoCollection<Document> pid = mongoTemplate.getCollection(Base.PID_WAIT_MERGE_V9);
+//            List<Document> documents = messages.stream().map(Document::new).collect(Collectors.toList());
+//            pid.insertMany(documents, new InsertManyOptions().ordered(false));
+            List<WriteModel<Document>> updateDocuments = new ArrayList<>();
+            ids.forEach(m -> {
+                Document filterDocument = new Document("_id", m.get("_id"));
+                Document updateDocument = new Document("$set", new Document(m));
+                UpdateOptions updateOptions = new UpdateOptions().upsert(true).bypassDocumentValidation(true);
+                updateDocuments.add(new UpdateOneModel<>(filterDocument, updateDocument, updateOptions));
+            });
+            try {
+                bulkWriteResult = pid.bulkWrite(updateDocuments, new BulkWriteOptions().ordered(false).bypassDocumentValidation(true));
+            } catch (BulkWriteException e) {
+                List<Map<String, Object>> collect = e.getWriteErrors().stream()
+                        .map(b -> ids.get(b.getIndex()))
+                        .collect(Collectors.toList());
+                log.error("upset error records : {}", JSON.toJSONString(collect));
+            }
+        }
+        return bulkWriteResult != null ? bulkWriteResult.getModifiedCount() : 0;
+    }
+}

+ 63 - 0
src/main/java/com/winhc/task/AsynMergePersonTask.java

@@ -0,0 +1,63 @@
+package com.winhc.task;
+
+import com.alibaba.fastjson.JSON;
+import com.mongodb.client.FindIterable;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.model.Filters;
+import com.winhc.common.constant.Base;
+import com.winhc.config.ConfigConstant;
+import com.winhc.kafka.KafkaProduce;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+import java.util.ArrayList;
+import java.util.List;
+import static com.mongodb.client.model.Filters.in;
+import static com.winhc.utils.DateUtil.getMinuteTime;
+
+/**
+ * @author π
+ * @Description:拉取合并人员pid
+ * @date 2021/6/22 17:07
+ */
+
+@Component
+@Slf4j
+@EnableScheduling
+@AllArgsConstructor
+public class AsynMergePersonTask {
+
+    private final MongoTemplate mongoTemplate;
+    private final KafkaProduce kafkaProduce;
+    @Autowired
+    ConfigConstant configConstant;
+
+    @Scheduled(cron = "*/10 * * * * ?")
+    //@Scheduled(cron = "0 /2 * * * ? ")
+    public void start() throws InterruptedException {
+        MongoCollection<Document> collection = mongoTemplate.getCollection(Base.PID_WAIT_MERGE_V9);
+        while (true) {
+            //1.查询mongo 2分钟之前数据
+            Bson query = Filters.and(Filters.lte("update_time", getMinuteTime(-2)));
+            FindIterable<Document> documents = collection.find(query).batchSize(200).noCursorTimeout(true);
+            List<String> ids = new ArrayList<>();
+            for (Document d : documents) {
+                ids.add(d.get("_id").toString());
+                kafkaProduce.produce(configConstant.topic_node_relation_union, JSON.toJSONString(d.get("data")));
+            }
+            //2.成功后删除
+            if (!ids.isEmpty()) {
+                long deleteResult = collection.deleteMany(in("_id", ids)).getDeletedCount();
+                log.info("deleted size : {} ,ids : {}", deleteResult, ids);
+            } else {
+                break;
+            }
+        }
+    }
+}

+ 42 - 0
src/main/java/com/winhc/utils/CompanyUtils.java

@@ -2,19 +2,24 @@ package com.winhc.utils;
 
 import cn.hutool.core.lang.Tuple;
 import cn.hutool.json.JSONUtil;
+import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.serializer.SerializerFeature;
 import com.google.common.collect.ImmutableMap;
 import com.winhc.common.enums.CompanyEnum;
 import com.winhc.db.mongodb.dataobject.NodeRelationError;
+import com.winhc.pojo.MergePerson;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.neo4j.driver.Record;
 import org.neo4j.driver.Result;
 import org.neo4j.driver.Session;
 
 import java.util.*;
 import java.util.stream.Collectors;
 
+import static com.winhc.utils.DateUtil.formatDate_YYYY_MM_DD_HH_MM_SS;
+
 /**
  * @author π
  * @Description:
@@ -90,6 +95,26 @@ public class CompanyUtils {
                 }).collect(Collectors.toList());
     }
 
+    public static List<Map<String, Object>> getMergeIds2(List<Map<String, Object>> list) {
+        return list.stream()
+                .filter(r -> r.getOrDefault("start_id", "0").toString().length() == 33)
+                .collect(Collectors.toMap(t -> t.getOrDefault("start_id", "0").toString(), t -> t, (n, o) -> n))
+                .values().stream().map(x -> {
+                    Map<String, Object> m = x;
+                    String person_id = m.get("start_id").toString();
+                    String person_name = m.get("start_name").toString();
+                    return ImmutableMap.<String, Object>of(
+                            "_id", person_id,
+                            "update_time", formatDate_YYYY_MM_DD_HH_MM_SS(),
+                            "data", ImmutableMap.of(
+                                    "person_id", person_id
+                                    , "name", person_name
+                                    , "topic_type", "800"
+                            )
+                    );
+                }).collect(Collectors.toList());
+    }
+
     public static List<NodeRelationError> toMessage(List<ConsumerRecord> records, String errorMessage) {
         return records.stream().filter(r -> (r != null && r.value() != null)).map(r -> {
             String consumerMessage = r.value().toString();
@@ -122,6 +147,18 @@ public class CompanyUtils {
         return data;
     }
 
+    public static String writeNeo4j2(Session session, String cql, HashMap<String, Object> parameters) {
+        List<Record> dataList = session.writeTransaction(tx -> {
+            Result result = tx.run(cql, parameters);
+            return result.list();
+        });
+        Map<String, MergePerson> data = dataList.stream()
+                .map(record -> JSON.parseObject(JSON.toJSONString(record.asMap()), MergePerson.class))
+                .collect(Collectors.toMap(MergePerson::getId, t -> t, (n, o) -> n));
+        Collection<MergePerson> values = data.values();
+        return values.size() > 0 ? JSONObject.toJSONString(data.values()) : null;
+    }
+
     public static String getIncrPersonLabel() {
         return CompanyEnum.Lable.新增.code + DateUtil.getDateBefore(-1).replace("-", "");
     }
@@ -141,6 +178,11 @@ public class CompanyUtils {
         return label + DateUtil.getDateBefore(gap).replace("-", "");
     }
 
    /** True when the JVM's "os.name" system property contains "Windows". */
    public static Boolean isWindows() {
        return System.getProperty("os.name").contains("Windows");
    }
+
+
     public static void main(String[] args) {
         System.out.println(getIncrPersonLabelV2("新增", 10));
         System.out.println(getIncrPersonLabelV2("新增"));

+ 19 - 4
src/main/java/com/winhc/utils/DateUtil.java

@@ -176,6 +176,10 @@ public class DateUtil {
         return v;
     }
 
    /** Formats the current time with the yyyy-MM-dd HH:mm:ss overload below. */
    public static final String formatDate_YYYY_MM_DD_HH_MM_SS() {
        return formatDate_YYYY_MM_DD_HH_MM_SS(new Date());
    }
+
     /**
      * 日期字符串之间的类型转换.
      * <p>
@@ -383,9 +387,6 @@ public class DateUtil {
         return calendar.getTime();
     }
 
-
-  
-  
     /**
      * 计算时间差值,单位为天 去除时间后的计算结果+1,例如20180410
      * 
@@ -493,8 +494,22 @@ public class DateUtil {
         return formatDate(cal.getTime(), FORMAT_YYYY_MM_DD);
     }
 
    /**
     * Returns the current time shifted by the given number of minutes,
     * formatted as yyyy-MM-dd HH:mm:ss. Negative values go back in time.
     *
     * @param minute offset in minutes (may be negative)
     * @return the shifted, formatted timestamp
     */
    public static String getMinuteTime(int minute) {
        Calendar beforeTime = Calendar.getInstance();
        beforeTime.add(Calendar.MINUTE, minute);
        return new SimpleDateFormat(FORMAT_YYYY_MM_DD_HH_MM_SS).format(beforeTime.getTime());
    }
+
+
+
    /** Ad-hoc manual check of the date helpers. */
    public static void main(String[] args) throws Exception {
        System.out.println(getMinuteTime(-2));
        System.out.println(formatDate_YYYY_MM_DD_HH_MM_SS());
    }
 
 }

+ 43 - 0
src/main/java/com/winhc/utils/ESUtils.java

@@ -0,0 +1,43 @@
+package com.winhc.utils;
+
+/**
+ * @author π
+ * @Description:
+ * @date 2022/1/10 14:13
+ */
/**
 * Builders for the Elasticsearch update-by-query DSL used when a person id changes.
 * Interpolated ids are JSON-escaped: the original concatenated them raw, so a pid
 * containing a quote or backslash produced a malformed request body.
 *
 * @author π
 * @date 2022/1/10 14:13
 */
public class ESUtils {
    /**
     * DSL rewriting every mapping doc whose human_pid equals old_human_pid so
     * that it carries new_human_pid instead (new value passed as a script param).
     */
    public static String updateHumanMappingId(String old_human_pid, String new_human_pid) {
        String dsl = "{\n" +
                "  \"query\": {\n" +
                "    \"term\": {\n" +
                "      \"human_pid\": {\n" +
                "        \"value\": \"" + escapeJson(old_human_pid) + "\"\n" +
                "      }\n" +
                "    }\n" +
                "  },\n" +
                "  \"script\": {\n" +
                "    \"inline\": \"ctx._source['human_pid'] = params.new_human_pid \",\n" +
                "    \"params\": {\n" +
                "      \"new_human_pid\": \"" + escapeJson(new_human_pid) + "\"\n" +
                "    }\n" +
                "  }\n" +
                "}";
        return dsl;
    }

    /**
     * DSL soft-deleting the relation doc whose _id equals old_human_pid by
     * setting its deleted flag to '9'.
     */
    public static String updateBossId(String old_human_pid) {
        String dsl = "{\n" +
                "  \"query\": {\n" +
                "    \"term\": {\n" +
                "      \"_id\": {\n" +
                "        \"value\": \"" + escapeJson(old_human_pid) + "\"\n" +
                "      }\n" +
                "    }\n" +
                "  },\n" +
                "  \"script\": {\n" +
                "    \"inline\": \"ctx._source['deleted'] = '9' \"\n" +
                "  }\n" +
                "}";
        return dsl;
    }

    /** Escapes backslash and double-quote so an id cannot break the JSON body. */
    private static String escapeJson(String value) {
        return value.replace("\\", "\\\\").replace("\"", "\\\"");
    }
}

+ 76 - 3
src/main/resources/application-dev.properties

@@ -10,13 +10,14 @@ spring.data.neo4j.username.v1=neo4j
 spring.data.neo4j.password.v1=neo4j168
 #spring.data.neo4j.uri.v1=bolt://139.196.165.100:7687
 #spring.data.neo4j.uri.v1=bolt://192.168.2.57:7687
-spring.data.neo4j.uri.v1=bolt://139.224.197.164:7687
+#spring.data.neo4j.uri.v1=bolt://139.224.197.164:7687
+spring.data.neo4j.uri.v1=bolt://127.0.0.1:7687
 
 #Neo4j配置(第二台机器)
 spring.data.neo4j.username.v2=neo4j
 spring.data.neo4j.password.v2=neo4j168
-spring.data.neo4j.uri.v2=bolt://139.224.197.164:7687
-#spring.data.neo4j.uri.v2=bolt://127.0.0.1:7687
+#spring.data.neo4j.uri.v2=bolt://139.224.197.164:7687
+spring.data.neo4j.uri.v2=bolt://127.0.0.1:7687
 
 #数据库uri地址
 #spring.data.neo4j.uri=http://10.29.26.76:7474
@@ -41,6 +42,9 @@ scheduling.enabled = false
 spring.kafka.bootstrap-servers=47.101.221.131:9092
 #spring.kafka.bootstrap-servers=192.168.4.239:9092,192.168.4.241:9092,192.168.4.240:9092
 #topic
+spring.kafka.topic_pid_update_v1=test5
+spring.kafka.topic_pid_update_v2=test6
+
 spring.kafka.topic_node_relation_union=test3
 spring.kafka.topic_person_companys=inc_person_companys_dev
 spring.kafka.topic_person_merge=inc_person_merge_dev
@@ -77,3 +81,72 @@ spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.S
 
 #mongo
 spring.data.mongodb.uri=mongodb://itslaw:itslaw_168@dds-uf6ff5dfd9aef3641601-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6ff5dfd9aef3642555-pub.mongodb.rds.aliyuncs.com:3717/itslaw?replicaSet=mgset-6501997
+
+
+
+#bboss 配置
+spring.elasticsearch.bboss.es6.name = es6
+##es6
+spring.elasticsearch.bboss.es6.elasticUser=elastic
+spring.elasticsearch.bboss.es6.elasticPassword=elastic_168
+
+spring.elasticsearch.bboss.es6.elasticsearch.rest.hostNames=es-cn-oew22t8bw002iferu.public.elasticsearch.aliyuncs.com:9200
+#spring.elasticsearch.bboss.es6.elasticsearch.rest.hostNames=es-cn-oew22t8bw002iferu.elasticsearch.aliyuncs.com:9200
+spring.elasticsearch.bboss.es6.elasticsearch.dateFormat=yyyy.MM.dd
+spring.elasticsearch.bboss.es6.elasticsearch.timeZone=Asia/Shanghai
+spring.elasticsearch.bboss.es6.elasticsearch.ttl=2d
+#在控制台输出脚本调试开关showTemplate,false关闭,true打开,同时log4j至少是info级别
+spring.elasticsearch.bboss.es6.elasticsearch.showTemplate=true
+spring.elasticsearch.bboss.es6.elasticsearch.discoverHost=false
+
+##es6连接池配置
+spring.elasticsearch.bboss.es6.http.timeoutConnection = 50000
+spring.elasticsearch.bboss.es6.http.timeoutSocket = 50000
+spring.elasticsearch.bboss.es6.http.connectionRequestTimeout=50000
+spring.elasticsearch.bboss.es6.http.retryTime = 1
+spring.elasticsearch.bboss.es6.http.maxLineLength = -1
+spring.elasticsearch.bboss.es6.http.maxHeaderCount = 200
+spring.elasticsearch.bboss.es6.http.maxTotal = 400
+spring.elasticsearch.bboss.es6.http.defaultMaxPerRoute = 200
+spring.elasticsearch.bboss.es6.http.keystore =
+spring.elasticsearch.bboss.es6.http.keyPassword =
+# ssl 主机名称校验,是否采用es6配置,
+# 如果指定为es6,就采用DefaultHostnameVerifier,否则采用 SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER
+spring.elasticsearch.bboss.es6.http.hostnameVerifier =
+
+
+
+
+##es5集群配置
+spring.elasticsearch.bboss.es5.name = es5
+spring.elasticsearch.bboss.es5.elasticUser=elastic
+spring.elasticsearch.bboss.es5.elasticPassword=elastic_168
+
+spring.elasticsearch.bboss.es5.elasticsearch.rest.hostNames=es-cn-0pp0r32zf000ipovd.public.elasticsearch.aliyuncs.com:9200
+#spring.elasticsearch.bboss.es5.elasticsearch.rest.hostNames=es-cn-0pp0r32zf000ipovd.elasticsearch.aliyuncs.com:9200
+
+#spring.elasticsearch.bboss.default.elasticsearch.rest.hostNames=10.180.211.27:9280,10.180.211.27:9281,10.180.211.27:9282
+spring.elasticsearch.bboss.es5.elasticsearch.dateFormat=yyyy.MM.dd
+spring.elasticsearch.bboss.es5.elasticsearch.timeZone=Asia/Shanghai
+spring.elasticsearch.bboss.es5.elasticsearch.ttl=2d
+#在控制台输出脚本调试开关showTemplate,false关闭,true打开,同时log4j至少是info级别
+spring.elasticsearch.bboss.es5.elasticsearch.showTemplate=true
+spring.elasticsearch.bboss.es5.elasticsearch.discoverHost=false
+
+##es5集群对应的连接池配置
+spring.elasticsearch.bboss.es5.http.timeoutConnection = 50000
+spring.elasticsearch.bboss.es5.http.timeoutSocket = 50000
+spring.elasticsearch.bboss.es5.http.connectionRequestTimeout=50000
+spring.elasticsearch.bboss.es5.http.retryTime = 1
+spring.elasticsearch.bboss.es5.http.maxLineLength = -1
+spring.elasticsearch.bboss.es5.http.maxHeaderCount = 200
+spring.elasticsearch.bboss.es5.http.maxTotal = 400
+spring.elasticsearch.bboss.es5.http.defaultMaxPerRoute = 200
+# https证书配置
+spring.elasticsearch.bboss.es5.http.keystore =
+spring.elasticsearch.bboss.es5.http.keyPassword =
+# ssl 主机名称校验,是否采用default配置,
+# 如果指定为default,就采用DefaultHostnameVerifier,否则采用 SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER
+spring.elasticsearch.bboss.es5.http.hostnameVerifier =
+# dsl配置文件热加载扫描时间间隔,毫秒为单位,默认5秒扫描一次,<= 0时关闭扫描机制
+spring.elasticsearch.bboss.dslfile.refreshInterval = -1

+ 71 - 0
src/main/resources/application-prd.properties

@@ -37,6 +37,9 @@ spring.kafka.bootstrap-servers=192.168.4.239:9092,192.168.4.241:9092,192.168.4.2
 
 
 #topic
+spring.kafka.topic_pid_update_v1=ng_rt_pid_change_v1
+spring.kafka.topic_pid_update_v2=ng_rt_pid_change_v2
+
 spring.kafka.topic_node_relation_union=ng_graph_pre
 spring.kafka.topic_person_companys=inc_person_companys
 spring.kafka.topic_person_merge=inc_person_merge
@@ -71,3 +74,71 @@ spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.S
 #mongo
 #spring.data.mongodb.uri=mongodb://itslaw:itslaw_168@dds-uf6ff5dfd9aef3641601-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6ff5dfd9aef3642555-pub.mongodb.rds.aliyuncs.com:3717/itslaw?replicaSet=mgset-6501997&maxIdleTimeMS=3000
 spring.data.mongodb.uri=mongodb://itslaw:itslaw_168@dds-uf6ff5dfd9aef3641.mongodb.rds.aliyuncs.com:3717,dds-uf6ff5dfd9aef3642.mongodb.rds.aliyuncs.com:3717/itslaw?replicaSet=mgset-6501997
+
+
+#bboss 配置
+spring.elasticsearch.bboss.es6.name = es6
+##es6
+spring.elasticsearch.bboss.es6.elasticUser=elastic
+spring.elasticsearch.bboss.es6.elasticPassword=elastic_168
+
+#spring.elasticsearch.bboss.es6.elasticsearch.rest.hostNames=es-cn-oew22t8bw002iferu.public.elasticsearch.aliyuncs.com:9200
+spring.elasticsearch.bboss.es6.elasticsearch.rest.hostNames=es-cn-oew22t8bw002iferu.elasticsearch.aliyuncs.com:9200
+spring.elasticsearch.bboss.es6.elasticsearch.dateFormat=yyyy.MM.dd
+spring.elasticsearch.bboss.es6.elasticsearch.timeZone=Asia/Shanghai
+spring.elasticsearch.bboss.es6.elasticsearch.ttl=2d
+#在控制台输出脚本调试开关showTemplate,false关闭,true打开,同时log4j至少是info级别
+spring.elasticsearch.bboss.es6.elasticsearch.showTemplate=true
+spring.elasticsearch.bboss.es6.elasticsearch.discoverHost=false
+
+##es6连接池配置
+spring.elasticsearch.bboss.es6.http.timeoutConnection = 50000
+spring.elasticsearch.bboss.es6.http.timeoutSocket = 50000
+spring.elasticsearch.bboss.es6.http.connectionRequestTimeout=50000
+spring.elasticsearch.bboss.es6.http.retryTime = 1
+spring.elasticsearch.bboss.es6.http.maxLineLength = -1
+spring.elasticsearch.bboss.es6.http.maxHeaderCount = 200
+spring.elasticsearch.bboss.es6.http.maxTotal = 400
+spring.elasticsearch.bboss.es6.http.defaultMaxPerRoute = 200
+spring.elasticsearch.bboss.es6.http.keystore =
+spring.elasticsearch.bboss.es6.http.keyPassword =
+# ssl 主机名称校验,是否采用es6配置,
+# 如果指定为es6,就采用DefaultHostnameVerifier,否则采用 SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER
+spring.elasticsearch.bboss.es6.http.hostnameVerifier =
+
+
+
+
+##es5集群配置
+spring.elasticsearch.bboss.es5.name = es5
+spring.elasticsearch.bboss.es5.elasticUser=elastic
+spring.elasticsearch.bboss.es5.elasticPassword=elastic_168
+
+#spring.elasticsearch.bboss.es5.elasticsearch.rest.hostNames=es-cn-0pp0r32zf000ipovd.public.elasticsearch.aliyuncs.com:9200
+spring.elasticsearch.bboss.es5.elasticsearch.rest.hostNames=es-cn-0pp0r32zf000ipovd.elasticsearch.aliyuncs.com:9200
+
+#spring.elasticsearch.bboss.default.elasticsearch.rest.hostNames=10.180.211.27:9280,10.180.211.27:9281,10.180.211.27:9282
+spring.elasticsearch.bboss.es5.elasticsearch.dateFormat=yyyy.MM.dd
+spring.elasticsearch.bboss.es5.elasticsearch.timeZone=Asia/Shanghai
+spring.elasticsearch.bboss.es5.elasticsearch.ttl=2d
+#在控制台输出脚本调试开关showTemplate,false关闭,true打开,同时log4j至少是info级别
+spring.elasticsearch.bboss.es5.elasticsearch.showTemplate=true
+spring.elasticsearch.bboss.es5.elasticsearch.discoverHost=false
+
+##es5集群对应的连接池配置
+spring.elasticsearch.bboss.es5.http.timeoutConnection = 50000
+spring.elasticsearch.bboss.es5.http.timeoutSocket = 50000
+spring.elasticsearch.bboss.es5.http.connectionRequestTimeout=50000
+spring.elasticsearch.bboss.es5.http.retryTime = 1
+spring.elasticsearch.bboss.es5.http.maxLineLength = -1
+spring.elasticsearch.bboss.es5.http.maxHeaderCount = 200
+spring.elasticsearch.bboss.es5.http.maxTotal = 400
+spring.elasticsearch.bboss.es5.http.defaultMaxPerRoute = 200
+# https证书配置
+spring.elasticsearch.bboss.es5.http.keystore =
+spring.elasticsearch.bboss.es5.http.keyPassword =
+# ssl 主机名称校验,是否采用default配置,
+# 如果指定为default,就采用DefaultHostnameVerifier,否则采用 SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER
+spring.elasticsearch.bboss.es5.http.hostnameVerifier =
+# dsl配置文件热加载扫描时间间隔,毫秒为单位,默认5秒扫描一次,<= 0时关闭扫描机制
+spring.elasticsearch.bboss.dslfile.refreshInterval = -1

+ 35 - 34
src/main/resources/logback-spring.xml

@@ -1,41 +1,42 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <configuration>
-  <!-- 控制台打印日志的相关配置 --> 
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> 
-    <!-- 日志格式 -->
-    <encoder>
-        <pattern>%d{yyyy-MM-dd HH:mm:ss} [%level] - %m%n</pattern>
-    </encoder>
-    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-         <level>info</level>
-     </filter>
+    <!-- 控制台打印日志的相关配置 -->
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <!-- 日志格式 -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%level] - %m%n</pattern>
+        </encoder>
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>info</level>
+        </filter>
 
-  </appender>
+    </appender>
 
-  <!-- 文件保存日志的相关配置 --> 
-  <appender name="ERROR-OUT" class="ch.qos.logback.core.rolling.RollingFileAppender">
-      <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-          <level>info</level>
-      </filter>
-     <!-- 保存日志文件的路径 -->
-    <file>logs/info.log</file>
-    <!-- 日志格式 -->
-    <encoder>
-        <pattern>%d{yyyy-MM-dd HH:mm:ss} [%level] [%class:%line] - %m%n</pattern>
-    </encoder>
+    <!-- 文件保存日志的相关配置 -->
+    <appender name="ERROR-OUT" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>info</level>
+        </filter>
+        <!-- 保存日志文件的路径 -->
+        <file>logs/info.log</file>
+        <!-- 日志格式 -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%level] [%class:%line] - %m%n</pattern>
+        </encoder>
 
-    <!-- 循环政策:基于时间创建日志文件 -->
-    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-      <!-- 日志文件名格式 -->
-      <fileNamePattern>logs/info.%d{yyyy-MM-dd}.log</fileNamePattern>
-      <!-- 最大保存时间:30天-->
-      <maxHistory>30</maxHistory>
-    </rollingPolicy>
-  </appender>
+        <!-- 循环政策:基于时间创建日志文件 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- 日志文件名格式 -->
+            <fileNamePattern>logs/info.%d{yyyy-MM-dd}.log</fileNamePattern>
+            <!-- 最大保存时间:30天-->
+            <maxHistory>30</maxHistory>
+        </rollingPolicy>
+    </appender>
 
-  <!-- 基于dubug处理日志:具体控制台或者文件对日志级别的处理还要看所在appender配置的filter,如果没有配置filter,则使用root配置 -->
-  <root level="debug">
-    <appender-ref ref="STDOUT" />
-    <appender-ref ref="ERROR-OUT" />
-  </root>
+    <!-- 基于debug处理日志:具体控制台或者文件对日志级别的处理还要看所在appender配置的filter,如果没有配置filter,则使用root配置 -->
+    <root level="debug">
+        <appender-ref ref="STDOUT"/>
+        <appender-ref ref="ERROR-OUT"/>
+    </root>
+    <logger name="org.mongodb.driver" additivity="false"/>
 </configuration>