xufei 4 years ago
parent
commit
23019aa02b

+ 45 - 24
src/test/java/com/winhc/test/TestCreateNode.java

@@ -1,5 +1,12 @@
 package com.winhc.test;
 
+import cn.hutool.core.io.FileUtil;
+import cn.hutool.core.io.file.FileReader;
+import cn.hutool.core.lang.Console;
+import cn.hutool.core.text.csv.CsvData;
+import cn.hutool.core.text.csv.CsvReader;
+import cn.hutool.core.text.csv.CsvRow;
+import cn.hutool.core.text.csv.CsvUtil;
 import com.alibaba.fastjson.JSON;
 import com.winhc.dto.CompanyQueryCondition;
 import com.winhc.entity.Company;
@@ -18,10 +25,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import static org.neo4j.driver.Values.parameters;
 
@@ -265,10 +269,10 @@ public class TestCreateNode {
             m1.put("personId", "personId" + i);
             m1.put("companyName", "companyName_7_" + i);
             m1.put("personName", "personName_7_" + i);
-            m1.put("relType", "relType_7_" + i+2);
+            m1.put("relType", "relType_7_" + i + 2);
             m1.put("status", "0");
-            m1.put("percent", i * 0.5+"");
-            m1.put("rid", i * 3+"");
+            m1.put("percent", i * 0.5 + "");
+            m1.put("rid", i * 3 + "");
             String msg = JSON.toJSONString(m1);
             System.out.println(msg);
             kafkaProduce.produce(topic, msg);
@@ -282,8 +286,8 @@ public class TestCreateNode {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("startName", "startName" + i+1);
-            m1.put("endName", "endName" + i+1);
+            m1.put("startName", "startName" + i + 1);
+            m1.put("endName", "endName" + i + 1);
             m1.put("deleted", 3);
             m1.put("percent", i * 0.1);
             m1.put("rid", i * 3);
@@ -300,7 +304,7 @@ public class TestCreateNode {
         for (int i = 1; i <= 1000; i++) {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("id", "companyId" + i);
-            m1.put("name", "name" + i+1);
+            m1.put("name", "name" + i + 1);
             m1.put("deleted", 3);
             m1.put("topic_type", "1");
             String msg = JSON.toJSONString(m1);
@@ -313,12 +317,12 @@ public class TestCreateNode {
     public void sendKafkaHolderV1() {
         //String topic = "inc_holder_relation_v1";
         String topic = "inc_node_relation_union";
-        for (int i = 1; i <= 1000; i++) {
+        for (int i = 1; i <= 10; i++) {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("start_name", "startName" + i+1);
-            m1.put("end_name", "endName" + i+1);
+            m1.put("start_name", "startName" + i + 1);
+            m1.put("end_name", "endName" + i + 1);
             m1.put("deleted", 3);
             m1.put("percent", i * 0.1);
             m1.put("topic_type", "2");
@@ -336,8 +340,8 @@ public class TestCreateNode {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("start_name", "startName" + i+1);
-            m1.put("end_name", "endName" + i+1);
+            m1.put("start_name", "startName" + i + 1);
+            m1.put("end_name", "endName" + i + 1);
             m1.put("deleted", 3);
             m1.put("percent", i * 0.1);
             m1.put("topic_type", "3");
@@ -348,17 +352,16 @@ public class TestCreateNode {
     }
 
 
-
     @Test
     public void sendKafkaLegalEntityV1() {
         //String topic = "inc_legal_entity_relation_v1";
         String topic = "inc_node_relation_union";
-        for (int i = 1000; i <= 2000; i++) {
+        for (int i = 20000; i <= 200000; i++) {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("start_name", "startName" + i+1);
-            m1.put("end_name", "endName" + i+1);
+            m1.put("start_name", "startName" + i + 1);
+            m1.put("end_name", "endName" + i + 1);
             m1.put("deleted", 0);
             m1.put("topic_type", "4");
             String msg = JSON.toJSONString(m1);
@@ -375,8 +378,8 @@ public class TestCreateNode {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("start_name", "startName" + i+1);
-            m1.put("end_name", "endName" + i+1);
+            m1.put("start_name", "startName" + i + 1);
+            m1.put("end_name", "endName" + i + 1);
             m1.put("deleted", 0);
             m1.put("topic_type", "5");
             String msg = JSON.toJSONString(m1);
@@ -393,10 +396,10 @@ public class TestCreateNode {
             HashMap<String, Object> m1 = new HashMap<>();
             m1.put("start_id", "start_id" + i);
             m1.put("end_id", "end_id" + i);
-            m1.put("start_name", "startName" + i+1);
-            m1.put("end_name", "endName" + i+1);
+            m1.put("start_name", "startName" + i + 1);
+            m1.put("end_name", "endName" + i + 1);
             m1.put("deleted", 0);
-            m1.put("staff_type", "董事"+i);
+            m1.put("staff_type", "董事" + i);
             m1.put("topic_type", "6");
             String msg = JSON.toJSONString(m1);
             System.out.println(msg);
@@ -404,5 +407,23 @@ public class TestCreateNode {
         }
     }
 
+    @Test
+    public void sendKafkaStaff1() {
+        String topic = "inc_node_relation_union";
+        String s = "C:\\Users\\batmr\\Downloads\\8ec932c2-eb74-42c0-bfb0-4e2779383af7.csv";
+        CsvReader reader = CsvUtil.getReader();
+        // Read the CSV data from the file
+        CsvData data = reader.read(FileUtil.file(s));
+        List<CsvRow> rows = data.getRows();
+        int i = 1;
+        for (CsvRow csvRow : rows) {
+            ++i;
+            if (i <= 2) continue; // skip the header row
+            String message = csvRow.getRawList().get(0).replaceAll("\"\"", "\"");
+            kafkaProduce.produce(topic, message);
+        }
+
+    }
+
 
 }
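
For context, the new sendKafkaStaff1 test replays a CSV export through Kafka: Hutool's CsvUtil reads the file, the header row is skipped, and each row's first column (a JSON payload whose inner quotes are doubled by the export) is un-escaped and produced to the inc_node_relation_union topic. Below is a minimal standalone sketch of the same replay loop; the class name, the method signature, and the KafkaTemplate-based producer are illustrative assumptions, not part of this commit.

// Sketch only: replay a CSV export as raw Kafka messages.
// Assumes Hutool and spring-kafka are on the classpath; the names here are hypothetical.
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.text.csv.CsvData;
import cn.hutool.core.text.csv.CsvRow;
import cn.hutool.core.text.csv.CsvUtil;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.List;

public class CsvKafkaReplay {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public CsvKafkaReplay(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Reads every data row of the CSV and publishes the first column as a raw JSON message.
    public void replay(String csvPath, String topic) {
        CsvData data = CsvUtil.getReader().read(FileUtil.file(csvPath));
        List<CsvRow> rows = data.getRows();
        for (int i = 1; i < rows.size(); i++) { // start at 1 to skip the header row
            // The export doubles the quotes inside the JSON column; undo that before sending.
            String message = rows.get(i).getRawList().get(0).replaceAll("\"\"", "\"");
            kafkaTemplate.send(topic, message);
        }
    }
}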

+ 36 - 0
src/test/java/com/winhc/test/TestMongo.java

@@ -0,0 +1,36 @@
+package com.winhc.test;
+
+import com.winhc.db.mongodb.dataobject.NodeRelationError;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit4.SpringRunner;
+import java.util.ArrayList;
+import java.util.Date;
+
+/**
+ * @author π
+ * @Description:
+ * @date 2021/1/15 16:43
+ */
+@RunWith(SpringRunner.class)
+@SpringBootTest
+public class TestMongo {
+    //@Autowired
+    //NodeRelatonErrorRepository nodeRelatonErrorRepository;
+
+    @Test
+    public void saveData() {
+        ArrayList<NodeRelationError> list = new ArrayList<>();
+        NodeRelationError n1 = new NodeRelationError();
+        n1.setCreateTime(new Date());
+        n1.setUpdateTime(new Date());
+        n1.setConsumerMessage("{\"start_id\":\"ccdb46273f28d95206f90ea8e1705457\",\"start_name\":\"上饶市广信区田田圈病虫害防治专业合作社\",\"end_id\":\"005d1d5608f046d3b57f725730354494\",\"end_name\":\"\",\"percent\":0.2333,\"deleted\":0,\"holder_type\":2}");
+        n1.setErrorMessage("zero error");
+        n1.setStatus(1);
+        n1.setTopicType("5");
+        list.add(n1);
+        //nodeRelatonErrorRepository.saveAll(list);
+    }
+}
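
The repository field and the saveAll call in TestMongo are commented out in this commit. For reference, a repository that would satisfy them could look like the sketch below, assuming Spring Data MongoDB is on the classpath and NodeRelationError is a mapped document with a String id; the package and the id type are assumptions, while the interface and entity names follow the code above.

// Sketch only: a Spring Data MongoDB repository matching the commented-out usage in TestMongo.
package com.winhc.db.mongodb.repository; // hypothetical package

import com.winhc.db.mongodb.dataobject.NodeRelationError;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;

@Repository
public interface NodeRelatonErrorRepository extends MongoRepository<NodeRelationError, String> {
    // saveAll(Iterable) is inherited from MongoRepository, so the test's
    // nodeRelatonErrorRepository.saveAll(list) call needs no extra methods here.
}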