@@ -0,0 +1,408 @@
+package com.winhc.test;
+
+import com.alibaba.fastjson.JSON;
+import com.winhc.dto.CompanyQueryCondition;
+import com.winhc.entity.Company;
+import com.winhc.entity.Person;
+import com.winhc.kafka.KafkaProduce;
+import com.winhc.repository.CompanyRepository;
+import com.winhc.repository.PersonRepository;
+import com.winhc.service.CompanyRelationService;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.neo4j.driver.*;
+import org.neo4j.ogm.model.Edge;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Tests that write Company/Person nodes and relationships to Neo4j, both directly
+ * through repositories and Cypher, and indirectly by publishing JSON messages to Kafka.
+ *
+ * @author π
+ * @date 2020/12/25 16:24
+ */
+@RunWith(SpringRunner.class)
+@SpringBootTest
+public class TestCreateNode {
+    private final Log log = LogFactory.getLog(TestCreateNode.class);
+    @Autowired
+    CompanyRepository companyRepository;
+    @Autowired
+    PersonRepository personRepository;
+    @Autowired
+    CompanyRelationService companyRelationService;
+    @Autowired
+    Driver driver;
+    @Autowired
+    KafkaProduce kafkaProduce;
+
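+    /**
+     * Seeds three Company and three Person nodes. Note that p1 and p3 share the name 张三,
+     * which gives the same-name merge test in pushDataRelation2 duplicates to work with.
+     */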
+    @Test
+    public void pushData() {
+        log.info("start push save!");
+        long start = System.currentTimeMillis();
+
+        Company c1 = new Company();
+        c1.setCompanyId("1");
+        c1.setName("小米");
+        companyRepository.save(c1);
+        Company c2 = new Company();
+        c2.setCompanyId("2");
+        c2.setName("华为");
+        companyRepository.save(c2);
+        Company c3 = new Company();
+        c3.setCompanyId("3");
+        c3.setName("赢火虫");
+        companyRepository.save(c3);
+
+        Person p1 = new Person();
+        p1.setName("张三");
+        p1.setPersonId("4");
+        personRepository.save(p1);
+        Person p2 = new Person();
+        p2.setName("李四");
+        p2.setPersonId("5");
+        personRepository.save(p2);
+        Person p3 = new Person();
+        p3.setName("张三");
+        p3.setPersonId("6");
+        personRepository.save(p3);
+        System.out.println(System.currentTimeMillis() - start);
+    }
+
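+    /** Links the seeded nodes with TOUZI (investment) relationships through CompanyRelationService. */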
+    @Test
+    public void pushDataRelation() {
+        CompanyQueryCondition c = new CompanyQueryCondition();
+        c.setStartId("1");
+        c.setEndId("2");
+        c.setStatus("0");
+        c.setLabel("TOUZI");
+        c.setFlag("1");
+        companyRelationService.createCompanyRelation2(c);
+        CompanyQueryCondition c2 = new CompanyQueryCondition();
+        c2.setStartId("2");
+        c2.setEndId("3");
+        c2.setStatus("0");
+        c2.setLabel("TOUZI");
+        c2.setFlag("1");
+        companyRelationService.createCompanyRelation2(c2);
+        CompanyQueryCondition c3 = new CompanyQueryCondition();
+        c3.setStartId("4");
+        c3.setEndId("3");
+        c3.setStatus("0");
+        c3.setLabel("TOUZI");
+        companyRelationService.createCompanyRelation2(c3);
+        CompanyQueryCondition c4 = new CompanyQueryCondition();
+        c4.setStartId("6");
+        c4.setEndId("1");
+        c4.setStatus("0");
+        c4.setLabel("TOUZI");
+        companyRelationService.createCompanyRelation2(c4);
+        CompanyQueryCondition c5 = new CompanyQueryCondition();
+        c5.setStartId("5");
+        c5.setEndId("3");
+        c5.setStatus("0");
+        c5.setLabel("TOUZI");
+        companyRelationService.createCompanyRelation2(c5);
+    }
+
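+    /**
+     * Merges duplicate persons with apoc.periodic.iterate: the outer statement finds a pair of
+     * same-named PERSON nodes connected within 1..5 TOUZI hops, the inner one rewires the
+     * duplicate's relationships onto the kept node and labels them 合并 (merged) / 删除 (to delete).
+     */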
+    @Test
+    public void pushDataRelation2() {
+        try (Session session = driver.session()) {
+            List<Record> result = session.writeTransaction(tx -> tx.run(
+                    "CALL apoc.periodic.iterate(\n" +
+                    "'MATCH (p:PERSON)-[:TOUZI*1..5]-(q:PERSON)\n" +
+                    "WHERE p.name=q.name AND ID(p)<>ID(q)\n" +
+                    "WITH p,q\n" +
+                    "LIMIT 1\n" +
+                    "MATCH (q)-[r]-(x)\n" +
+                    "WHERE x<>p\n" +
+                    "RETURN p,q,r,x',\n" +
+                    "'DELETE r\n" +
+                    "MERGE (p)-[:TOUZI]-(x)\n" +
+                    "SET p:合并\n" +
+                    "SET q:删除',\n" +
+                    "{batchSize:1000,parallel:false,retries:3,iterateList:true}\n" +
+                    ") YIELD batches, total").list());
+            System.out.println(result);
+        }
+    }
+
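+    /**
+     * Batched import: UNWINDs a 1001-row parameter list, MERGEs company and person nodes, a fixed
+     * 高管 (executive) relationship, and a dynamically typed relationship via apoc.merge.relationship,
+     * since plain Cypher cannot parameterize a relationship type.
+     */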
+    @Test
+    public void pushDataRelation3() {
+        List<Map<String, Object>> batchList = new ArrayList<>();
+
+        for (int i = 0; i <= 1000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("companyId", "companyId" + i);
+            m1.put("personId", "personId" + i);
+            m1.put("companyName", "companyName" + i);
+            m1.put("personName", "personName" + i);
+            m1.put("relType", "relType_old" + i);
+            m1.put("status", "1");
+            m1.put("percent", i * 0.5);
+            m1.put("rid", i * 3);
+            batchList.add(m1);
+        }
+        Map<String, Object> params = new HashMap<>();
+        params.put("batch_list", batchList);
+
+        System.out.println("list: " + batchList);
+        long start = System.currentTimeMillis();
+//        final String cql = "WITH $batch_list AS batch_list \n" +
+//                "UNWIND batch_list AS row \n" +
+//                "MERGE(company:COMPANY1{companyId:row.companyId}) \n" +
+//                "ON CREATE SET company.name=row.companyName, company.companyId=row.companyId \n" +
+//                "MERGE(person:PERSON1{personId:row.personId}) \n" +
+//                "ON CREATE SET person.name=row.personName, person.personId=row.personId \n" +
+//                "WITH person,company,row\n" +
+//                "MERGE(person)-[:投资1]->(company) ";
+
+        // $batch_list is the current parameter syntax; the legacy {batch_list} form was removed in Neo4j 4.x.
+        final String cql = "WITH $batch_list AS batch_list \n" +
+                "UNWIND batch_list AS row \n" +
+                "MERGE(company:COMPANY1{companyId:row.companyId}) \n" +
+                "SET company.name=row.companyName, company.companyId=row.companyId \n" +
+                //"FOREACH (_ IN case when row.personId is not null then [1] else [] end|\n" +
+                "MERGE(person:PERSON1{personId:row.personId}) \n" +
+                "SET person.name=row.personName, person.personId=row.personId \n" +
+                //")" +
+                "MERGE(person)-[r:高管]->(company) \n" +
+                "SET r.percent=row.percent, r.status=row.status \n" +
+                "WITH person,company,row\n" +
+                "CALL apoc.merge.relationship(person, row.relType, {},{percent:row.percent,status:row.status}, company) YIELD rel \n" +
+                "WITH rel,row \n" +
+                "SET rel.status= row.status, rel.percent= row.percent \n";
+
+        try (Session session = driver.session()) {
+            List<Record> result = session.writeTransaction(tx -> tx.run(cql, params).list());
+            System.out.println(result);
+        }
+        System.out.println("cost " + (System.currentTimeMillis() - start));
+    }
+
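+    /**
+     * Builds (but, as left here, never executes) a batched edge-upsert statement; apoc.cypher.doIt
+     * is used so the node labels and the relationship type can come from the row data.
+     */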
+    public void saveEdges(List<Edge> edges) {
+        StringBuilder sb = new StringBuilder();
+
+        sb.append("UNWIND $batch as row ") //
+                .append(" WITH split(row.properties.from, '/') AS fromInfo, " //
+                        + "split(row.properties.to, '/') AS toInfo, row ") //
+                .append(" CALL apoc.cypher.doIt(" //
+                        + "'MATCH (from:`' + fromInfo[0] + '` {id: $fromId})" //
+                        + " MATCH (to:`' + toInfo[0] + '` {id: $toId}) " //
+                        + " MERGE (from)-[r:`' + row.properties.label + '` {id: $id}]->(to) " //
+                        + " SET r += $properties', " //
+                        + "{ fromId: row.properties.from, toId: row.properties.to, " //
+                        + " properties: row.properties, id: row.properties.id }" //
+                        + ") YIELD value") //
+                .append(" RETURN 1 ");
+
+        String statement = sb.toString();
+
+//        Map<String, Object> params = new HashMap<>();
+//        List<Map<String, Object>> batches = new ArrayList<>();
+//        for (Edge e : edges) {
+//            Map<String, Object> map = new HashMap<>();
+//            map.put("id", e.getId());
+//            map.put("from", e.getFrom());
+//            map.put("to", e.getTo());
+//            map.put("properties", e.getProperties());
+//            batches.add(map);
+//        }
+//        params.put("batch", batches);
+//
+//        cypher.query(statement, params, null);
+    }
+
+//    "CALL apoc.create.relationship(person, row.relType,{status:row.status,percent:row.percent}, company) YIELD rel RETURN count(*) \n";
+    // Procedure apoc.create.relationship has signature: apoc.create.relationship(from :: NODE?, relType :: STRING?, props :: MAP?, to :: NODE?) :: rel :: RELATIONSHIP?
+    // Procedure apoc.merge.relationship has signature: apoc.merge.relationship(startNode :: NODE?, relationshipType :: STRING?, identProps :: MAP?, props :: MAP?, endNode :: NODE?) :: rel :: RELATIONSHIP?
+
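+    /** Publishes 100,001 relation messages (ids 200000..300000) as JSON to the compamy_relation3 topic. */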
+    @Test
+    public void sendKafka() {
+        String topic = "compamy_relation3";
+        long start = System.currentTimeMillis();
+
+//        params.put("companyId", "222");
+//        params.put("name", "bbb");
+//        String msg = JSON.toJSONString(params);
+//        System.out.println(msg);
+//        kafkaProduce.produce(topic, msg);
+
+//        for (int i = 0; i < 100000; i++) {
+//            params.put("companyId", "id" + i);
+//            params.put("name", "name" + i);
+//            String msg = JSON.toJSONString(params);
+//            kafkaProduce.produce(topic, msg);
+//        }
+//        System.out.println("cost: " + (System.currentTimeMillis() - start));
+
+        for (int i = 200000; i <= 300000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("companyId", "companyId" + i);
+            m1.put("personId", "personId" + i);
+            m1.put("companyName", "companyName_7_" + i);
+            m1.put("personName", "personName_7_" + i);
+            m1.put("relType", "relType_7_" + (i + 2));
+            m1.put("status", "0");
+            m1.put("percent", String.valueOf(i * 0.5));
+            m1.put("rid", String.valueOf(i * 3));
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
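+    /** Publishes a small batch of relation messages to the test topic. */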
+    @Test
+    public void sendKafkaTest() {
+        String topic = "test";
+        for (int i = 260; i <= 270; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("startName", "startName" + (i + 1));
+            m1.put("endName", "endName" + (i + 1));
+            m1.put("deleted", 3);
+            m1.put("percent", i * 0.1);
+            m1.put("rid", i * 3);
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
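+    /** Publishes company-node messages (topic_type "1") to the unified inc_node_relation_union topic. */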
+    @Test
+    public void sendKafkaCompanyNode() {
+        //String topic = "inc_company_node";
+        String topic = "inc_node_relation_union";
+        for (int i = 1; i <= 1000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("id", "companyId" + i);
+            m1.put("name", "name" + (i + 1));
+            m1.put("deleted", 3);
+            m1.put("topic_type", "1");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
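+    /** Publishes holder relations (topic_type "2") to the unified topic. */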
+    @Test
+    public void sendKafkaHolderV1() {
+        //String topic = "inc_holder_relation_v1";
+        String topic = "inc_node_relation_union";
+        for (int i = 1; i <= 1000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("start_name", "startName" + (i + 1));
+            m1.put("end_name", "endName" + (i + 1));
+            m1.put("deleted", 3);
+            m1.put("percent", i * 0.1);
+            m1.put("topic_type", "2");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
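+    /** Publishes holder relations (topic_type "3") to the unified topic. */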
+    @Test
+    public void sendKafkaHolderV2() {
+        //String topic = "inc_holder_relation_v2";
+        String topic = "inc_node_relation_union";
+        for (int i = 1000; i <= 2000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("start_name", "startName" + (i + 1));
+            m1.put("end_name", "endName" + (i + 1));
+            m1.put("deleted", 3);
+            m1.put("percent", i * 0.1);
+            m1.put("topic_type", "3");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
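+    /** Publishes legal-entity relations (topic_type "4") to the unified topic. */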
+    @Test
+    public void sendKafkaLegalEntityV1() {
+        //String topic = "inc_legal_entity_relation_v1";
+        String topic = "inc_node_relation_union";
+        for (int i = 1000; i <= 2000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("start_name", "startName" + (i + 1));
+            m1.put("end_name", "endName" + (i + 1));
+            m1.put("deleted", 0);
+            m1.put("topic_type", "4");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
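+    /** Publishes legal-entity relations (topic_type "5") to the unified topic. */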
+    @Test
+    public void sendKafkaLegalEntityV2() {
+        //String topic = "inc_legal_entity_relation_v2";
+        String topic = "inc_node_relation_union";
+        for (int i = 1000; i <= 2000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("start_name", "startName" + (i + 1));
+            m1.put("end_name", "endName" + (i + 1));
+            m1.put("deleted", 0);
+            m1.put("topic_type", "5");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+
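+    /** Publishes staff relations (topic_type "6", with a staff_type field) to the unified topic. */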
+    @Test
+    public void sendKafkaStaff() {
+        //String topic = "inc_staff_relation";
+        String topic = "inc_node_relation_union";
+        for (int i = 1000; i <= 2000; i++) {
+            Map<String, Object> m1 = new HashMap<>();
+            m1.put("start_id", "start_id" + i);
+            m1.put("end_id", "end_id" + i);
+            m1.put("start_name", "startName" + (i + 1));
+            m1.put("end_name", "endName" + (i + 1));
+            m1.put("deleted", 0);
+            m1.put("staff_type", "董事" + i);
+            m1.put("topic_type", "6");
+            String msg = JSON.toJSONString(m1);
+            System.out.println(msg);
+            kafkaProduce.produce(topic, msg);
+        }
+    }
+}