浏览代码

feat: add kafka

许家凯 4 年之前
父节点
当前提交
33653f28f3

+ 4 - 0
pom.xml

@@ -185,6 +185,10 @@
             <artifactId>log4j-api</artifactId>
             <version>2.7</version>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
 
 
     </dependencies>

+ 128 - 0
src/main/java/com/winhc/phoenix/example/configuration/KafkaConfiguration.java

@@ -0,0 +1,128 @@
+package com.winhc.phoenix.example.configuration;
+
+import com.google.common.collect.Maps;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.config.KafkaListenerContainerFactory;
+import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.core.DefaultKafkaProducerFactory;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.core.ProducerFactory;
+
+import java.util.Map;
+
+/**
+ * @author: XuJiakai
+ * 2020/12/22 10:20
+ */
+@Configuration
+@EnableKafka
+@ConditionalOnProperty(value = "spring.profiles.active", havingValue = "prod", matchIfMissing = true)
+public class KafkaConfiguration {
+
+    @Value("${spring.kafka.bootstrap-servers}")
+    private String bootstrapServers;
+
+    @Value("${spring.kafka.consumer.enable-auto-commit}")
+    private Boolean autoCommit;
+
+    @Value("${spring.kafka.consumer.auto-commit-interval}")
+    private Integer autoCommitInterval;
+
+    @Value("${spring.kafka.consumer.group-id}")
+    private String groupId;
+
+    @Value("${spring.kafka.consumer.max-poll-records}")
+    private Integer maxPollRecords;
+
+    @Value("${spring.kafka.consumer.auto-offset-reset}")
+    private String autoOffsetReset;
+
+    @Value("${spring.kafka.producer.retries}")
+    private Integer retries;
+
+    @Value("${spring.kafka.producer.batch-size}")
+    private Integer batchSize;
+
+    @Value("${spring.kafka.producer.buffer-memory}")
+    private Integer bufferMemory;
+
+    /**
+     * 生产者配置信息
+     */
+    @Bean
+    public Map<String, Object> producerConfigs() {
+        Map<String, Object> props = Maps.newHashMap();
+        props.put(ProducerConfig.ACKS_CONFIG, "0");
+        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+        props.put(ProducerConfig.RETRIES_CONFIG, retries);
+        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
+        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
+        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
+        return props;
+    }
+
+    /**
+     * 生产者工厂
+     */
+    @Bean
+    public ProducerFactory<String, String> producerFactory() {
+        return new DefaultKafkaProducerFactory<>(producerConfigs());
+    }
+
+    /**
+     * 生产者模板
+     */
+    @Bean
+    public KafkaTemplate<String, String> kafkaTemplate() {
+        return new KafkaTemplate<>(producerFactory());
+    }
+
+    /**
+     * 消费者配置信息
+     */
+    @Bean
+    public Map<String, Object> consumerConfigs() {
+        Map<String, Object> props = Maps.newHashMap();
+        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
+        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
+        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 5000);
+        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 120000);
+        props.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 180000);
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        return props;
+    }
+
+    /**
+     * 消费者批量工程
+     */
+    @Bean
+    public KafkaListenerContainerFactory<?> batchFactory() {
+        ConcurrentKafkaListenerContainerFactory<Integer, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
+        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
+        //设置为批量消费,每个批次数量在Kafka配置参数中设置ConsumerConfig.MAX_POLL_RECORDS_CONFIG
+        factory.setBatchListener(true);
+        return factory;
+    }
+
+
+    @Bean
+    public KafkaConsumer<String, String> kafkaConsumer() {
+        KafkaConsumer<String,String> kafkaConsumer = new KafkaConsumer<>(consumerConfigs());
+        return kafkaConsumer;
+    }
+}

+ 6 - 0
src/main/java/com/winhc/phoenix/example/controller/HbaseQueryController.java

@@ -49,4 +49,10 @@ public class HbaseQueryController {
             return ResponseVo.failure(start, e.getMessage());
         }
     }
+
+    @ApiOperation(value = "批量获取人名")
+    @PostMapping("bulk-get/human")
+    public Object bulkGet(@RequestBody String[] rowkey) {
+        return hbaseQueryService.humanBulkGet(rowkey);
+    }
 }

+ 33 - 0
src/main/java/com/winhc/phoenix/example/controller/KafkaController.java

@@ -0,0 +1,33 @@
+package com.winhc.phoenix.example.controller;
+
+import com.winhc.phoenix.example.service.WinhcKafkaService;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import lombok.AllArgsConstructor;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.web.bind.annotation.*;
+
+/**
+ * @author: XuJiakai
+ * 2020/12/21 19:17
+ */
+@RestController
+@AllArgsConstructor
+@Api(tags = "kafka测试接口", value = "kafka")
+@RequestMapping("kafka")
+@ConditionalOnProperty(value = "spring.profiles.active", havingValue = "prod", matchIfMissing = true)
+public class KafkaController {
+    private final WinhcKafkaService kafkaService;
+
+    @ApiOperation(value = "发送消息")
+    @PostMapping("{topic}")
+    public Object send(@PathVariable String topic, @RequestBody String body) {
+        return kafkaService.send(topic, body);
+    }
+
+    @ApiOperation(value = "接收消息(不允许大量请求)")
+    @GetMapping("{topic}")
+    public Object get(@PathVariable String topic,@RequestParam(defaultValue = "1") Integer maxSize) {
+        return kafkaService.getMsg(topic,maxSize);
+    }
+}

+ 1 - 1
src/main/java/com/winhc/phoenix/example/controller/SearchController.java

@@ -24,7 +24,7 @@ import java.util.regex.Pattern;
 @AllArgsConstructor
 @RestController
 @Api(tags = "es查询", value = "es")
-@RequestMapping("es/{version}")
+@RequestMapping("es")
 public class SearchController {
     private final Map<String, SearchService> map;
 

+ 4 - 6
src/main/java/com/winhc/phoenix/example/scheduled/ElasticsearchTask.java

@@ -4,14 +4,12 @@ import com.winhc.phoenix.example.service.SearchService;
 import com.winhc.phoenix.example.service.impl.SearchV8SimpServiceImpl;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Component;
 
 /**
  * @author: XuJiakai
  * 2020/12/2 10:11
  */
-@Component
+//@Component
 @Slf4j
 public class ElasticsearchTask {
     public ElasticsearchTask(@Qualifier(SearchV8SimpServiceImpl.index) SearchService searchService) {
@@ -19,10 +17,10 @@ public class ElasticsearchTask {
     }
 
     private final SearchService searchService;
-    private static final String[] keywords = new String[]{"所", "厂", "店", "公司", "县", "市", "省", "区", "场", "会"};
+    private static final String[] keywords = new String[]{"所", "厂", "集团", "店", "公司", "县", "市", "省", "区", "场", "会"};
     private static int i = 0;
 
-    @Scheduled(cron = "0 0/1 * * * ? ")
+//    @Scheduled(cron = "0 0/1 * * * ? ")
     public void preheat() {
         i = ++i % keywords.length;
         int size = (int) (Math.random() * 9 + 1);
@@ -30,7 +28,7 @@ public class ElasticsearchTask {
         String k = keywords[i];
         log.info("preheat index {} {}...", k, size);
         Object query = searchService.query(k, size, size);
-        log.info("{}", query);
+//        log.info("{}", query);
         log.info("preheat successfully !");
     }
 }

+ 2 - 0
src/main/java/com/winhc/phoenix/example/service/HbaseQueryService.java

@@ -13,4 +13,6 @@ public interface HbaseQueryService {
     Object get(String tableName, String rowkey);
 
     void asyncScan(String tableName, String rowPrefix);
+
+    Object humanBulkGet( String[] rowkey);
 }

+ 11 - 0
src/main/java/com/winhc/phoenix/example/service/WinhcKafkaService.java

@@ -0,0 +1,11 @@
+package com.winhc.phoenix.example.service;
+
+/**
+ * @author: XuJiakai
+ * 2020/12/21 17:44
+ */
public interface WinhcKafkaService {
    /**
     * Polls messages from the given topic.
     *
     * @param topic   topic to consume from
     * @param maxSize maximum number of records to return
     * @return the polled messages (concrete shape defined by the implementation)
     */
    Object getMsg(String topic,Integer maxSize);

    /**
     * Publishes a message to the given topic.
     *
     * @param topic topic to send to
     * @param body  message payload
     * @return true if the send was initiated
     */
    boolean send(String topic, String body);
}

+ 21 - 0
src/main/java/com/winhc/phoenix/example/service/impl/HbaseQueryServiceImpl.java

@@ -11,6 +11,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.*;
 import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.springframework.stereotype.Service;
 
 import java.io.IOException;
@@ -18,6 +19,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 /**
  * @Author: XuJiakai
@@ -32,6 +34,7 @@ public class HbaseQueryServiceImpl implements HbaseQueryService {
 
     @SneakyThrows
     @Override
+
     public List<Object> scan(String tableName, String rowPrefix, Long size) {
         try (Table table = connection.getTable(TableName.valueOf(tableName.toUpperCase()))) {
             Scan scan = new Scan();
@@ -96,6 +99,24 @@ public class HbaseQueryServiceImpl implements HbaseQueryService {
         }
     }
 
+    private static final byte[] f = "F".getBytes();
+    private static final byte[] c = "HUMAN_NAME".getBytes();
+    private static final TableName t = TableName.valueOf("COMPANY_HUMAN_RELATION");
+
+    @Override
+    @SneakyThrows
+    public Object humanBulkGet(String[] rowkey) {
+        List<Get> gets = Arrays.stream(rowkey).map(String::getBytes).map(Get::new).peek(g -> g.addColumn(f, c)).collect(Collectors.toList());
+        try (Table table = connection.getTable(t)) {
+            Result[] results = table.get(gets);
+            Map<String, String> collect = Arrays.stream(results)
+                    .filter(r -> !r.isEmpty())
+                    .collect(Collectors.toMap(r -> Bytes.toString(r.getRow()), r -> Bytes.toString(r.getValue(f, c)), (r1, r2) -> r1));
+            results.clone();
+            return collect;
+        }
+    }
+
 
     private byte[] calculateTheClosestNextRowKeyForPrefix(byte[] rowKeyPrefix) {
         // Essentially we are treating it like an 'unsigned very very long' and doing +1 manually.

+ 59 - 0
src/main/java/com/winhc/phoenix/example/service/impl/WinhcKafkaServiceImpl.java

@@ -0,0 +1,59 @@
+package com.winhc.phoenix.example.service.impl;
+
+import com.winhc.phoenix.example.service.WinhcKafkaService;
+import com.winhc.phoenix.example.util.UnicodeBackslashU;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Service;
+import org.springframework.util.concurrent.ListenableFuture;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author: XuJiakai
+ * 2020/12/21 17:44
+ */
+@Slf4j
+@Service
+@AllArgsConstructor
+@ConditionalOnProperty(value = "spring.profiles.active", havingValue = "prod", matchIfMissing = true)
+public class WinhcKafkaServiceImpl implements WinhcKafkaService {
+    private final com.winhc.phoenix.example.configuration.KafkaConfiguration kafkaConfiguration;
+    private final KafkaTemplate<String, String> kafkaTemplate;
+
+    @Override
+    public boolean send(String topic, String body) {
+        ListenableFuture<SendResult<String, String>> send = kafkaTemplate.send(topic, body);
+        return true;
+    }
+
+    @Override
+    public Object getMsg(String topic, Integer maxSize) {
+        Map<String, Object> stringObjectMap = kafkaConfiguration.consumerConfigs();
+        stringObjectMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxSize);
+        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(stringObjectMap);
+        List<String> subscribedTopics = new ArrayList<String>();
+        subscribedTopics.add(topic);
+        kafkaConsumer.subscribe(subscribedTopics);
+        ConsumerRecords<String, String> poll = kafkaConsumer.poll(Duration.ofSeconds(5));
+        List<String> rs = new ArrayList<>();
+
+        for (ConsumerRecord<String, String> record : poll) {
+            String value = record.value();
+            rs.add(UnicodeBackslashU.unicodeToCn(value));
+        }
+        kafkaConsumer.commitAsync();
+        kafkaConsumer.unsubscribe();
+        return rs;
+    }
+}

+ 81 - 0
src/main/java/com/winhc/phoenix/example/util/UnicodeBackslashU.java

@@ -0,0 +1,81 @@
+package com.winhc.phoenix.example.util;
+
+import java.util.regex.Pattern;
+
+/**
+ * @author: XuJiakai
+ * 2020/12/23 10:38
+ */
+public final class UnicodeBackslashU {
+    // 单个字符的正则表达式
+    private static final String singlePattern = "[0-9a-fA-F]";
+    // 4个字符的正则表达式
+    private static final Pattern pattern = Pattern.compile(singlePattern + singlePattern +
+            singlePattern + singlePattern);
+
+
+    /**
+     * 把 \\u 开头的单字转成汉字,如 \\u6B65 -> 步
+     *
+     * @param str
+     * @return
+     */
+    private static String ustartToCn(final String str) {
+        StringBuilder sb = new StringBuilder().append("0x")
+                .append(str, 2, 6);
+        int code = Integer.decode(sb.toString());
+        char c = (char) code;
+        return String.valueOf(c);
+    }
+
+    /**
+     * 字符串是否以Unicode字符开头。约定Unicode字符以 \\u开头。
+     *
+     * @param str 字符串
+     * @return true表示以Unicode字符开头.
+     */
+    private static boolean isStartWithUnicode(final String str) {
+        if (null == str || str.length() == 0) {
+            return false;
+        }
+        if (!str.startsWith("\\u")) {
+            return false;
+        }
+        // \u6B65
+        if (str.length() < 6) {
+            return false;
+        }
+        String content = str.substring(2, 6);
+
+
+        boolean isMatch = pattern.matcher(content).matches();
+        return isMatch;
+    }
+
+    /**
+     * 字符串中,所有以 \\u 开头的UNICODE字符串,全部替换成汉字
+     *
+     * @param str
+     * @return
+     */
+    public static String unicodeToCn(final String str) {
+        // 用于构建新的字符串
+        StringBuilder sb = new StringBuilder();
+        // 从左向右扫描字符串。tmpStr是还没有被扫描的剩余字符串。
+        // 下面有两个判断分支:
+        // 1. 如果剩余字符串是Unicode字符开头,就把Unicode转换成汉字,加到StringBuilder中。然后跳过这个Unicode字符。
+        // 2.反之, 如果剩余字符串不是Unicode字符开头,把普通字符加入StringBuilder,向右跳过1.
+        int length = str.length();
+        for (int i = 0; i < length; ) {
+            String tmpStr = str.substring(i);
+            if (isStartWithUnicode(tmpStr)) {
+                sb.append(ustartToCn(tmpStr));
+                i += 6;
+            } else {
+                sb.append(str.charAt(i));
+                i++;
+            }
+        }
+        return sb.toString();
+    }
+}

+ 30 - 0
src/main/resources/application.yml

@@ -40,6 +40,36 @@ spring:
     phoenix:
       server:
         url: http://hb-uf6m8e1nu4ivp06m5-proxy-phoenix.hbase.rds.aliyuncs.com:8765
+  data:
+    mongodb:
+      # NOTE(review): plaintext credentials committed to version control — move the
+      # username/password into environment variables or a secrets store.
+      uri: mongodb://itslaw:itslaw_168@dds-uf6ff5dfd9aef3641.mongodb.rds.aliyuncs.com:3717,dds-uf6ff5dfd9aef3642.mongodb.rds.aliyuncs.com:3717/itslaw?replicaSet=mgset-6501997
+
+  kafka:
+    bootstrap-servers: 192.168.4.237:9092,192.168.4.235:9092,192.168.4.236:9092
+    producer:
+      retries: 3
+      batch-size: 16384
+      buffer-memory: 33554432
+      key-serializer: org.apache.kafka.common.serialization.StringSerializer
+      value-serializer: org.apache.kafka.common.serialization.StringSerializer
+      # acks=0 : 生产者在成功写入消息之前不会等待任何来自服务器的响应。
+      # acks=1 : 只要集群的首领节点收到消息,生产者就会收到一个来自服务器成功响应。
+      # acks=all :只有当所有参与复制的节点全部收到消息时,生产者才会收到一个来自服务器的成功响应。
+      acks: 1
+    consumer:
+      # 该属性指定了消费者在读取一个没有偏移量的分区或者偏移量无效的情况下该作何处理:
+      # latest(默认值)在偏移量无效的情况下,消费者将从最新的记录开始读取数据(在消费者启动之后生成的记录)
+      # earliest :在偏移量无效的情况下,消费者将从起始位置读取分区的记录
+      auto-offset-reset: earliest
+      enable-auto-commit: false
+      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+      group-id: phoenix_example_group_id
+      max-poll-records: 1
+      auto-commit-interval: 1000
+    listener:
+      # 在侦听器容器中运行的线程数。
+      concurrency: 1
 
 es:
   username: elastic

+ 9 - 0
src/test/java/com/winhc/phoenix/example/DeletedHbase.java

@@ -6,6 +6,7 @@ import com.aliyun.odps.data.Record;
 import com.aliyun.odps.task.SQLTask;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.winhc.phoenix.example.service.HbaseOperationService;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
@@ -30,6 +31,8 @@ public class DeletedHbase {
 
     @Autowired
     Odps odps;
+    @Autowired
+   HbaseOperationService hbaseOperationService;
 
     @Autowired
     private Connection connection;
@@ -70,4 +73,10 @@ public class DeletedHbase {
             log.error(e.getMessage(), e);
         }
     }
+
+    @Test
+    public void r(){
+       String r =  "3046318584_6a1af2b2759975d27be2e38d9a441edd";
+        hbaseOperationService.deleteByRowkey("COMPANY_ZXR_LIST", r);
+    }
 }

+ 57 - 9
src/test/java/com/winhc/phoenix/example/PhoenixExampleApplicationTests.java

@@ -1,9 +1,10 @@
 package com.winhc.phoenix.example;
 
-import com.github.pagehelper.PageHelper;
-import com.winhc.phoenix.example.bean.TestBean;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.winhc.phoenix.example.mapper.TestMapper;
 import com.winhc.phoenix.example.service.HbaseQueryService;
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -11,6 +12,7 @@ import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.jdbc.core.JdbcTemplate;
 
 import java.util.List;
+import java.util.Map;
 
 @Slf4j
 @SpringBootTest
@@ -22,7 +24,29 @@ class PhoenixExampleApplicationTests {
 
     @Autowired
     HbaseQueryService hbaseQueryService;
+    private static final ObjectMapper objectMapper = new ObjectMapper()
+            .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
 
+
+    private List<Map<String, Object>> getPage(int limit,int offset) {
+        String sql = "SELECT\n" +
+                "COMPANY_ZXR_RESTRICT.ROWKEY,\n" +
+                "COMPANY_ZXR_RESTRICT.CID,\n" +
+                "COMPANY_ZXR_RESTRICT.CASE_CREATE_TIME,\n" +
+                "COMPANY_ZXR_RESTRICT.CASE_NO\n" +
+                "FROM COMPANY_ZXR_RESTRICT\n" +
+                "WHERE COMPANY_ZXR_RESTRICT.CID = '2330205447'\n" +
+                "AND COMPANY_ZXR_RESTRICT.STATUS = '0'\n" +
+                "AND COMPANY_ZXR_RESTRICT.DELETED = '0'\n" +
+                "ORDER BY COMPANY_ZXR_RESTRICT.CASE_CREATE_TIME DESC\n" +
+                ", COMPANY_ZXR_RESTRICT.rowkey asc\n" +
+                "LIMIT "+limit+" OFFSET "+offset;
+        List<Map<String, Object>> maps = jdbcTemplate.queryForList(sql);
+        return maps;
+    }
+
+
+    @SneakyThrows
     @Test
     void contextLoads() {
 //        hbaseQueryService.asyncScan("COMPANY_EMPLOYMENT", "194650063");
@@ -39,21 +63,45 @@ class PhoenixExampleApplicationTests {
             }
         });
         System.out.println(query1);*/
-        List<String> query = jdbcTemplate.query("select  rowkey, cid, cname,  put_reason, put_date, put_department, remove_reason, remove_date,\n" +
-                "        remove_department, create_time, update_time, deleted\n" +
-                "        from company_abnormal_info\n" +
+
+       /* String sql = "SELECT\n" +
+                "COMPANY_ZXR_RESTRICT.ROWKEY,\n" +
+                "COMPANY_ZXR_RESTRICT.CID,\n" +
+                "COMPANY_ZXR_RESTRICT.CASE_NO\n" +
+                "FROM COMPANY_ZXR_RESTRICT\n" +
+                "WHERE COMPANY_ZXR_RESTRICT.CID = '2330205447'\n" +
+                "AND COMPANY_ZXR_RESTRICT.STATUS = '0'\n" +
+                "AND COMPANY_ZXR_RESTRICT.DELETED = '0'\n" +
+                "ORDER BY COMPANY_ZXR_RESTRICT.CASE_CREATE_TIME DESC\n" +
+                "LIMIT 10 OFFSET 10";
+        List<Map<String, Object>> maps = jdbcTemplate.queryForList(sql);
+        log.info(objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(maps));*/
+
+        List<Map<String, Object>> page = getPage(5, 0);
+        List<Map<String, Object>> page1 = getPage(5, 5);
+
+        log.info(objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(page));
+        log.info(objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(page1));
+
+
+    /*    List<String> query = jdbcTemplate.query("select  rowkey,cid,name" +
+                "        from COMPANY_JUDICIAL_ASSISTANCE_LIST\n" +
                 "         \n" +
-                "             WHERE (  cid = '937712696') \n" +
+                "             WHERE (  cid = '52512141') \n" +
                 "         \n" +
                 "        limit 10", (resultSet, i) -> resultSet.getString(1));
         log.info("end1..");
-        System.out.println(query);
+        System.out.println(query);*/
+
+
+
+
 
-        log.info("start...");
+       /* log.info("start...");
         PageHelper.startPage(1, 2);
         List<TestBean> testBeans = testMapper.selectByExample(TestBean.builder().cid("1093053795").build());
         log.info("end...");
-        testBeans.forEach(System.out::println);
+        testBeans.forEach(System.out::println);*/
     }
 
 }

+ 21 - 0
src/test/java/com/winhc/phoenix/example/dao/impl/SearchDaoImplTest.java

@@ -0,0 +1,21 @@
+package com.winhc.phoenix.example.dao.impl;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+
+import java.util.regex.Pattern;
+
@SpringBootTest
class SearchDaoImplTest {

    @Autowired
    SearchDaoImpl searchDao;
    // NOTE(review): this compiled pattern is never used in this test class — confirm it is
    // intentional (e.g. kept for a planned assertion) or remove it.
    Pattern compile = Pattern.compile("^((?!judicase_id).)*$");

    // Smoke test: only verifies searchDao.test() runs without throwing.
    @Test
    void tips() {

        searchDao.test();
    }
}

+ 16 - 0
src/test/java/com/winhc/phoenix/example/job/EsScanJobTest.java

@@ -0,0 +1,16 @@
+package com.winhc.phoenix.example.job;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+
@SpringBootTest
public class EsScanJobTest {
    @Autowired
    EsScanJob job;

    // Smoke test: runs the ES scan job end-to-end; passes if no exception is thrown.
    @Test
    public void start() {
        job.start();
    }
}

+ 17 - 0
src/test/java/com/winhc/phoenix/example/job/HbaseScanJobTest.java

@@ -0,0 +1,17 @@
+package com.winhc.phoenix.example.job;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+
@SpringBootTest
public class HbaseScanJobTest {

    @Autowired
    HbaseScanJob job;

    // Smoke test: runs the HBase scan job end-to-end; passes if no exception is thrown.
    @Test
    public void start() {
        job.start();
    }
}

+ 24 - 0
src/test/java/com/winhc/phoenix/example/service/impl/SearchServiceImplTest.java

@@ -0,0 +1,24 @@
+package com.winhc.phoenix.example.service.impl;
+
+import com.winhc.phoenix.example.service.SearchService;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+
@SpringBootTest
class SearchServiceImplTest {
    @Autowired
    SearchService searchService;

    // Smoke test for the tips (suggest/autocomplete) endpoint; result is only printed,
    // not asserted.
    @Test
    void tips() {
        Object 百度网讯 = searchService.tips("百度网讯");
        System.out.println(百度网讯);
    }

    // Smoke test for a full query (offset 0, size 10); result is only printed, not asserted.
    @Test
    void query() {
        Object 百度网讯 = searchService.query("北京百度网讯科技有限公司佛山分公司", 0, 10);
        System.out.println(百度网讯);
    }
}