- #eureka.client.serviceUrl.defaultZone= http://106.14.81.247:8900/eureka/
- #Neo4j configuration
- spring.data.neo4j.username=neo4j
- spring.data.neo4j.password=neo4j168
- #spring.data.neo4j.uri=http://106.14.211.187:7474
- spring.data.neo4j.uri=bolt://106.14.211.187:7687
- #spring.data.neo4j.uri=bolt://127.0.0.1:7687
- #prod
- #spring.data.neo4j.uri=bolt://10.29.30.244:7687
- #org.neo4j.driver.uri=bolt://106.14.211.187:7687
- #org.neo4j.driver.authentication.username=neo4j
- #org.neo4j.driver.authentication.password=neo4j168
- #spring.data.neo4j.embedded.enabled=true
- #spring.data.neo4j.driver=org.neo4j.ogm.drivers.http.driver.HttpDriver
- #Database URI address
- #spring.data.neo4j.uri=http://10.29.26.76:7474
- #spring.data.neo4j.uri=http://47.101.212.122:7474
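The `spring.data.neo4j.*` settings above connect over Bolt to a remote Neo4j instance. Below is a minimal sketch of how such a connection is typically consumed, assuming Spring Data Neo4j with the OGM driver; the `CompanyNode` entity, its label, and the repository are hypothetical names for illustration, not part of this project.

```java
import org.neo4j.ogm.annotation.GeneratedValue;
import org.neo4j.ogm.annotation.Id;
import org.neo4j.ogm.annotation.NodeEntity;
import org.springframework.data.neo4j.repository.Neo4jRepository;

// Hypothetical node entity mapped through Neo4j OGM
@NodeEntity(label = "Company")
public class CompanyNode {

    @Id
    @GeneratedValue
    private Long id;
    private String name;

    public Long getId() { return id; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
}

// in a separate file: repository backed by the Bolt connection configured above
interface CompanyNodeRepository extends Neo4jRepository<CompanyNode, Long> {
    CompanyNode findByName(String name);
}
```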
- #spring.datasource.url = jdbc:mysql://47.100.20.161:3306/prism1?useUnicode=true&characterEncoding=utf-8
- #spring.datasource.username = firefly
- #spring.datasource.password = firefly
- scheduling.enabled = false
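`scheduling.enabled=false` is typically read by a `@ConditionalOnProperty` guard so scheduled jobs can be switched off per environment. The sketch below shows that common pattern with a hypothetical configuration class; the actual guard used in this project is not shown here.

```java
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

@Configuration
@EnableScheduling
@ConditionalOnProperty(value = "scheduling.enabled", havingValue = "true")
public class SchedulingConfig {

    // Runs only when scheduling.enabled=true; with the current value (false)
    // this whole configuration class is skipped.
    @Scheduled(fixedDelay = 60_000)
    public void syncNodeRelations() {
        // periodic work would go here
    }
}
```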
- #============== kafka ===================
- # Kafka broker addresses (multiple brokers can be listed, comma-separated)
- spring.kafka.bootstrap-servers=106.14.211.187:9092
- #topic
- spring.kafka.topic_node_relation_union=inc_node_relation_union
- #spring.kafka.bootstrap-servers=192.168.4.237:9092,192.168.4.235:9092,192.168.4.236:9092
- #spring.kafka.topic=xf_test
- #=============== provider =======================
- spring.kafka.producer.retries=3
- # Batch size in bytes for batched sends
- spring.kafka.producer.batch-size=16384
- spring.kafka.producer.buffer-memory=33554432
- # Serializers for message keys and values
- spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
- spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
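A minimal producer sketch using Spring Kafka's `KafkaTemplate` with the String serializers configured above. The `NodeRelationProducer` class and its method are hypothetical; only the topic property comes from this file.

```java
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service
public class NodeRelationProducer {

    private final KafkaTemplate<String, String> kafkaTemplate;

    // Topic name injected from spring.kafka.topic_node_relation_union above
    @Value("${spring.kafka.topic_node_relation_union}")
    private String topic;

    public NodeRelationProducer(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    public void send(String key, String payloadJson) {
        // Key and value are plain Strings, matching the StringSerializer config
        kafkaTemplate.send(topic, key, payloadJson);
    }
}
```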
- #=============== consumer =======================
- # Default consumer group id
- spring.kafka.consumer.group-id=neo4j_node_relation
- spring.kafka.consumer.auto-offset-reset=earliest
- # Auto-commit offsets every auto-commit-interval ms (set to false to use the manual ack-mode below)
- spring.kafka.consumer.enable-auto-commit=true
- spring.kafka.consumer.auto-commit-interval=100
- # Deserializers for message keys and values
- spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
- spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
- # Manual offset commit (only takes effect with enable-auto-commit=false)
- #spring.kafka.listener.ack-mode=MANUAL_IMMEDIATE
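A minimal listener sketch for the consumer settings above. With `enable-auto-commit=true`, offsets are committed automatically every 100 ms; if the commented-out `MANUAL_IMMEDIATE` ack-mode were enabled (and auto-commit disabled), the listener would instead take an `Acknowledgment` parameter and call `acknowledge()`. Class and method names are hypothetical.

```java
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class NodeRelationConsumer {

    // Topic and group id resolve from the properties above
    @KafkaListener(topics = "${spring.kafka.topic_node_relation_union}",
                   groupId = "${spring.kafka.consumer.group-id}")
    public void onMessage(String message) {
        // With enable-auto-commit=true the offset is committed automatically;
        // parse the message and apply the node/relation update here
    }
}
```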
- #mongo
- spring.data.mongodb.uri=mongodb://itslaw:itslaw_168@dds-uf6ff5dfd9aef3641601-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6ff5dfd9aef3642555-pub.mongodb.rds.aliyuncs.com:3717/itslaw?replicaSet=mgset-6501997
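The MongoDB URI above points Spring Data MongoDB at the itslaw replica set. A minimal sketch assuming Spring Data MongoDB is on the classpath and binds to `spring.data.mongodb.uri`; the document class, the collection name, and the repository are hypothetical and used only for illustration.

```java
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.repository.MongoRepository;

// The collection name "judgement" is an assumption for illustration only
@Document(collection = "judgement")
public class JudgementDoc {

    @Id
    private String id;
    private String title;

    public String getId() { return id; }
    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }
}

// in a separate file: standard Spring Data repository over the collection
interface JudgementDocRepository extends MongoRepository<JudgementDoc, String> {
}
```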