365 Commit-ok 2ace3f9858 ... 0276baf5fc

Szerző SHA1 Üzenet Dátum
  lyb 0276baf5fc 财产线索全量 3 éve
  lyb 8762107ca7 Merge remote-tracking branch 'origin/master' 3 éve
  lyb ed0b4fedc0 财产线索加金额 3 éve
  许家凯 4e2708eb9d fix: 过滤爬虫上传的脏数据 3 éve
  许家凯 908fad384d fix: 过滤爬虫上传的脏数据 3 éve
  许家凯 14f557bd82 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 828a052131 fix: 过滤爬虫上传的脏数据 3 éve
  许家凯 6a819e55f0 fix: 优化分区划分上的问题 3 éve
  许家凯 c6dd75a802 fix: 添加默认inc_ads空分区 3 éve
  许家凯 140ec5a87a Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 cd1a986e5e fix: deleted 默认为0 3 éve
  许家凯 d0a108ccdd fix: ds和update_time同时排序 3 éve
  许家凯 e7543e0b86 fix: base_company_mapping表支持跳分区 3 éve
  许家凯 182d568802 fix: 重跑数据分区设置 3 éve
  许家凯 4fcda8c11e fix: 重跑数据分区设置 3 éve
  许家凯 ae67cd2b0b Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 af1a52a278 fix: 去重加入ds和update_time 3 éve
  许家凯 633562b65a fix: 索引geo兼容 3 éve
  许家凯 1304cb67ec fix: 摘要计算修改入参 3 éve
  许家凯 3313e57b7f fix: 索引计算性能优化 3 éve
  许家凯 91397be05f fix: 兼容动态时间为空 3 éve
  许家凯 30d1a3f7bf Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 1d046a1b40 feat: ng空间下任务处理程序测试 3 éve
  许家凯 2effa9ce2a feat: ng空间下任务处理程序 3 éve
  xufei c105510111 增量修改 3 éve
  xufei f1039127a9 财产监控 3 éve
  xufei f24c6b611f 案源机会优化 3 éve
  xufei 3bcd85b53d 债权表优化 3 éve
  xufei a875f84647 索引id修改 3 éve
  许家凯 6fa9a4295c fix: 增量主要成员 3 éve
  许家凯 c7de3ba32f fix: 增量股权出质 3 éve
  许家凯 567ce1a738 feat: v8索引工具类 3 éve
  许家凯 f04f106079 fix: 动态json字符串异常 3 éve
  许家凯 50f0d7475d feat: 新空间下全量增量v8索引 3 éve
  许家凯 f57a7d9d76 fix: 动态加入统一捕获异常 3 éve
  许家凯 ba9f40c3b3 fix: 股东动态中字符串json异常 3 éve
  许家凯 570d0ed401 feat: 索引util 3 éve
  许家凯 49eac7a9d1 feat: 索引util 3 éve
  许家凯 c9b2b73bc4 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 682a593d13 add:测试索引生成 3 éve
  许家凯 397515813c add:注册资本转换 3 éve
  许家凯 677fe0f791 add:行业code映射表 3 éve
  许家凯 10f949b96d Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 bed063ae85 add:摘要数据终本案件加入历史和非历史 3 éve
  许家凯 dd475976c0 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 65720a490a fix: es返回省市区名字 3 éve
  许家凯 4ad9601922 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 71afd08d50 feat: 摘要计算走统一入口 3 éve
  许家凯 c4863bfa03 fix: 银行帐户加入id为空的过滤 3 éve
  许家凯 82805f3a25 fix: 司法协助改变去重字段 3 éve
  许家凯 5832400e51 feat: 企业银行帐户维度 3 éve
  许家凯 df2e2256fa fix: 企业动态过滤变更时间为空 3 éve
  许家凯 fbed037d62 fix: 企业动态过滤变更时间为空 3 éve
  许家凯 bfee4f4abd fix: 兼容模型计算 3 éve
  许家凯 ed42b73206 feat: 摘要替换新方法 3 éve
  xufei 2250a93e26 Merge remote-tracking branch 'origin/master' 3 éve
  xufei 3db8a54ef0 bugfix 3 éve
  xufei 65e107a091 Merge remote-tracking branch 'origin/master' 3 éve
  xufei a6a65cf3e1 司法案件优化 3 éve
  许家凯 9369efe310 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 f3205ced7e feat: 减资记录 3 éve
  许家凯 bcd22237c4 feat: 土地转让和土地抵押摘要计算区分开 3 éve
  许家凯 aa770699b2 fix: 股权出质fix bug 3 éve
  许家凯 5394578b8a feat: 股权出质加入新字段 3 éve
  许家凯 c198dfd632 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 e9c8021b8a fix: base_company_mapping修复bug 3 éve
  许家凯 f54bc5df83 fix: company维度支持写出索引 3 éve
  晏永年 4bf74fcb19 graphX性能优化 3 éve
  许家凯 d319daab32 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 9e55f3e8ce feat: 失信被执限高划分移除和非移除 3 éve
  许家凯 cd660bbd57 feat: base_company_mapping加字段 3 éve
  许家凯 e3e450ef7c fix: 司法案件fix bugs 3 éve
  许家凯 0f07c5ac80 fix: 司法案件fix bugs 3 éve
  许家凯 cf9851597f fix: 股权冻结fix bugs 3 éve
  许家凯 0fbcc4cb22 fix: 司法案件上游fix bugs 3 éve
  许家凯 f242b3cf68 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 91ee59d698 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 1436c82068 fix: 司法协助支持重跑 3 éve
  许家凯 b97ad989b0 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 851ffab236 fix: 司法案件上游数据添加唯一id 3 éve
  许家凯 e48bbee829 fix: 司法案件上游数据添加唯一id 3 éve
  许家凯 3b97d82439 fix: 司法案件上游数据添加唯一id 3 éve
  许家凯 7095cb621a Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 65e79deda3 fix: 司法案件上游数据添加唯一id 3 éve
  许家凯 616768f7f5 feat: 破产公告摘要 3 éve
  许家凯 db1100e1ad fix: 股权冻结fix 3 éve
  许家凯 2109ac26b5 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 0bd0575cef feat: 股权冻结增量和全量处理 3 éve
  许家凯 8900ffb521 feat: 摘要增强版发布 3 éve
  许家凯 63326f92fa fix: 案号校验规则更新 3 éve
  许家凯 77f5fc9b4e fix: 将同一cid的摘要进行聚合 3 éve
  许家凯 715757e438 fix: 测试 3 éve
  许家凯 0d33d7814f fix: 摘要增强版增量计算问题 3 éve
  许家凯 840dd03f9e feat: 摘要增强版 3 éve
  许家凯 4a673553f3 feat: 司法案件失信人fixbug 3 éve
  xufei 72a97c5c52 逻辑修改 3 éve
  晏永年 819aec3f2b fix 3 éve
  许家凯 3538c65a39 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 ad13a78f40 feat: 企业cid相关替换操作 3 éve
  许家凯 b49fb54c24 fix: 司法案件上游接入失信和被执数据 3 éve
  许家凯 29bcae642f Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 59e8d0101f fix: 解决查老赖不同表之间rowkey有冲突 3 éve
  许家凯 dcdd0e3dca fix: 摘要计算去除不可见字符 3 éve
  许家凯 e648052206 fix: 加入udf 3 éve
  许家凯 314a4d786e fix: 摘要去除非显示字符串影响 3 éve
  许家凯 5acafa15ed fix: 司法案件上游数据采用生产环境表 3 éve
  许家凯 4a2de24cd1 fix: 查老赖移除字段统一采用status 3 éve
  许家凯 64eccb4f2f Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 8422db489f fix: 身份证号udf 3 éve
  许家凯 b4669c5e95 fix: 身份证号校验单独提到utils中 3 éve
  许家凯 305a098a8a feat: 查老赖全异步处理 3 éve
  许家凯 1f67c4a67a fix: 身份证出生日期加入时间范围判断 3 éve
  许家凯 29e77b3b24 fix: 查老赖限高deleted字段修改 3 éve
  许家凯 825c9f990f fix: 查老赖修正label 3 éve
  许家凯 9c586cf8e1 fix: 查老赖上下游数据调试 3 éve
  许家凯 efbe298296 fix: 查老赖上游数据处理统一处理 3 éve
  许家凯 60a15d2bcd Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 d55175e0c3 feat: 查老赖上游数据处理统一处理 3 éve
  许家凯 8fe3a37dc0 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 1ca3e91917 feat: 查老赖加入其它维度 3 éve
  lyb 5749b8a1cd 执行人和终本,人的增量,以及司法案件预处理 3 éve
  许家凯 79489db316 fix: 调整company_dishonest_info_person 3 éve
  许家凯 103a37acf9 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 c37d135124 fix: 写phoenix 3 éve
  晏永年 7b773ade4d 查老赖统一预处理 3 éve
  许家凯 271d164b14 Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 45835c5c8d fix: ods->ads人的数据 3 éve
  许家凯 bf77cc2a6a fix: 查老赖修复拉历史数据问题 3 éve
  许家凯 626dbc632d fix: 查老赖下游统一拉取历史数据 3 éve
  许家凯 3487b689ce Merge remote-tracking branch 'origin/master' 3 éve
  许家凯 a0985dd18d feat: 查老赖 3 éve
  许家凯 c678f9faee feat: 司法案件流程发布上线 3 éve
  晏永年 4a4d2e4d5f 适配cids等 3 éve
  晏永年 0457931c60 fix 3 éve
  xufei d02497cc39 表名修改,name去重 3 éve
  晏永年 0a2593fa62 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 3 éve
  晏永年 ad4741175e feat:查老赖预处理 3 éve
  晏永年 4ee5d44474 自然人增量同步程序更名 3 éve
  晏永年 3741047b84 司法案件456维度将身份证补全移到外部(单独) 3 éve
  晏永年 d1b1c6ff23 feat:通用自然人身份证号码及企业cid补全 3 éve
  晏永年 75e0d01e2f 更改图处理正式库 3 éve
  晏永年 22b37453f8 fix 4 éve
  晏永年 295343f8b8 fix 4 éve
  xufei 89c3b4d4d4 立案信息融合 4 éve
  yandawei 0543599bb3 Merge remote-tracking branch 'origin/master' 4 éve
  yandawei 61360f9d0b 立案信息到司法案件 4 éve
  yandawei 5c52f51017 立案信息到司法案件 4 éve
  yandawei 192876f5f5 Merge remote-tracking branch 'origin/master' 4 éve
  yandawei 553e89996c 立案信息到司法案件中= 4 éve
  xufei 4ea7999f8d 司法案件字段修改 4 éve
  许家凯 ef5145be62 style: rename 4 éve
  晏永年 1adcc0142b feat:司法案件之送达公告、限制消费令(分企业和自然人2种)预处理 4 éve
  许家凯 7f9df76d6a Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 86f657fc88 feat: 司法案件失信人处理 4 éve
  许家凯 1aad2e9054 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 aa36f6559f feat: 身份证号格式统一 4 éve
  许家凯 789de46a5b feat: 添加法院层级广播变量函数 4 éve
  许家凯 7046d87aee feat: 添加案号规整函数 4 éve
  许家凯 2f33ecaf4f Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 c2af55b3fe feat: 添加与个人相关的维度 4 éve
  许家凯 9945797758 feat: 前置处理添加失信人 4 éve
  晏永年 d5dbd19a7c feat:司法案件之送达公告、限制消费 4 éve
  许家凯 2f066dce3b Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 4d642b1d48 fix: 企业动态清理html字符 4 éve
  许家凯 8576fa2859 fix: 司法案件上游计算 4 éve
  许家凯 50acf614c6 fix: 知识产权企业动态支持补数据 4 éve
  许家凯 5b06bb2c37 fix: 企业动态.融资历史,清算信息 4 éve
  许家凯 e6bf2a13d9 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 1d7714ae74 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 8cbe741700 fix: 企业动态.司法拍卖 4 éve
  许家凯 c0ac626f55 fix: 企业动态.股权出质 4 éve
  许家凯 1ca8c0d1d8 fix: 企业动态的对外投资错误数据 4 éve
  许家凯 f1e60dcfec fix: 企业动态的失信人信息 4 éve
  许家凯 9a0189c33c Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 67dada3cab fix: 企业动态的法院公告 4 éve
  许家凯 db0870981b Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 aa766f172c feat: 公司索引更新至v7 4 éve
  许家凯 45a1afe5d7 fix: 企业动态过滤大于当前时间的动态 4 éve
  许家凯 8e698cd12c Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 ca9b68b0e9 fix: 修复企业变更提取的合并问题 4 éve
  许家凯 5b4a861848 feat: 企业动态调整 4 éve
  许家凯 039d2fe650 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 23a050c8bf fix: 企业动态-年报对外担保 4 éve
  许家凯 5726a9e511 fix: 企业动态-公示催告 4 éve
  许家凯 d1f370f64e fix: 企业动态 4 éve
  许家凯 3743dade36 fix: 司法案件_文书 发布 4 éve
  许家凯 cf4a9de160 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 c2e4550e62 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 8b4309966f fix: 招投标info_type 4 éve
  晏永年 b28629d651 fix 4 éve
  xufei 494385711a Merge remote-tracking branch 'origin/master' 4 éve
  xufei e563c5defb 司法案件后置处理 4 éve
  xufei 41b53b2b6e 年报股权出质 4 éve
  许家凯 8988b3d255 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 17184217b2 fix: 年报对外投资输出摘要 4 éve
  许家凯 4c6ab41b30 fix: 股权出质输出摘要 4 éve
  许家凯 9963d3b93c fix: 主要成员输出摘要 4 éve
  许家凯 35748f065d fix: 企业动态换行调整 4 éve
  许家凯 e6353ef6e6 fix: 企业动态init调整 4 éve
  许家凯 df3a21772b feat: 企业动态发布生产 4 éve
  许家凯 7ad1530bf0 fix: 企业动态 4 éve
  许家凯 69e09c8360 fix: 企业动态 4 éve
  许家凯 b729d92c48 feat: 司法案件关系提取 4 éve
  许家凯 83f0a722ef Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 16ecf93863 fix: 主要成员未写入phoenix问题 4 éve
  许家凯 f6f1c2d09a Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 4adc729daf feat: 司法案件提关系 4 éve
  许家凯 23f8334df1 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 23c1caf68d fix: 破产公告 4 éve
  许家凯 272c452d64 fix: 修复知识产权bug 4 éve
  xufei 80524a8046 Merge remote-tracking branch 'origin/master' 4 éve
  xufei 4c41984f5d 时间格式转换 4 éve
  xufei 54971fc193 Merge remote-tracking branch 'origin/master' 4 éve
  xufei 6c7495d779 知识产权嵌入动态 4 éve
  xufei 7f471b11f7 知识产权动态bugfix 4 éve
  晏永年 f754ea1289 fix 4 éve
  晏永年 6a2aa90a2a fix 4 éve
  晏永年 8255285ff4 司法案件的GraphX处理 4 éve
  许家凯 32bd810223 fix: 企业动态引入聚合参数 4 éve
  yandawei 948097f549 Merge remote-tracking branch 'origin/master' 4 éve
  yandawei 4034a542fc 公司动态-招聘-修改为每日新增数量 4 éve
  yandawei 19f756b5d3 Merge remote-tracking branch 'origin/master' 4 éve
  yandawei 9de873e615 公司动态-招聘 4 éve
  晏永年 b10f91b2ec 企业公告维度的业务日期的兜底用update_time 4 éve
  晏永年 e93baf59e5 动产抵押表中若公示日期为null则用登记日期 4 éve
  晏永年 4aa01443bd fix 4 éve
  晏永年 d59a98ceb8 添加企业动态:企业公告 4 éve
  xufei 721c97919c 司法拍卖前置增加 4 éve
  许家凯 3e55dbcd88 feat: 存量企业动态从存量变更信息中提取 4 éve
  许家凯 77c242924a fix: 企业动态唯一id采用新md5 4 éve
  许家凯 0a30636155 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 55f0189580 feat: 添加摘要计算通用程序 4 éve
  许家凯 5270d444db feat: 企业动态添加维一标识 4 éve
  许家凯 79ab0c2427 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 4455c0a22b fix: 过滤银行和保险机构 4 éve
  许家凯 e5b3a417d4 feat: 工商信息变更及动态 4 éve
  晏永年 17ad1dab48 针对2.3类的案源机会过滤掉银行、保险企业 4 éve
  晏永年 b186d36957 公示催告 4 éve
  许家凯 202bc037bd feat: 企业动态风险等级抽取统一映射 4 éve
  许家凯 b70f5857f5 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 c556c2fedc feat: 主要成员企业动态 4 éve
  许家凯 20c8424683 feat: 主要成员变更提取 4 éve
  许家凯 e69a151f52 fix: 主要成员增量 4 éve
  许家凯 675f6c0cde Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 1dfbe8e709 feat: 变更和动态主函数入口参数调整 4 éve
  许家凯 b482d4862f Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 f49bd3920e fix: phoenix加入超时配制 4 éve
  许家凯 d832c12dfd Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 aea1077d14 feat: 主要成员增量和存量处理 4 éve
  许家凯 cfcd864234 feat: 存量cid通用spark程序加入自动建表 4 éve
  许家凯 45e3b9ce96 feat: 企业动态股权出质融入框架 4 éve
  许家凯 b0a8903811 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 50ac3b8230 feat: 企业变更通用程序修改 4 éve
  许家凯 4b1f265d00 fix: 企业变更及动态调整 4 éve
  许家凯 9d64faa7f4 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 64cc4c48b8 add:cid与new_cid的mapping表 4 éve
  晏永年 c3cb5f8c33 补全企业动态预备表的cname字段数据 4 éve
  xufei 83ec3fd3a4 增量计算补充 4 éve
  晏永年 09181e597e fix增量维度同步更换new_cid导致与老数据比较误判为insert数据 4 éve
  许家凯 ce51cfd574 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 da36b407bb add:股权出质 4 éve
  许家凯 c7ea469bc6 style 4 éve
  许家凯 79cd017511 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 daef91ca81 fix bugs 4 éve
  许家凯 1a0b844d32 fix bugs 4 éve
  许家凯 d36d594d22 添加企业动态启动参数 4 éve
  许家凯 abd7de490c Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 5d1246ec18 添加企业动态 4 éve
  许家凯 33837bf012 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 8893fcc727 add 4 éve
  许家凯 2a7365f911 公司动态通用程序init 4 éve
  许家凯 1bc8911ea6 公司变更动态加字段,phoenix不输出id 4 éve
  许家凯 4f57a575db Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 5d8395dfb0 企业债权关系表调参 4 éve
  许家凯 17e1014dc5 run 4 éve
  许家凯 1d475cb37f Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 4fe19da806 企业动态,利好消息只取指定维度的动态 4 éve
  许家凯 77f1af0123 增量索引、企业债权关系、人员表增量 4 éve
  许家凯 d7bf2abd8d add 4 éve
  许家凯 d4ec5db2e9 fix bugs 4 éve
  晏永年 dd8c8ba212 生成rowkey 4 éve
  晏永年 861dc4bb0c fix 4 éve
  晏永年 71f4559997 添加人与公司表增量同步 4 éve
  许家凯 f9b705f3e6 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 b3216d1694 公司基本信息索引部分调整输出 4 éve
  许家凯 884acfbab6 利好消息tags加参 4 éve
  许家凯 8289e52153 利好消息配置 4 éve
  xufei da507b43fc merge 4 éve
  xufei bd1947c5a2 Merge remote-tracking branch 'origin/master' 4 éve
  xufei 49ce50b82d 地块公示,购地信息修改 4 éve
  许家凯 5864d5dc50 利好消息 4 éve
  许家凯 e96ebbe75f Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 95bc6051a0 加入别名 4 éve
  许家凯 94d105bf87 加入九大类type自动推测 4 éve
  许家凯 30d0358c11 add 4 éve
  许家凯 e93c568347 Merge remote-tracking branch 'origin/master' 4 éve
  许家凯 6969da8e2a Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 0569194c60 知识产权处理程序 4 éve
  xufei cf0dc616b5 Merge remote-tracking branch 'origin/master' 4 éve
  xufei bf7f5b4208 add 4 éve
  xufei 5ac259c122 法院公告计算 4 éve
  许家凯 ea4583e4d0 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 6b045b8056 全量写出索引,企业债权关系表增量 4 éve
  许家凯 eeca257202 加入es client 4 éve
  许家凯 a1a02793ba 扩充变更字段 4 éve
  许家凯 60c7004d74 提取动态 4 éve
  许家凯 d534fff6ff add 4 éve
  许家凯 c221b903d1 fix bug 4 éve
  许家凯 3ce3b62429 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 1177b509e8 动态提到,pom更新 4 éve
  许家凯 eea45f2525 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 8aeb70be1d phoenix batchsize 4 éve
  许家凯 7886470b9c Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 dc96700725 公司基本信息 4 éve
  许家凯 84776f7d24 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 3bb5d4b7f9 add 4 éve
  许家凯 cf827cf81e 整理package 4 éve
  许家凯 883b17a97c Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 bd41fa12b4 摘要写出到hbase,公司基本信息更新到ads hbase和es 4 éve
  许家凯 6f69375a53 添加环境切换 4 éve
  许家凯 455d676beb Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 f188f2bb81 add 4 éve
  许家凯 aa86eb7b01 摘要 4 éve
  许家凯 6db783b08c 增量数据结合调度 4 éve
  xufei 95d3848b79 增量cids生成 4 éve
  yongnian b567fae904 fix 4 éve
  yongnian 146b9fcc49 增量单表(Type1)维度的复制方案 4 éve
  xufei 120168df92 增量数据更新 4 éve
  许家凯 044c2fef7a fix:jdbc phoenix 4 éve
  许家凯 e57fb79980 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 0fd6f59d2d add 4 éve
  xufei 7e888e1f26 Merge remote-tracking branch 'origin/master' 4 éve
  xufei 072d10626b 增加知识产权处理逻辑 4 éve
  许家凯 c001fb1a34 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 51612201b5 加入工具类,优化写出es 4 éve
  许家凯 6862604a91 写出索引到es 4 éve
  xufei 1cb7989763 Merge remote-tracking branch 'origin/master' 4 éve
  xufei 4a9c209180 新增模型字段 4 éve
  许家凯 acc7d7770a 加入隐式转换,写出phoenix两种方式。jdbc和in-memory 4 éve
  许家凯 e85368defd fix bugs 4 éve
  许家凯 c56a8ccbc4 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 863d0e998a odpsOps方式写入phoenix 4 éve
  yongnian 076e6ef15d 另一种spark读取phoenix方式绕开阿里云的bug 4 éve
  许家凯 58bf807021 add phoenix 4 éve
  许家凯 663dabba20 add phoenix test 4 éve
  许家凯 9d73ec20da 代码和依赖分离 4 éve
  许家凯 86c6c195df Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 df5c5ab443 hbase 依赖 4 éve
  许家凯 0d5b27e6c1 修改groupid 精简项目 4 éve
  xufei c2603b0992 软件著作权 4 éve
  许家凯 1c365c5664 local模式下读写hbase 4 éve
  xufei 2ab43f0b0b 基本信息评分修改版 4 éve
  许家凯 3cfbd0efe5 Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 de4438f583 公司名称映射关系计算 4 éve
  许家凯 d83bc175d2 更新mongodb 4 éve
  许家凯 da73f5b922 修复异常 4 éve
  许家凯 99acb8f87b a 4 éve
  许家凯 157bf5a727 fix bugs 4 éve
  许家凯 06d991e03c Merge branch 'master' of http://139.224.213.4:3000/bigdata/Spark_Max 4 éve
  许家凯 9b18b48c04 add 4 éve
  许家凯 6ea56f908e alter sparkUtils 4 éve
  许家凯 95e6684aa4 添加摘要计算 4 éve
  许家凯 b5c40d8963 add appName 4 éve
  许家凯 e6102fdbc6 alter name 4 éve
  xufei 0a9880c83f init commit 4 éve
70 módosított fájl, 2504 hozzáadás és 29 törlés
  1. 5 0
      src/main/scala/com/winhc/bigdata/spark/jobs/chance/ChangeExtract.scala
  2. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/chance/table/zxr_evaluate.scala
  3. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/chance/table/zxr_evaluate_results.scala
  4. 11 2
      src/main/scala/com/winhc/bigdata/spark/jobs/judicial/JudicialCaseRelationPre12.scala
  5. 61 21
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/CompanyMonitor.scala
  6. 47 3
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/CompanyMonitorHandle.scala
  7. 373 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/ChangeExtractAll.scala
  8. 79 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/CompanyChangeHandle1.scala
  9. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/auction_tracking_list.scala
  10. 32 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/bankruptcy_open_case.scala
  11. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_bid_list.scala
  12. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_reg_list.scala
  13. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_works_list.scala
  14. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_court_open_announcement_list.scala
  15. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_dishonest_info.scala
  16. 19 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_equity_info_list.scala
  17. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_finance.scala
  18. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder.scala
  19. 20 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder_v2.scala
  20. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_announcement.scala
  21. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_mortgage.scala
  22. 42 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_mortgage_info.scala
  23. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_patent_list.scala
  24. 27 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_tm.scala
  25. 21 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_list.scala
  26. 30 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_restrict.scala
  27. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/increase_registered_capital_info.scala
  28. 28 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/wenshu_detail_combine.scala
  29. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate.scala
  30. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate_results.scala
  31. 320 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/CompanyMonitor.scala
  32. 37 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/auction_tracking_list.scala
  33. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/bankruptcy_open_case.scala
  34. 77 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company.scala
  35. 21 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_bid_list.scala
  36. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_reg_list.scala
  37. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_works_list.scala
  38. 29 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_court_open_announcement_list.scala
  39. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_dishonest_info.scala
  40. 34 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_equity_info_list.scala
  41. 23 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_finance.scala
  42. 76 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_holder_v2.scala
  43. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_announcement.scala
  44. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage.scala
  45. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage_v2.scala
  46. 31 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_mortgage_info.scala
  47. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_patent_list.scala
  48. 25 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_tm.scala
  49. 36 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_list.scala
  50. 27 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_restrict.scala
  51. 32 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/increase_registered_capital_info.scala
  52. 47 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine.scala
  53. 51 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine_v2.scala
  54. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate.scala
  55. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate_results.scala
  56. 12 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/auction_tracking_list.scala
  57. 2 1
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company.scala
  58. 7 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_equity_info_list.scala
  59. 11 2
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_holder_v2.scala
  60. 7 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_announcement.scala
  61. 7 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_mortgage.scala
  62. 7 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_mortgage_v2.scala
  63. 7 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_mortgage_info.scala
  64. 11 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_zxr_list.scala
  65. 2 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_zxr_restrict.scala
  66. 6 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/increase_registered_capital_info.scala
  67. 10 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/wenshu_detail_combine.scala
  68. 12 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/wenshu_detail_combine_v2.scala
  69. 24 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/zxr_evaluate.scala
  70. 35 0
      src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/zxr_evaluate_results.scala

+ 5 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/chance/ChangeExtract.scala

@@ -373,6 +373,11 @@ object ChangeExtract {
     , Args(tableName = "company_annual_report_out_investment", primaryFields = "main_id")
     , Args(tableName = "increase_registered_capital_info", primaryFields = "change_time")
     , Args(tableName = "auction_tracking_list", primaryFields = "auction_items_id")
+
+
+    , Args(tableName = "zxr_evaluate", primaryFields = "name,case_no,asset_name")
+    , Args(tableName = "zxr_evaluate_results", primaryFields = "name,case_no,asset_name")
+
   )
 
 

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/chance/table/zxr_evaluate.scala

@@ -0,0 +1,23 @@
+
+package com.winhc.bigdata.spark.jobs.chance.table
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: lyb
+ * @Date: 2021-01-07
+ * @Description:询价
+ */
+
+case class zxr_evaluate(equCols: Seq[String]) extends CompanyChangeHandle with Serializable  {
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"询价评估发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"新增询价评估")
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.getTags(newMap, "询价评估", Array("case_no","asset_type", "asset_name","insert_time" ))
+
+  override def getBizTime(newMap: Map[String, String]): String = {
+    DateUtils.getBizDate(newMap("insert_time"), newMap("update_time"))
+  }
+}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/chance/table/zxr_evaluate_results.scala

@@ -0,0 +1,23 @@
+
+package com.winhc.bigdata.spark.jobs.chance.table
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: lyb
+ * @Date: 2021-01-07
+ * @Description:询价
+ */
+
+case class zxr_evaluate_results(equCols: Seq[String]) extends CompanyChangeHandle with Serializable  {
+  override def getUpdateTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"询价评估结果发生变更")
+
+  override def getInsertTitle(newMap: Map[String, String]): String = getValueOrNull(newMap("name"), s"新增询价评估结果")
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = ChangeExtractUtils.getTags(newMap, "询价评估结果", Array("case_no","asset_type", "asset_name","publish_time", "money" ))
+
+  override def getBizTime(newMap: Map[String, String]): String = {
+    DateUtils.getBizDate(newMap("publish_time"), newMap("update_time"))
+  }
+}

+ 11 - 2
src/main/scala/com/winhc/bigdata/spark/jobs/judicial/JudicialCaseRelationPre12.scala

@@ -74,8 +74,17 @@ case class JudicialCaseRelationPre12(s: SparkSession, project: String
          |         ,case_create_time as date
          |         ,rowkey as detail_id
          |         ,exec_amount as case_amt
-         |      from $project.inc_ads_company_zxr_final_case
-         |      where length(case_no) > 0 and ds > '0'
+         |      from
+         |      (
+         |        select case_no,name,court_name,case_create_time,rowkey,update_time,exec_amount
+         |        from $project.ads_company_zxr_final_case
+         |        where length(case_no) > 0 and ds > '0'
+         |        union all
+         |        select case_no,name,court_name,case_create_time,rowkey,update_time,exec_amount
+         |        from $project.inc_ads_company_zxr_final_case
+         |        where length(case_no) > 0 and ds > '0'
+         |      )
+         |
          |      union all
          |      select
          |         md5(cleanup(case_no)) as judicase_id

+ 61 - 21
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/CompanyMonitor.scala

@@ -48,7 +48,9 @@ object CompanyMonitor {
            |    change_time     STRING COMMENT '变更时间',
            |    biz_id          STRING COMMENT '业务id,数据行id',
            |    info_risk_level STRING COMMENT '变更风险等级',
-           |    create_time     STRING COMMENT '创建时间'
+           |    create_time     STRING COMMENT '创建时间',
+           |    amt             STRING COMMENT '金额'
+           |
            |)
            |COMMENT '企业财务监控输出表'
            |PARTITIONED BY
@@ -134,7 +136,7 @@ object CompanyMonitor {
           result.map(res => Row(CompanyDynamicHandleUtils.getDynamicId(res._1, res._5, res._7, res._6),
             res._1, res._2, res._3, res._4,
             res._5.replaceAll("null", ""), res._6, res._7, res._8, res._9,
-            DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss")))
+            DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss"), res._10))
         }
       })
 
@@ -150,30 +152,66 @@ object CompanyMonitor {
         , "info_risk_level" -> StringType
         , "type" -> StringType
         , "create_time" -> StringType
+        , "amt" -> StringType
       ))
       spark.createDataFrame(rdd, schema)
         .createOrReplaceTempView(s"company_monitor_tmp_$tableName")
 
       unescapeHtml4()
-      sql(
-        s"""
-           |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
-           |SELECT  id
-           |        , cid
-           |        , cname
-           |        , table_type
-           |        , flow_type
-           |        , unescapeHtml4(rta_desc) rta_desc
-           |        , change_time
-           |        , biz_id
-           |        , info_risk_level
-           |        , type
-           |        , create_time
-           |FROM
-           |    company_monitor_tmp_$tableName
-           |WHERE id IS NOT NULL
-           |AND   to_timestamp(change_time) <= now()
-           |""".stripMargin)
+
+      if(tableName.equals("company_zxr_restrict") || tableName.equals("company_dishonest_info")){
+        var dataFlag = 3
+        if(tableName.equals("company_zxr_restrict")){
+          dataFlag = 5
+        }
+        sql(
+          s"""
+             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
+             |SELECT  id
+             |        , cid
+             |        , cname
+             |        , table_type
+             |        , flow_type
+             |        , unescapeHtml4(rta_desc) rta_desc
+             |        , change_time
+             |        , biz_id
+             |        , info_risk_level
+             |        , type
+             |        , create_time
+             |        , if(case_amt is null, 0, case_amt/10000)
+             |FROM
+             |    (
+             |     select c.*, d.case_amt,  ROW_NUMBER() OVER (PARTITION BY c.id  ORDER BY d.case_amt DESC)  num from
+             |          ( SELECT a.*, b.judicase_id FROM  company_monitor_tmp_$tableName a
+             |            LEFT JOIN winhc_eci_dev.ads_judicial_case_relation_replace_cids b  ON a.biz_id = b.detail_id where b.ds = '${getLastPartitionsOrElse("winhc_eci_dev.ads_judicial_case_relation_replace_cids", "0")}' and b.flag = '$dataFlag'
+             |          ) c LEFT JOIN winhc_eci.ads_judicial_case_relation_r1 d ON c.judicase_id = d.judicase_id
+             |    )
+             |WHERE id IS NOT NULL
+             |AND   to_timestamp(change_time) <= now() and num = 1
+             |""".stripMargin)
+      }else{
+        sql(
+          s"""
+             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
+             |SELECT  id
+             |        , cid
+             |        , cname
+             |        , table_type
+             |        , flow_type
+             |        , unescapeHtml4(rta_desc) rta_desc
+             |        , change_time
+             |        , biz_id
+             |        , info_risk_level
+             |        , type
+             |        , create_time
+             |        , amt
+             |FROM
+             |    company_monitor_tmp_$tableName
+             |WHERE id IS NOT NULL
+             |AND   to_timestamp(change_time) <= now()
+             |""".stripMargin)
+      }
+
     }
   }
 
@@ -200,6 +238,8 @@ object CompanyMonitor {
     , Args(tableName = "company_land_mortgage_v2", bName = 1)
     , Args(tableName = "auction_tracking_list", bName = 1)
     , Args(tableName = "increase_registered_capital_info", bName = 1)
+    , Args(tableName = "zxr_evaluate", bName = 1)
+    , Args(tableName = "zxr_evaluate_results", bName = 1)
 
   )
 

+ 47 - 3
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/CompanyMonitorHandle.scala

@@ -1,5 +1,12 @@
 package com.winhc.bigdata.spark.jobs.monitor
 
+import java.util
+import java.util.Collections
+
+import com.winhc.bigdata.spark.utils.BaseUtil
+import org.apache.http.entity.ContentType
+import org.apache.http.nio.entity.NStringEntity
+import org.apache.http.util.EntityUtils
 import org.apache.spark.internal.Logging
 
 /**
@@ -33,9 +40,12 @@ trait CompanyMonitorHandle extends Logging {
     , "company_holder_deleted" -> "1" //股东移除
     , "increase_registered_capital_info" -> "2" //增资记录
     , "auction_tracking_list" -> "3" //司法拍卖
+    , "zxr_evaluate" -> "3" //有资产选定询价评估机构
+    , "zxr_evaluate_results" -> "3" //有资产完成询价评估
+
   )
 
-  //财产流向 0 -> 流入 1 -> 流出
+  //财产流向 0 -> 流入 1 -> 流出  2->流向未知
   private val flow_type_map = Map(
     "wenshu_detail_combine_v2" -> "0" //新增胜诉案件
     , "company_dishonest_info" -> "1" //失信移除
@@ -55,10 +65,12 @@ trait CompanyMonitorHandle extends Logging {
     , "company_patent_list" -> "0" //专利
     , "company_copyright_reg_list" -> "0" //软件著作权
     , "company_copyright_works_list" -> "0" //作品著作权
-    , "company_holder_add" -> "0" //股东新增
+    , "company_holder_add" -> "2" //股东新增
     , "company_holder_deleted" -> "1" //股东移除
     , "increase_registered_capital_info" -> "0" //增资记录
     , "auction_tracking_list" -> "0" //司法拍卖
+    , "zxr_evaluate" -> "0" //有资产选定询价评估机构
+    , "zxr_evaluate_results" -> "0" //有资产完成询价评估
   )
 
 
@@ -85,6 +97,8 @@ trait CompanyMonitorHandle extends Logging {
     , "wenshu_detail_combine" -> "18" //WENSHU_DETAIL//ES
     , "wenshu_detail_combine_v2" -> "18" //WENSHU_DETAIL//ES
     , "auction_tracking_list" -> "19" //AUCTION_TRACKING_LIST//HBase
+    , "zxr_evaluate" -> "20" ////zxr_evaluate//HBase
+    , "zxr_evaluate_results" -> "21" ////zxr_evaluate_results//HBase
   )
 
   //类别
@@ -111,6 +125,8 @@ trait CompanyMonitorHandle extends Logging {
     , "wenshu_detail_combine_v2" -> "20" //新增胜诉案件
     , "company_land_mortgage_v2" -> "21" //接受了他人土地抵押
     , "auction_tracking_list" -> "22" //有资产即将被司法拍卖
+    , "zxr_evaluate" -> "23" //有资产选定询价评估机构
+    , "zxr_evaluate_results" -> "24" //有资产完成询价评估
   )
 
   /**
@@ -131,7 +147,7 @@ trait CompanyMonitorHandle extends Logging {
    * info_risk_level
    * type
    */
-  def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String)] = {
+  def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
     val rta_desc = get_rta_desc(old_map, new_map)
     if (rta_desc == null) {
       return Seq.empty
@@ -146,6 +162,7 @@ trait CompanyMonitorHandle extends Logging {
         , get_biz_id(rowkey, new_map)
         , get_info_risk_level(old_map, new_map)
         , get_type()
+        , getMoney(new_map)
       ))
     } catch {
       case e: Exception => {
@@ -235,4 +252,31 @@ trait CompanyMonitorHandle extends Logging {
   def get_conditional_filter(): String = ""
 
 
+  /**
+   * Hook for per-table handlers: extract the money amount (as a string) from the
+   * changed row. Dimensions without an amount keep the default "0".
+   */
+  protected def getMoney(new_map: Map[String, String]) : String = "0"
+
+  /**
+   * Sanitize a raw amount string into a value that is safe to parse as a number.
+   * Rules (behavior identical to the previous implementation):
+   *  - null input -> "0"
+   *  - strip every character except digits and '.'
+   *  - empty / more than one '.' / leading or trailing '.' -> "0.0"
+   *  - anything still not parseable as Double -> "0" (defensive; with the checks
+   *    above this should not occur)
+   */
+  protected def processMoney(amt : String) :String ={
+    if (amt == null) {
+      "0"
+    } else {
+      // keep only digits and decimal points
+      val cleaned = amt.replaceAll("[^\\d.]", "")
+      val candidate =
+        if (cleaned.isEmpty || cleaned.split("\\.").length > 2 || cleaned.startsWith(".") || cleaned.endsWith(".")) "0.0"
+        else cleaned
+      // immutable validation instead of the old unused `var x = ...toDouble` + catch
+      scala.util.Try(candidate.toDouble).map(_ => candidate).getOrElse("0")
+    }
+  }
+
+
 }

+ 373 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/ChangeExtractAll.scala

@@ -0,0 +1,373 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.utils._
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.functions.col
+import org.apache.spark.sql.types.{MapType, StringType, StructField, StructType}
+import org.apache.spark.sql.{DataFrame, Row, SparkSession}
+
+import scala.annotation.meta.getter
+import scala.collection.mutable
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/7 11:25
+ * @Description: 筛选出数据的具体变更
+ */
+object ChangeExtractAll {
+
+  // Splits the grouped rows of one key into (new, old) by their "change_flag":
+  // "0" marks the new-side row, "1" the old-side row; a missing side yields null.
+  def getDoubleDataMap(iterable: Iterable[Map[String, String]]): (Map[String, String], Map[String, String]) = {
+    val byFlag = iterable.map(m => m("change_flag") -> m).toMap
+    (byFlag.getOrElse("0", null), byFlag.getOrElse("1", null))
+  }
+
+  /**
+   * Reflectively instantiates the per-table handler class
+   * `...alldata.change.tables.$tableName(equCols)` and exposes it through a
+   * structural type with a single `handle` method.
+   * NOTE(review): structural types dispatch via runtime reflection (slow); the
+   * call site in `calc` below uses `ReflectUtils.getClazz[CompanyChangeHandle1]`
+   * instead, so this method appears to be dead code kept for reference.
+   */
+  def getHandleClazz(tableName: String, equCols: Seq[String]): {def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String])} = {
+    val clazz = s"com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables.$tableName"
+    val foo = Class.forName(clazz)
+      .getConstructors.head.newInstance(equCols)
+      .asInstanceOf[ {
+      def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String])
+    }]
+    foo
+  }
+
+
+  /**
+   * Full-volume change extraction for one dimension table.
+   * Merges ads_<table> (last full partition) with inc_ads_<table> (newer
+   * partitions), keeps the latest row per primary key, runs the per-table
+   * handler to produce change records, and writes them to
+   * ads_change_extract_all PARTITION(ds, tn).
+   */
+  case class ChangeExtractHandle(s: SparkSession,
+                                 project: String, // ODPS project the tables live in
+                                 tableName1: String, // table name (without ads_/inc_ads_ prefix)
+                                 primaryKey: String, // primary key of this dimension
+                                 inc_ds: String, // partition (ds) to compute
+                                 primaryFields: Seq[String] // key fields: a difference in any of them marks the row as changed
+                                ) extends LoggingUtils with Logging {
+    @(transient@getter) val spark: SparkSession = s
+
+
+    // Output table, partitioned by ds and tn (= source table name).
+    val target_eci_change_extract = "ads_change_extract_all"
+
+    // Per-table override of the column used to pick the latest row (default: update_time).
+    val updateTimeMapping = Map(
+      "wenshu_detail_combine" -> "update_date", // judgment documents sort by update_date
+      "company_equity_info_list" -> "reg_date" // equity pledge sorts by reg_date
+    )
+    // Logical table-name alias -> physical table name.
+    val tabMapping =
+      Map("company_holder_v2" -> "company_holder"// v2 alias of company_holder
+      )
+
+    // Resolve an alias to the physical table name; identity when unmapped.
+    def trans(s: String): String = {
+      var res = s
+      if (tabMapping.contains(s)) {
+        res = tabMapping(s)
+      }
+      res
+    }
+
+    /**
+     * Runs the extraction for this dimension.
+     * @param isCopy true  -> rewrite company ids through inc_ads_company
+     *                        (cid -> current_cid) before diffing;
+     *               false -> read the table as-is.
+     */
+    def calc(isCopy: Boolean = true): Unit = {
+      val tableName = trans(tableName1)
+      // fields compared by the handler (primary key itself excluded)
+      val cols = primaryFields.filter(!_.equals(primaryKey)).seq
+
+      val ds = inc_ds.replace("-", "")
+
+      // only columns present in BOTH the full and incremental tables are usable
+      val intersectCols = getColumns(s"$project.ads_$tableName").toSet & getColumns(s"$project.inc_ads_$tableName").toSet
+
+      val otherAllCols = intersectCols.filter(!primaryKey.equals(_)).toSeq
+      val all_cols = primaryKey +: otherAllCols :+ "change_flag"
+
+      val lastDs_ads_all = getLastPartitionsOrElse(s"$project.ads_$tableName", "0")
+
+      // per-table handler, loaded reflectively by table name
+      val handle = ReflectUtils.getClazz[CompanyChangeHandle1](s"com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables.$tableName1", cols)
+      //      val handle = getHandleClazz(tableName, cols)
+
+      var condition = handle.getCondition()
+
+      val update_time = BaseUtil.nowDate()
+
+      var df: DataFrame = null
+      isCopy match {
+        case true => {
+          // cid -> current_cid mapping used to rewrite company ids
+          sql(
+            s"""
+               |SELECT  cid,current_cid as new_cid
+               |FROM    ${project}.inc_ads_company
+               |WHERE   ds = '${getLastPartitionsOrElse(s"$project.inc_ads_company", "0")}'
+               |AND     cid IS NOT NULL
+               |AND     current_cid IS NOT NULL
+               |GROUP BY cid,current_cid
+               |""".stripMargin).createOrReplaceTempView("mapping")
+
+          // pick "new_cid" over "cid" when both columns exist ("new_cid" > "cid" lexically)
+          val cid = getColumns(s"$project.ads_$tableName").filter(f => f.equals("cid") || f.equals("new_cid")).max
+
+          primaryKey.equals("rowkey") match {
+            case true => {
+              // rowkey = <cid>_<suffix>: rebuild it with the remapped cid
+              df = sql(
+                s"""
+                   |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+                   |FROM    (
+                   |
+                   |             SELECT  concat_ws('_',coalesce(mm.new_cid,tmp.$cid),split(rowkey, '_')[1]) AS rowkey
+                   |                     ,${intersectCols.diff(Set("rowkey", "cid", "new_cid")).mkString(",")}
+                   |                     ,coalesce(mm.new_cid,tmp.$cid) AS new_cid
+                   |                     ,tmp.$cid as cid
+                   |                     ,c
+                   |             FROM    (
+                   |                         SELECT  a.*
+                   |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+                   |                         FROM    (
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.ads_$tableName
+                   |                                     WHERE   ds = $lastDs_ads_all  ${condition}
+                   |                                     UNION ALL
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.inc_ads_$tableName
+                   |                                     WHERE   ds > $lastDs_ads_all ${condition}
+                   |                                 ) AS a
+                   |                     ) AS tmp
+                   |             LEFT JOIN mapping mm
+                   |             ON tmp.$cid = mm.cid
+                   |             WHERE   tmp.c = 1
+                   |        ) AS t2
+                   |""".stripMargin)
+            }
+            case false => {
+              // non-rowkey primary key: only remap the cid column itself
+              df = sql(
+                s"""
+                   |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+                   |FROM    (
+                   |
+                   |             SELECT  ${intersectCols.diff(Set("rowkey", cid)).mkString(",")}
+                   |                     ,coalesce(mm.new_cid,tmp.$cid) AS $cid
+                   |             FROM    (
+                   |                         SELECT  a.*
+                   |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+                   |                         FROM    (
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.ads_$tableName
+                   |                                     WHERE   ds = $lastDs_ads_all ${condition}
+                   |                                     UNION ALL
+                   |                                     SELECT  ${intersectCols.mkString(",")}
+                   |                                     FROM    $project.inc_ads_$tableName
+                   |                                     WHERE   ds > $lastDs_ads_all ${condition}
+                   |                                 ) AS a
+                   |                     ) AS tmp
+                   |             LEFT JOIN mapping mm
+                   |             ON tmp.$cid = mm.cid
+                   |             WHERE   tmp.c = 1
+                   |        ) AS t2
+                   |""".stripMargin)
+            }
+          }
+
+
+        }
+        case false => {
+          // no company-id remapping: just dedupe to the latest row per key
+          df = sql(
+            s"""
+
+               |SELECT  t2.$primaryKey,${otherAllCols.map("t2." + _).mkString(",")},'0' as change_flag
+               |FROM
+               |   (
+               |             SELECT  tmp.*
+               |             FROM    (
+               |                         SELECT  a.*
+               |                                 ,row_number() OVER (PARTITION BY a.${primaryKey} ORDER BY ${updateTimeMapping.getOrElse(tableName, "update_time")} DESC) c
+               |                         FROM    (
+               |                                     SELECT  ${intersectCols.mkString(",")}
+               |                                     FROM    $project.ads_$tableName
+               |                                     WHERE   ds = $lastDs_ads_all ${condition}
+               |                                     UNION ALL
+               |                                     SELECT  ${intersectCols.mkString(",")}
+               |                                     FROM    $project.inc_ads_$tableName
+               |                                     WHERE   ds > $lastDs_ads_all ${condition}
+               |                                 ) AS a
+               |                     ) AS tmp
+               |             WHERE   tmp.c = 1
+               |        ) AS t2
+               |""".stripMargin)
+        }
+      }
+
+
+      // Group rows by primary key, pair up the new ("0") / old ("1") sides and
+      // let the per-table handler decide whether a change record is emitted.
+      val rdd =
+        df.select(all_cols.map(column => col(column).cast("string")): _*)
+          .rdd.map(r => {
+          (r.getAs[String](primaryKey), all_cols.map(f => (f, r.getAs[String](f))).toMap)
+        }).groupByKey()
+          .map(x => {
+            val rowkey = x._1
+            val map_list = x._2
+            //          try {
+            //            if (map_list.size == 1) {
+            //              val res = handle.handle(rowkey, null, map_list.head)
+            //              Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+            //            } else {
+            //              if (map_list.size > 2) {
+            //                logInfo("list.size > 2! rowkey:" + rowkey)
+            //              }
+            val m = getDoubleDataMap(map_list)
+
+            val new_map = m._1
+            val old_map = m._2
+            if (new_map == null && old_map == null) {
+              null
+            } else if (old_map == null) {
+              // old side missing: treat the row as an insert
+              val res = handle.handle(rowkey, null, map_list.head)
+              if (res == null) {
+                null
+              } else {
+                Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+              }
+            } else if (new_map == null) {
+              null
+            } else {
+              val res = handle.handle(rowkey, old_map, new_map)
+              if (res == null) {
+                null
+              } else {
+                Row(res._1, res._2, tableName, res._3, res._4, res._5, res._6, res._7, res._8, update_time, res._9)
+              }
+            }
+            //            }
+            /* } catch {
+               case e: Exception => {
+                 logError(s"xjk rowkey:$rowkey msg:${e.getMessage} equCols:$cols")
+                 logError(e.getMessage, e)
+                 println(s"xjk rowkey:$rowkey msg:${e.getMessage} equCols:$cols")
+               }
+                 null
+             }*/
+          }).filter(_ != null)
+
+      val schema = StructType(Array(
+        StructField("rowkey", StringType), // primary key of the source row
+        StructField("cid", StringType), // company id
+        StructField("table_name", StringType), // source table name
+        StructField("type", StringType), // change type: insert / update
+        StructField("data", MapType(StringType, StringType)), // values after the change
+        StructField("fields", StringType), // changed field names (updates only)
+        StructField("title", StringType), // display title, e.g. "new land publicity"
+        StructField("label", StringType), // 1 = generic change, 2 = risk change
+        StructField("biz_time", StringType), // business time
+        StructField("update_time", StringType), // processing time
+        StructField("old_data", MapType(StringType, StringType)) // values before the change
+      ))
+
+      spark.createDataFrame(rdd, schema)
+        .createOrReplaceTempView(s"tmp_change_all_view$tableName1") //
+
+      sql(
+        s"""
+           |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${project}.$target_eci_change_extract PARTITION(ds='$ds',tn='$tableName1')
+           |SELECT *
+           |FROM
+           |    tmp_change_all_view$tableName1
+           |""".stripMargin)
+    }
+  }
+
+
+  // winhc_eci_dev company_tm rowkey 20200717 status_new
+  // winhc_eci_dev company_patent_list rowkey 20200717 lprs
+  // winhc_eci_dev company_certificate rowkey 20200707 type
+  // winhc_eci_dev company_copyright_works_list rowkey 20200717 type
+  // winhc_eci_dev company_copyright_reg_list rowkey 20200717 version
+  // winhc_eci_dev company_employment rowkey 20200630 source
+
+  // winhc_eci_dev company_land_publicity rowkey 20200717 title,location,use_for
+  // winhc_eci_dev company_land_announcement rowkey 20200717 e_number,project_name
+
+  // winhc_eci_dev company_bid_list rowkey 20200717 title
+  // winhc_eci_dev company_land_transfer rowkey 20200717 num,location
+  // winhc_eci_dev company_abnormal_info rowkey 20200717 remove_reason
+
+  // winhc_eci_dev company_own_tax rowkey 20200729 tax_balance,tax_category,tax_num
+
+
+  //winhc_eci_dev company_equity_info id 20200730 reg_number false
+
+
+  // winhc_eci_dev company cid 20200630 legal_entity_id,reg_location,business_scope,reg_status,reg_capital,emails,phones
+
+
+  // Dimensions processed by this job; commented-out entries are currently disabled.
+  private val startArgs = Seq(
+//    Args(tableName = "company_tm", primaryFields = "status_new")
+//    , Args(tableName = "company_patent_list", primaryFields = "lprs")
+     Args(tableName = "company_land_announcement", primaryFields = "e_number,project_name")
+    , Args(tableName = "company_bid_list", primaryFields = "title")
+    , Args(tableName = "company_zxr_list", primaryFields = "status")
+//    , Args(tableName = "company_copyright_works_list", primaryFields = "type")
+//    , Args(tableName = "company_copyright_reg_list", primaryFields = "version")
+    , Args(tableName = "company_land_mortgage", primaryFields = "land_num,source_url")
+    , Args(tableName = "bankruptcy_open_case", primaryFields = "case_no", isCopy = false) // bankruptcy / reorganization
+    , Args(tableName = "company_mortgage_info", primaryFields = "reg_num") // chattel mortgage
+    , Args(tableName = "company_court_open_announcement_list", primaryFields = "case_reason") // court open-session announcement
+    , Args(tableName = "company_zxr_restrict", primaryFields = "status") // consumption-restriction order, latest status
+
+
+    , Args(tableName = "wenshu_detail_combine", primaryFields = "cname") // judgment documents
+
+
+
+    , Args(tableName = "company_equity_info_list", primaryFields = "reg_number")
+    // company name, legal-entity id (person or company), company type, registered address, business-term end date, business scope, registration authority, company status, registered capital, deregistration date, deregistration reason
+    , Args(tableName = "company_finance", primaryFields = "round")
+    , Args(tableName = "company_dishonest_info", primaryFields = "status")
+//    , Args(tableName = "company_holder", primaryFields = "amount")
+//    , Args(tableName = "company_holder_v2", primaryFields = "deleted")
+    , Args(tableName = "increase_registered_capital_info", primaryFields = "change_time")
+    , Args(tableName = "auction_tracking_list", primaryFields = "auction_items_id")
+
+
+    , Args(tableName = "zxr_evaluate", primaryFields = "name,case_no,asset_name")
+    , Args(tableName = "zxr_evaluate_results", primaryFields = "name,case_no,asset_name")
+
+  )
+
+
+  /**
+   * Per-dimension job arguments.
+   * @param project       ODPS project name
+   * @param tableName     dimension table (without the ads_/inc_ads_ prefix)
+   * @param primaryKey    primary-key column, usually "rowkey"
+   * @param primaryFields comma-separated fields whose change triggers a record
+   * @param isCopy        whether cid must be remapped through inc_ads_company (see calc)
+   */
+  private case class Args(project: String = "winhc_eci_dev"
+                          , tableName: String
+                          , primaryKey: String = "rowkey"
+                          , primaryFields: String
+                          , isCopy: Boolean = true)
+
+
+  /**
+   * Entry point. args = (tableName, inc_ds):
+   * tableName is "all" or a comma-separated whitelist of dimension tables;
+   * inc_ds is the partition to compute.
+   */
+  def main(args: Array[String]): Unit = {
+    val Array(tableName, inc_ds) = args
+
+    // Spark/ODPS session configuration: ES settings plus project + local-partition hint.
+    val config = EsConfig.getEsConfigMap ++ mutable.Map(
+      "spark.hadoop.odps.project.name" -> "winhc_eci_dev",
+      "spark.hadoop.odps.spark.local.partition.amt" -> "100"
+    )
+    val spark = SparkUtils.InitEnv("MonitorChangeAll", config)
+
+    // "all" runs every registered dimension; otherwise keep only the listed ones.
+    val selected =
+      if (tableName.equals("all")) startArgs
+      else {
+        val wanted = tableName.split(",").toSet
+        startArgs.filter(arg => wanted.contains(arg.tableName))
+      }
+
+    // One named task per dimension, executed concurrently.
+    val jobs = selected.map(arg => (arg.tableName, () => {
+      ChangeExtractHandle(spark, arg.project, arg.tableName, arg.primaryKey, inc_ds, arg.primaryFields.split(",")).calc(arg.isCopy)
+      true
+    }))
+
+    AsyncExtract.startAndWait(spark, jobs)
+
+    spark.stop()
+  }
+}

+ 79 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/CompanyChangeHandle1.scala

@@ -0,0 +1,79 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change
+
+import com.winhc.bigdata.spark.utils.BaseUtil.cleanup
+import org.apache.commons.lang3.StringUtils
+import org.apache.spark.internal.Logging
+
+import scala.annotation.meta.{getter, setter}
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+/**
+ * Base contract for per-table change handlers used by ChangeExtractAll.
+ * Subclasses supply titles, label, business time and optional SQL predicate;
+ * this trait implements the generic diff logic.
+ */
+trait CompanyChangeHandle1 extends Serializable with Logging {
+  @getter
+  @setter
+  protected val equCols: Seq[String] // fields compared (after cleanup) to decide "changed"
+
+  /**
+   * Builds the change record for one row.
+   *
+   * @param rowkey row key of the dimension table
+   * @param oldMap previous values; null means the row is brand new
+   * @param newMap current values
+   * @return (rowkey, cid, "insert"|"update", newData, changedFields, title, label, bizTime, oldData),
+   *         or null when there is no usable biz time or nothing changed
+   */
+  def handle(rowkey: String, oldMap: Map[String, String], newMap: Map[String, String]): (String, String, String, Map[String, String], String, String, String, String, Map[String, String]) = {
+    // compute once instead of twice as before (subclass implementations are pure)
+    val bizTime = getBizTime(newMap)
+    if (bizTime == null) {
+      null
+    } else if (oldMap == null) {
+      (rowkey, getCid(rowkey, newMap), "insert", newMap, null, getInsertTitle(newMap), getLabel(oldMap, newMap), bizTime, null)
+    } else {
+      val (unchanged, changedFields) = getEquAndFields(oldMap, newMap)
+      if (unchanged) {
+        null
+      } else {
+        (rowkey, getCid(rowkey, newMap), "update", newMap, changedFields, getUpdateTitle(newMap), getLabel(oldMap, newMap), bizTime, oldMap)
+      }
+    }
+  }
+
+  /** Company id; by default the first '_'-separated segment of the rowkey. */
+  def getCid(rowkey: String, newMap: Map[String, String]): String = rowkey.split("_")(0)
+
+  /** Human-readable title for an update event. */
+  def getUpdateTitle(newMap: Map[String, String]): String
+
+  /** Human-readable title for an insert event. */
+  def getInsertTitle(newMap: Map[String, String]): String
+
+  /** Change label (e.g. "1" generic change, "2" risk change). */
+  def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String
+
+  /** Business timestamp of the change; returning null suppresses the record. */
+  def getBizTime(newMap: Map[String, String]): String
+
+  /**
+   * Compares the configured equCols (after cleanup) between old and new values.
+   * @return (true, null) when every field is equal — including the empty-equCols
+   *         case, which previously crashed on `reduce` of an empty list —
+   *         otherwise (false, comma-joined names of the differing fields)
+   */
+  def getEquAndFields(oldMap: Map[String, String], newMap: Map[String, String]): (Boolean, String) = {
+    val changed = equCols.filter(f => !cleanup(newMap(f)).equals(cleanup(oldMap(f))))
+    if (changed.isEmpty) {
+      (true, null)
+    } else {
+      (false, changed.mkString(","))
+    }
+  }
+
+  /** Returns callBack only when value is non-blank, otherwise null. */
+  protected def getValueOrNull(value: String, callBack: String): String = {
+    if (StringUtils.isNotBlank(value)) {
+      callBack
+    } else {
+      null
+    }
+  }
+
+  /** Extra SQL predicate appended to the source query; empty by default. */
+  def getCondition():String ={
+    ""
+  }
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/auction_tracking_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/11
+ * @Description:司法拍卖
+ */
+
+// Judicial-auction (司法拍卖) change handler.
+case class auction_tracking_list(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {
+
+  // Business time: auction end time, falling back to update_time.
+  override def getBizTime(newMap: Map[String, String]): String = {
+    val endTime = newMap("end_time")
+    val updateTime = newMap("update_time")
+    DateUtils.getBizDate(endTime, updateTime)
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.getTags(newMap, "司法拍卖", Array("auction_items_id"))
+
+  override def getInsertTitle(newMap: Map[String, String]): String = "新增司法拍卖"
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = "司法拍卖发生变更"
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/bankruptcy_open_case.scala

@@ -0,0 +1,32 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/8/5
+ * @Description:
+ */
+
+
+//破产公告
+
+// Bankruptcy / reorganization (破产重整) change handler.
+case class bankruptcy_open_case(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.getTags(newMap, "破产重整", Array("case_no", "case_type", "agency_court", "applicant", "respondent", "public_date"))
+
+  // Business time: publication date, falling back to update_time.
+  override def getBizTime(newMap: Map[String, String]): String =
+    DateUtils.getNotNullStr(newMap("public_date"), newMap("update_time"))
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = {
+    val caseNo = newMap("case_no")
+    getValueOrNull(caseNo, s"${caseNo}破产重整发生变更")
+  }
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val caseNo = newMap("case_no")
+    getValueOrNull(caseNo, s"新增${caseNo}破产重整")
+  }
+}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_bid_list.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: Yan Yongnian
+ * @Date: 2020/7/31
+ * @Description:
+ */
+
+
+//招投标
+
+// Bid / tender (招投标) change handler.
+case class company_bid_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = {
+    val title = newMap("title")
+    getValueOrNull(title, s"${title}招投标信息发生变更")
+  }
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val title = newMap("title")
+    getValueOrNull(title, s"新增${title}招投标信息")
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.getTags(newMap, "招投标", Array("publish_time", "title", "purchaser", "province", "abs"))
+
+  // Business time: publish time, falling back to update_time.
+  override def getBizTime(newMap: Map[String, String]): String =
+    DateUtils.getNotNullStr(newMap("publish_time"), newMap("update_time"))
+
+  // Only rows published since 2020 are considered.
+  override def getCondition(): String = " AND publish_time >= '2020-01-01'"
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_reg_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//软件著作权
+
+// Software-copyright (软件著作权) change handler.
+case class company_copyright_reg_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = {
+    val fullName = newMap("full_name")
+    getValueOrNull(fullName, s"${fullName}软件著作权发生变更")
+  }
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val fullName = newMap("full_name")
+    getValueOrNull(fullName, s"新增${fullName}软件著作权")
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.get_ip_tags("软件著作权", newMap("full_name"), newMap("reg_time"), newMap("reg_num"))
+
+  // Business time: registration time.
+  override def getBizTime(newMap: Map[String, String]): String = newMap("reg_time")
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_copyright_works_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/7/9 16:44
+ * @Description:
+ */
+
+
+//作品著作权
+
+// Works-copyright (作品著作权) change handler.
+case class company_copyright_works_list(equCols: Seq[String]) extends CompanyChangeHandle1 {
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = {
+    val name = newMap("name")
+    getValueOrNull(name, s"${name}作品著作权发生变更")
+  }
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val name = newMap("name")
+    getValueOrNull(name, s"新增${name}作品著作权")
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.get_ip_tags("作品著作权", newMap("name"), newMap("reg_time"), newMap("reg_num"))
+
+  // Business time: registration time.
+  override def getBizTime(newMap: Map[String, String]): String = newMap("reg_time")
+}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_court_open_announcement_list.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: π
+ * @Date: 2020/8/11
+ * @Description:开庭公告
+ */
+
+// Court open-session announcement (开庭公告) change handler.
+case class company_court_open_announcement_list(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = {
+    val caseNo = newMap("case_no")
+    getValueOrNull(caseNo, s"${caseNo}开庭公告发生变更")
+  }
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val caseNo = newMap("case_no")
+    getValueOrNull(caseNo, s"新增${caseNo}开庭公告")
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.getTags(newMap, "开庭公告", Array("case_no", "start_date"))
+
+  // Business time: hearing start date, falling back to update_time.
+  override def getBizTime(newMap: Map[String, String]): String =
+    DateUtils.getBizDate(newMap("start_date"), newMap("update_time"))
+
+  // Only hearings scheduled since 2020 are considered.
+  override def getCondition(): String = " AND start_date >= '2020-01-01'"
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_dishonest_info.scala

@@ -0,0 +1,25 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
+/**
+ * @Author: XuJiakai
+ * @Date: 2020/8/12 18:50
+ * @Description: 企业失信被执
+ */
+// Dishonest-debtor (企业失信被执) change handler.
+case class company_dishonest_info(equCols: Seq[String]) extends CompanyChangeHandle1 {
+
+  override def getUpdateTitle(newMap: Map[String, String]): String = "企业失信被执发生变化"
+
+  override def getInsertTitle(newMap: Map[String, String]): String = {
+    val court = newMap.getOrElse("court", "")
+    s"新增1条企业失信信息:${court}"
+  }
+
+  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
+    ChangeExtractUtils.getTags(newMap, "企业失信被执", Array("name", "case_no", "court", "gist_dd", "gist_unit"))
+
+  // Business time: first non-null of pub_date / reg_time / appro_time / update_time.
+  override def getBizTime(newMap: Map[String, String]): String =
+    DateUtils.getNotNullStr(newMap("pub_date"), newMap("reg_time"), newMap("appro_time"), newMap("update_time"))
+
+  // Only currently-dishonest rows (status = 1) are considered.
+  override def getCondition(): String = " AND status =1"
+}

+ 19 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_equity_info_list.scala

@@ -0,0 +1,19 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
// Equity-pledge (股权出质) rows, flattened per company.
case class company_equity_info_list(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Company id comes from the row data rather than the rowkey.
  override def getCid(rowkey: String, newMap: Map[String, String]): String = newMap("cid")

  // NOTE(review): insert and update share the same title text — confirm this is intentional.
  override def getUpdateTitle(newMap: Map[String, String]): String = "股权出质信息发生变更"

  override def getInsertTitle(newMap: Map[String, String]): String = "股权出质信息发生变更"

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields =
      Array("pledgor", "pledgee", "cid", "pledgor_id", "pledgee_type", "pledgee_id", "pledgor_type")
    ChangeExtractUtils.getTags(newMap, "股权出质", taggedFields)
  }

  // Business time is the pledge registration date.
  override def getBizTime(newMap: Map[String, String]): String = newMap("reg_date")
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_finance.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
/**
 * @Author: XuJiakai
 * @Date: 2020/8/12 17:42
 * @Description: funding-history (融资历史) change handler.
 */
case class company_finance(equCols: Seq[String]) extends CompanyChangeHandle1 {

  override def getUpdateTitle(newMap: Map[String, String]): String = "融资历史发生变化"

  // Missing round / investor fields degrade to empty strings.
  // NOTE(review): "inverstors" is the upstream column spelling — do not "fix" the key.
  override def getInsertTitle(newMap: Map[String, String]): String = {
    val round = newMap.getOrElse("round", "")
    val investors = newMap.getOrElse("inverstors", "")
    s"获得了${round}融资,由${investors}投资"
  }

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("company_name", "finance_time", "money", "round", "inverstors")
    ChangeExtractUtils.getTags(newMap, "融资历史", taggedFields)
  }

  // Business time is the report date.
  override def getBizTime(newMap: Map[String, String]): String = newMap("report_date")
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
/**
 * @Author: XuJiakai
 * @Date: 2020/8/19 14:10
 * @Description: shareholder (股东) change handler.
 */
case class company_holder(equCols: Seq[String]) extends CompanyChangeHandle1 {

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("holder_id", "holder_type", "amount", "capital", "capital_actual")
    ChangeExtractUtils.getTags(newMap, "股东信息", taggedFields)
  }

  override def getUpdateTitle(newMap: Map[String, String]): String = "股东发生更新"

  override def getInsertTitle(newMap: Map[String, String]): String = "新增股东"

  // Business time is the record's update time.
  override def getBizTime(newMap: Map[String, String]): String = newMap("update_time")
}

+ 20 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_holder_v2.scala

@@ -0,0 +1,20 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
/**
 * @Author: π
 * @Date: 2020/12/10
 * @Description: shareholder added / removed (股东新增-移除) change handler.
 */
case class company_holder_v2(equCols: Seq[String]) extends CompanyChangeHandle1 {

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("holder_id", "holder_type", "amount", "capital", "capital_actual")
    ChangeExtractUtils.getTags(newMap, "股东信息", taggedFields)
  }

  // An "update" here represents a shareholder being removed.
  override def getUpdateTitle(newMap: Map[String, String]): String = "股东移除"

  override def getInsertTitle(newMap: Map[String, String]): String = "新增股东"

  // Business time is the record's update time.
  override def getBizTime(newMap: Map[String, String]): String = newMap("update_time")
}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_announcement.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: Yan Yongnian
 * @Date: 2020/8/4
 * @Description: land-purchase announcement (购地信息) change handler.
 */
case class company_land_announcement(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Titles fall back to null when project_name is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val projectName = newMap("project_name")
    getValueOrNull(projectName, s"${projectName}购地信息发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val projectName = newMap("project_name")
    getValueOrNull(projectName, s"新增${projectName}购地信息")
  }

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("project_name", "project_loc", "area", "tran_price", "e_number")
    ChangeExtractUtils.getTags(newMap, "购地信息", taggedFields)
  }

  // Business time: first non-null of actual start time / update time.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("actual_start_time"), newMap("update_time"))

  // Restrict to contracts signed since 2018.
  override def getCondition(): String = " AND contract_date >= '2018-01-01'"
}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_land_mortgage.scala

@@ -0,0 +1,31 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: Yan Yongnian
 * @Date: 2020/8/4
 * @Description: land-mortgage (土地抵押) change handler.
 */
case class company_land_mortgage(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Titles fall back to null when land_num is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val landNum = newMap("land_num")
    getValueOrNull(landNum, s"${landNum}土地抵押发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val landNum = newMap("land_num")
    getValueOrNull(landNum, s"新增${landNum}土地抵押")
  }

  // NOTE(review): "land_aministrative_area" is the upstream column spelling — keep as-is.
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("land_mark", "land_num", "land_aministrative_area", "land_loc",
      "land_area", "use_right_num", "use_for", "area", "evaluate_amount", "mortgage_amount",
      "start_date", "end_date")
    ChangeExtractUtils.getTags(newMap, "土地抵押", taggedFields)
  }

  // Business time: first non-null of mortgage start date / update time.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("start_date"), newMap("update_time"))

  // Restrict to mortgages starting in 2018 or later.
  override def getCondition(): String = " AND start_date >= '2018-01-01'"
}

+ 42 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_mortgage_info.scala

@@ -0,0 +1,42 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
/**
 * @Author: Yan Yongnian
 * @Date: 2020/8/10
 * @Description: chattel-mortgage (动产抵押) change handler.
 */
case class company_mortgage_info(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("reg_num", "reg_date", "publish_date", "amount", "overview_amount")
    ChangeExtractUtils.getTags(newMap, "动产抵押", taggedFields)
  }

  // Business time preference: publish_date, else reg_date, else update_time.
  // orElse/getOrElse arguments are by-name, so the fallbacks are only looked up when needed,
  // exactly like the original nested null checks.
  override def getBizTime(newMap: Map[String, String]): String =
    Option(newMap("publish_date"))
      .orElse(Option(newMap("reg_date")))
      .getOrElse(newMap("update_time"))

  // Titles fall back to null when reg_num is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val regNum = newMap("reg_num")
    getValueOrNull(regNum, s"${regNum}动产抵押发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val regNum = newMap("reg_num")
    getValueOrNull(regNum, s"新增${regNum}动产抵押")
  }
}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_patent_list.scala

@@ -0,0 +1,25 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
/**
 * @Author: XuJiakai
 * @Date: 2020/7/9 16:44
 * @Description: patent (专利) change handler.
 */
case class company_patent_list(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Titles fall back to null when title is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val patentTitle = newMap("title")
    getValueOrNull(patentTitle, s"${patentTitle}专利发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val patentTitle = newMap("title")
    getValueOrNull(patentTitle, s"新增${patentTitle}专利")
  }

  // IP-style label: name, application date, application number.
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.get_ip_tags("专利", newMap("title"), newMap("app_date"), newMap("app_number"))

  // Business time is the application date.
  override def getBizTime(newMap: Map[String, String]): String = newMap("app_date")
}

+ 27 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_tm.scala

@@ -0,0 +1,27 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: XuJiakai
 * @Date: 2020/7/9 16:44
 * @Description: trademark (商标) change handler.
 */
case class company_tm(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Titles fall back to null when tm_name is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val tmName = newMap("tm_name")
    getValueOrNull(tmName, s"${tmName}商标发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val tmName = newMap("tm_name")
    getValueOrNull(tmName, s"新增${tmName}商标")
  }

  // IP-style label: name, application date, registration number.
  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String =
    ChangeExtractUtils.get_ip_tags("商标", newMap("tm_name"), newMap("app_date"), newMap("reg_no"))

  // Business time: application date with update_time as fallback.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("app_date"), newMap("update_time"))
}

+ 21 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_list.scala

@@ -0,0 +1,21 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.ChangeExtractUtils
+
// Enforced-person (被执行人) change handler.
case class company_zxr_list(equCols: Seq[String]) extends CompanyChangeHandle1 {

  // Titles fall back to null when cname is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val companyName = newMap("cname")
    getValueOrNull(companyName, s"${companyName}被执行人发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val companyName = newMap("cname")
    getValueOrNull(companyName, s"新增${companyName}被执行人")
  }

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("case_create_time", "case_no", "exec_money")
    ChangeExtractUtils.getTags(newMap, "成为被执行人", taggedFields)
  }

  // Business time is the case creation time.
  override def getBizTime(newMap: Map[String, String]): String = newMap("case_create_time")

  // Only rows still flagged active are monitored.
  override def getCondition(): String = " AND status =1"
}

+ 30 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/company_zxr_restrict.scala

@@ -0,0 +1,30 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: Yan Yongnian
 * @Date: 2020/8/14
 * @Description: consumption-restriction order (限制消费令) change handler.
 */
case class company_zxr_restrict(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("name", "identity_num", "court_name", "case_create_time", "case_no")
    ChangeExtractUtils.getTags(newMap, "被限制高消费", taggedFields)
  }

  // Business time: first non-null of case creation time / update time.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getNotNullStr(newMap("case_create_time"), newMap("update_time"))

  // Titles fall back to null when case_no is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"${caseNo}限制消费令发生变更")
  }

  override def getInsertTitle(newMap: Map[String, String]): String = {
    val caseNo = newMap("case_no")
    getValueOrNull(caseNo, s"新增${caseNo}限制消费令")
  }

  // Only rows still flagged active are monitored.
  override def getCondition(): String = " AND status =1"
}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/increase_registered_capital_info.scala

@@ -0,0 +1,23 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: π
 * @Date: 2020/8/11
 * @Description: registered-capital increase (增资记录) change handler.
 */
case class increase_registered_capital_info(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  // Titles fall back to null when change_item is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("change_item"), "注册资本发生变更")

  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("change_item"), "增资记录")

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("change_item", "change_time")
    ChangeExtractUtils.getTags(newMap, "增资记录", taggedFields)
  }

  // Business time: change time with update_time as fallback.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("change_time"), newMap("update_time"))
}

+ 28 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/wenshu_detail_combine.scala

@@ -0,0 +1,28 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: π
 * @Date: 2020/8/18
 * @Description: court-judgment document (裁判文书) change handler.
 */
case class wenshu_detail_combine(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  // Both titles are the fixed text; null is returned when cname is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("cname"), "裁判文书")

  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("cname"), "裁判文书")

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("case_no", "cname")
    ChangeExtractUtils.getTags(newMap, "裁判文书", taggedFields)
  }

  // Business time: judgment date with update_date as fallback.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("judge_date"), newMap("update_date"))

  // Restrict to documents crawled since 2020.
  override def getCondition(): String = " AND crawl_date >= '2020-01-01'"
}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate.scala

@@ -0,0 +1,24 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: lyb
 * @Date: 2021-01-07
 * @Description: asset-appraisal inquiry (询价评估) change handler.
 */
case class zxr_evaluate(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  // Titles fall back to null when name is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "询价评估发生变更")

  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "新增询价评估")

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("case_no", "asset_type", "asset_name", "insert_time")
    ChangeExtractUtils.getTags(newMap, "询价评估", taggedFields)
  }

  // Business time: insert time with update_time as fallback.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("insert_time"), newMap("update_time"))
}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/change/tables/zxr_evaluate_results.scala

@@ -0,0 +1,24 @@
+
+package com.winhc.bigdata.spark.jobs.monitor.alldata.change.tables
+
+import com.winhc.bigdata.spark.jobs.chance.CompanyChangeHandle
+import com.winhc.bigdata.spark.jobs.monitor.alldata.change.CompanyChangeHandle1
+import com.winhc.bigdata.spark.utils.{ChangeExtractUtils, DateUtils}
+
/**
 * @Author: lyb
 * @Date: 2021-01-07
 * @Description: asset-appraisal result (询价评估结果) change handler.
 */
case class zxr_evaluate_results(equCols: Seq[String]) extends CompanyChangeHandle1 with Serializable {

  // Titles fall back to null when name is absent (via getValueOrNull).
  override def getUpdateTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "询价评估结果发生变更")

  override def getInsertTitle(newMap: Map[String, String]): String =
    getValueOrNull(newMap("name"), "新增询价评估结果")

  override def getLabel(oldMap: Map[String, String], newMap: Map[String, String]): String = {
    val taggedFields = Array("case_no", "asset_type", "asset_name", "publish_time", "money")
    ChangeExtractUtils.getTags(newMap, "询价评估结果", taggedFields)
  }

  // Business time: publish time with update_time as fallback.
  override def getBizTime(newMap: Map[String, String]): String =
    DateUtils.getBizDate(newMap("publish_time"), newMap("update_time"))
}

+ 320 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/CompanyMonitor.scala

@@ -0,0 +1,320 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor
+
+import java.util.Date
+
+import com.winhc.bigdata.spark.config.EsConfig
+import com.winhc.bigdata.spark.jobs.dynamic.CompanyDynamicHandleUtils
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+import com.winhc.bigdata.spark.udf.BaseFunc
+import com.winhc.bigdata.spark.utils.BaseUtil.isWindows
+import com.winhc.bigdata.spark.utils.ReflectUtils.getClazz
+import com.winhc.bigdata.spark.utils.{AsyncExtract, LoggingUtils, SparkUtils}
+import org.apache.commons.lang3.time.DateFormatUtils
+import org.apache.spark.internal.Logging
+import org.apache.spark.sql.types.StringType
+import org.apache.spark.sql.{Row, SparkSession}
+
+import scala.annotation.meta.getter
+import scala.collection.immutable.ListMap
+import scala.collection.mutable
+
/**
 * @Author: π
 * @Date: 2020/12/8
 * @Description: company asset monitoring (企业财产监控). For each source table, reads the
 *               day's change-extract rows, runs the table-specific monitor handler, and
 *               writes the results into the monitor output table partitioned by (ds, tn).
 */
object CompanyMonitor {
  // NOTE(review): env is hard-coded to "dev" — confirm before production runs.
  val env = "dev"
  val targetTab = "ads_company_monitor"

  case class CompanyMonitorUtil(s: SparkSession,
                                project: String, // project (workspace) the tables live in
                                ds: String // partition value for this run
                               ) extends LoggingUtils with Logging with BaseFunc {
    @(transient@getter) val spark: SparkSession = s


    // Creates the output table if missing.
    // NOTE(review): this DDL declares 11 data columns, but the INSERT statements below
    // select 12 (table_type/type vs the single info_type here). init() is commented out
    // in main(), so the live table presumably has the 12-column layout — verify.
    def init(): Unit = {
      sql(
        s"""
           |CREATE TABLE IF NOT EXISTS ${getEnvProjectName(env, project)}.$targetTab
           |(
           |    id              STRING COMMENT '唯一标示',
           |    cid             STRING COMMENT '公司id',
           |    cname           STRING COMMENT '公司name',
           |    info_type       STRING COMMENT '变更分类,大类',
           |    flow_type       STRING COMMENT '财产流向',
           |    rta_desc        STRING COMMENT '变更信息描述,变更标题',
           |    change_time     STRING COMMENT '变更时间',
           |    biz_id          STRING COMMENT '业务id,数据行id',
           |    info_risk_level STRING COMMENT '变更风险等级',
           |    amt             STRING COMMENT '金额',
           |    create_time     STRING COMMENT '创建时间'
           |)
           |COMMENT '企业财务监控输出表'
           |PARTITIONED BY
           |(
           |    ds              STRING COMMENT '分区',
           |    tn              STRING COMMENT '表名'
           |)
           |LIFECYCLE 30
           |""".stripMargin)
    }

    // Handler-name -> physical source table name (tn) in ads_change_extract_all.
    val tabMapping =
      Map("wenshu_detail_combine_v2" -> "wenshu_detail_combine" // won lawsuits
        , "company_land_mortgage_v2" -> "company_land_mortgage" // land mortgage, mortgagee side
      )

    // Resolve a handler table name to its physical source table name.
    def trans(s: String): String = {
      var res = s
      if (tabMapping.contains(s)) {
        res = tabMapping(s)
      }
      res
    }

    // Process one source table (name without pre/suffix).
    def calc(tableName: String
             , bName: Int = 0 // whether to back-fill cname (1 = join base_company_mapping)
            ): Unit = {
      // Handler class is looked up by reflection under .monitor.tables.<tableName>.
      val handle = getClazz[CompanyMonitorHandle](s"com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables.$tableName")

      val types = handle.org_type()
      val conditional = handle.get_conditional_filter()
      val tn = trans(tableName)

      val rdd = sql(
        bName match {
          // Default: no cname back-fill needed.
          case 0 =>
            s"""
               |SELECT  *,null AS cname
               |FROM    ${project}.ads_change_extract_all
               |WHERE   ds = '$ds'
               |AND     tn = '$tn'
               |AND     TYPE in (${types.map("'" + _ + "'").mkString(",")})
               |$conditional
               |""".stripMargin
          // Back-fill cname from base_company_mapping via cid.
          case 1 =>
            s"""
               |SELECT A.*,B.cname AS cname
               |FROM(
               |  SELECT  *
               |  FROM    ${project}.ads_change_extract_all
               |  WHERE   ds = '$ds'
               |  AND     tn = '$tn'
               |  AND     TYPE in (${types.map("'" + _ + "'").mkString(",")})
               |  $conditional
               |) AS A
               |LEFT JOIN (
               |    SELECT cid,cname FROM  $project.base_company_mapping
               |    WHERE ds = '${getLastPartitionsOrElse(project + ".base_company_mapping", "0")}'
               |) AS B
               |ON A.cid = B.cid
               |""".stripMargin

        })
        .rdd.flatMap(r => {
        val rowkey = r.getAs[String]("rowkey")
        val cid = r.getAs[String]("cid")
        val new_data = r.getAs[Map[String, String]]("data")
        val old_data = r.getAs[Map[String, String]]("old_data")
        val biz_date = r.getAs[String]("biz_date")
        val fields = r.getAs[String]("fields")
        val cname = r.getAs[String]("cname")

        // createTime preference: update_time/update_date, then create_time/crawl_date,
        // then biz_date.
        var createTime = ""
        if(new_data.contains("update_time")){
          createTime = new_data("update_time")
        }else if(new_data.contains("update_date")){
          createTime = new_data("update_date")
        }

        if(createTime == null || createTime.trim.equals("") || createTime.equalsIgnoreCase("null")){
          if(new_data.contains("create_time")){
            createTime = new_data("create_time")
          }else if(new_data.contains("crawl_date")){
            createTime = new_data("crawl_date")
          }
        }

        if(createTime == null || createTime.trim.equals("") || createTime.equalsIgnoreCase("null")){
          createTime = biz_date
        }
        // NOTE(review): this `None` is a discarded expression, not a return — the record
        // is NOT skipped when createTime is still empty; execution falls through below.
        if(createTime == null || createTime.trim.equals("")|| createTime.equalsIgnoreCase("null")){
          None
        }



        // NOTE(review): same issue — `None` here is discarded and handle() still runs
        // with a null biz_date. Filtering relies on handle()/the SQL below instead.
        if (biz_date == null)
          None
        val result = handle.handle(rowkey, biz_date, cid, if (fields == null) null else fields.split(","), old_data, new_data, cname)
        if (result == null) {
          None
        }
        else {
          // Row layout must match `schema` below; id is derived from (cid, desc, biz_id, change_time).
          result.map(res => Row(CompanyDynamicHandleUtils.getDynamicId(res._1, res._5, res._7, res._6),
            res._1, res._2, res._3, res._4,
            res._5.replaceAll("null", ""), res._6, res._7, res._8, res._9,
            createTime, res._10))
        }
      })

      val schema = getSchema(ListMap(
        "id" -> StringType
        , "cid" -> StringType
        , "cname" -> StringType
        , "table_type" -> StringType
        , "flow_type" -> StringType
        , "rta_desc" -> StringType
        , "change_time" -> StringType
        , "biz_id" -> StringType
        , "info_risk_level" -> StringType
        , "type" -> StringType
        , "create_time" -> StringType
        , "amt" -> StringType
      ))
      spark.createDataFrame(rdd, schema)
        .createOrReplaceTempView(s"company_monitor_tmp1_$tableName")

      // Registers the unescapeHtml4 UDF used in the INSERT statements.
      unescapeHtml4()

      // Dishonest-debtor and consumption-restriction tables additionally join the judicial
      // case relation tables to attach the case amount (amt, converted to 万).
      if(tableName.equals("company_zxr_restrict") || tableName.equals("company_dishonest_info")){
        // flag selects the relation row type: 3 = dishonest info, 5 = zxr restrict.
        var dataFlag = 3
        if(tableName.equals("company_zxr_restrict")){
          dataFlag = 5
        }
        sql(
          s"""
             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
             |SELECT  id
             |        , cid
             |        , cname
             |        , table_type
             |        , flow_type
             |        , unescapeHtml4(rta_desc) rta_desc
             |        , change_time
             |        , biz_id
             |        , info_risk_level
             |        , type
             |        , create_time
             |        , if(case_amt is null, 0, case_amt/10000)
             |FROM
             |    (
             |     select c.*, d.case_amt,  ROW_NUMBER() OVER (PARTITION BY c.id  ORDER BY d.case_amt DESC)  num from
             |          ( SELECT a.*, b.judicase_id FROM  company_monitor_tmp1_$tableName a
             |            LEFT JOIN winhc_eci_dev.ads_judicial_case_relation_replace_cids b  ON a.biz_id = b.detail_id where b.ds = '${getLastPartitionsOrElse("winhc_eci_dev.ads_judicial_case_relation_replace_cids", "0")}' and b.flag = '$dataFlag'
             |          ) c LEFT JOIN winhc_eci.ads_judicial_case_relation_r1 d ON c.judicase_id = d.judicase_id
             |    )
             |WHERE id IS NOT NULL
             |AND   to_timestamp(change_time) <= now() and num = 1
             |""".stripMargin)
      }else{
        sql(
          s"""
             |INSERT ${if (isWindows) "INTO" else "OVERWRITE"} TABLE ${getEnvProjectName(env, project)}.$targetTab PARTITION(ds='$ds',tn='$tableName')
             |SELECT  id
             |        , cid
             |        , cname
             |        , table_type
             |        , flow_type
             |        , unescapeHtml4(rta_desc) rta_desc
             |        , change_time
             |        , biz_id
             |        , info_risk_level
             |        , type
             |        , create_time
             |        , amt
             |FROM
             |    company_monitor_tmp1_$tableName
             |WHERE id IS NOT NULL
             |AND   to_timestamp(change_time) <= now()
             |""".stripMargin)
      }

    }
  }

  // Full set of handler tables; "all" on the command line runs every entry.
  private val startArgs = Seq(
    Args(tableName = "wenshu_detail_combine_v2", bName = 1)
    , Args(tableName = "company_dishonest_info", bName = 1)
    , Args(tableName = "company_zxr_list", bName = 1)
    , Args(tableName = "company_zxr_restrict", bName = 1)
    , Args(tableName = "company_court_open_announcement_list", bName = 1)
    , Args(tableName = "wenshu_detail_combine", bName = 1)
    , Args(tableName = "company_equity_info_list", bName = 1)
    , Args(tableName = "company_land_mortgage", bName = 1)
    , Args(tableName = "company_land_announcement", bName = 1)
    , Args(tableName = "company_finance", bName = 1)
    , Args(tableName = "bankruptcy_open_case", bName = 1)
    , Args(tableName = "company_bid_list", bName = 1)
    , Args(tableName = "company_mortgage_info", bName = 1)
    , Args(tableName = "company_tm", bName = 1)
    , Args(tableName = "company_patent_list", bName = 1)
    , Args(tableName = "company_copyright_reg_list", bName = 1)
    , Args(tableName = "company_copyright_works_list", bName = 1)
    , Args(tableName = "company_holder_v2", bName = 1)
    //, Args(tableName = "company", bName = 1)
    , Args(tableName = "company_land_mortgage_v2", bName = 1)
    , Args(tableName = "auction_tracking_list", bName = 1)
    , Args(tableName = "increase_registered_capital_info", bName = 1)
    , Args(tableName = "zxr_evaluate", bName = 1)
    , Args(tableName = "zxr_evaluate_results", bName = 1)

  )

  private case class Args(project: String = "winhc_eci_dev"
                          , tableName: String
                          , bName: Int = 1
                          , aggs: Int = 0)

  // Entry point: <project> <tableNames|all> <ds>
  def main(args: Array[String]): Unit = {


    if (args.length != 3) {
      println(
        s"""
           |Please enter the legal parameters !
           |<project> <tableNames> <ds>
           |""".stripMargin)
      sys.exit(-99)
    }

    val Array(project, tableNames, ds) = args

    println(
      s"""
         |project: $project
         |tableNames: $tableNames
         |ds: $ds
         |""".stripMargin)

    val config = EsConfig.getEsConfigMap ++ mutable.Map(
      "spark.hadoop.odps.project.name" -> project,
      "spark.hadoop.odps.spark.local.partition.amt" -> "1000"
    )
    val spark = SparkUtils.InitEnv("CompanyMonitor", config)
    val cd = CompanyMonitorUtil(spark, project, ds)
    //cd.init()

    // Optionally restrict to a comma-separated subset of tables.
    var start = startArgs
    if (!tableNames.equals("all")) {
      val set = tableNames.split(",").toSet
      start = start.filter(a => set.contains(a.tableName))
    }

    // Each table is processed as an async task; all run through the generic calc() path.
    val a = start.map(e => (e.tableName, () => {
      e.aggs match {
        case _ => cd.calc(e.tableName, e.bName) // generic handling
      }
      true
    }))

    AsyncExtract.startAndWait(spark, a)
    spark.stop()
  }
}

+ 37 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/auction_tracking_list.scala

@@ -0,0 +1,37 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
/**
 * @Date: 2020/12/14
 * @Description: judicial-auction (司法拍卖) monitor handler.
 */
case class auction_tracking_list() extends CompanyMonitorHandle {

  /**
   * Human-readable description: title, starting price and auction end time,
   * one item per line.
   */
  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
    Seq(
      s"标题:${new_map("auction_title")}",
      s"起拍价:${new_map("initial_price")}",
      s"拍卖时间:${new_map("end_time")}"
    ).mkString("\n")


  //override def org_type(): Seq[String] = Seq("insert","update")


  /**
   * Appraisal price converted to 万 (units of ten thousand), formatted to two
   * decimals; "0" when the raw value is null.
   */
  override protected def getMoney(new_map: Map[String, String]): String =
    Option(new_map("query_price")) match {
      case None => "0"
      case Some(raw) =>
        val cleaned = processMoney(raw)
        (cleaned.toDouble / 10000).formatted("%.2f")
    }

}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/bankruptcy_open_case.scala

@@ -0,0 +1,23 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
/**
 * @Date: 2020/8/12 18:50
 * @Description: bankruptcy announcement (破产公告) monitor handler.
 */
case class bankruptcy_open_case() extends CompanyMonitorHandle {

  /**
   * Human-readable description: case number, applicant and publication date,
   * one item per line.
   */
  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
    Seq(
      s"案号:${new_map("case_no")}",
      s"申请人:${new_map("applicant")}",
      s"公开日期:${new_map("public_date")}"
    ).mkString("\n")


}

+ 77 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company.scala

@@ -0,0 +1,77 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+import com.winhc.bigdata.spark.utils.RegCapitalAmount
+import org.apache.commons.lang3.StringUtils
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: registration data (工商数据) — emits an event only when
+ *               registered capital strictly increases between snapshots.
+ */
+case class company() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable description of the change (new capital + change time).
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""注册资本:${new_map("reg_capital")}
+       |变更时间:${new_map("update_time")}""".stripMargin
+
+  // Registered-capital increase (注册资本上升) only.
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
+    try {
+      // Fix: compareAmount now runs inside the try block. Previously it ran
+      // before it, so a missing "reg_capital" key (Map.apply throws) or a
+      // malformed amount escaped handle instead of being logged and skipped.
+      if (!compareAmount(old_map, new_map)) {
+        Seq.empty
+      } else {
+        Seq((cid
+          , cname
+          , get_table_type()
+          , get_flow_type()
+          , get_rta_desc(old_map, new_map)
+          , get_change_time(bizDate, new_map)
+          , get_biz_id(rowkey, new_map)
+          , get_info_risk_level(old_map, new_map)
+          , get_type()
+          , getMoney(new_map)
+        ))
+      }
+    } catch {
+      case e: Exception =>
+        logError(e.getMessage, e)
+        Seq.empty
+    }
+  }
+
+  /**
+   * Source-table change types to monitor: updates only.
+   *
+   * @return
+   */
+  override def org_type(): Seq[String] = {
+    Seq("update")
+  }
+
+  /**
+   * True iff registered capital strictly increased between the two snapshots.
+   * Null-safe: a null map, missing key, blank value or unparseable amount all
+   * yield false instead of throwing.
+   */
+  def compareAmount(old_map: Map[String, String], new_map: Map[String, String]): Boolean = {
+    import scala.util.Try
+    if (old_map == null || new_map == null) {
+      return false
+    }
+    val increased = for {
+      oldRaw <- old_map.get("reg_capital") if StringUtils.isNotBlank(oldRaw)
+      newRaw <- new_map.get("reg_capital") if StringUtils.isNotBlank(newRaw)
+      oldNum <- Option(RegCapitalAmount.getAmount(oldRaw)) if StringUtils.isNotBlank(oldNum)
+      newNum <- Option(RegCapitalAmount.getAmount(newRaw)) if StringUtils.isNotBlank(newNum)
+      cmp <- Try(oldNum.toDouble < newNum.toDouble).toOption
+    } yield cmp
+    increased.getOrElse(false)
+  }
+
+}

+ 21 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_bid_list.scala

@@ -0,0 +1,21 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: bidding / tendering (招投标)
+ */
+case class company_bid_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: the tender title from the new snapshot.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""标题:${new_map("title")}""".stripMargin
+  }
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_reg_list.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: software copyright registration (软件著作权)
+ */
+case class company_copyright_reg_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description built from the new snapshot
+   * (registration number, full and short software names).
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""登记号:${new_map("reg_num")}
+       |软件名称:${new_map("full_name")}
+       |软件简称:${new_map("simple_name")}""".stripMargin
+  }
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_copyright_works_list.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: works copyright (著作权)
+ */
+case class company_copyright_works_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description built from the new snapshot
+   * (work name, category and registration date).
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""作品名称:${new_map("name")}
+       |作品类别:${new_map("type")}
+       |登记日期:${new_map("reg_time")}""".stripMargin
+  }
+}

+ 29 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_court_open_announcement_list.scala

@@ -0,0 +1,29 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: resumed-enforcement case about to open in court (有恢复执行案件即将开庭)
+ */
+case class company_court_open_announcement_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: plaintiff, defendant, case number and cause.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""上诉人/原告:${new_map("plaintiff")}
+       |被上诉人/被告:${new_map("defendant")}
+       |案号:${new_map("case_no")}
+       |案由:${new_map("case_reason")}""".stripMargin
+  }
+
+  // Restrict to case numbers containing 恢 (resumed-enforcement cases).
+  override def get_conditional_filter(): String = {
+    "AND  data['case_no'] like concat('%','恢','%')"
+  }
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_dishonest_info.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: company dishonesty record (企业失信)
+ */
+case class company_dishonest_info() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, performance status,
+   * publication date and dishonest conduct.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案号:${new_map("case_no")}
+       |履行情况:${new_map("performance")}
+       |发布日期:${new_map("pub_date")}
+       |失信行为:${new_map("action_content")}""".stripMargin
+  }
+}

+ 34 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_equity_info_list.scala

@@ -0,0 +1,34 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: equity pledge, pledgor side (股权出质人)
+ */
+case class company_equity_info_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: registration date, pledgee,
+   * target company and pledged amount.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""股权出质登记日期:${new_map("reg_date")}
+       |质权人:${new_map("pledgee")}
+       |出质股权标的企业:${new_map("target")}
+       |出质股权数额:${new_map("equity_amount")}""".stripMargin
+  }
+
+  // Restrict to pledgor-side rows (type = 1).
+  override def get_conditional_filter(): String = {
+    "AND data['type'] = '1'"
+  }
+
+  // Monetary value: the pledged equity amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("equity_amount"))
+}

+ 23 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_finance.scala

@@ -0,0 +1,23 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: financing information (融资信息)
+ */
+case class company_finance() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: amount, round and investors.
+   * NOTE(review): "inverstors" is spelled exactly as the source-data key —
+   * it must not be "corrected" here without changing the upstream schema.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""融资金额:${new_map("money")}
+       |轮次:${new_map("round")}
+       |投资人:${new_map("inverstors")}""".stripMargin
+  }
+}

+ 76 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_holder_v2.scala

@@ -0,0 +1,76 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: shareholder (股东) monitor handler
+ */
+case class company_holder_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Source-table change types to monitor: both inserts and updates.
+   *
+   * @return
+   */
+  override def org_type(): Seq[String] = Seq("insert", "update")
+
+  /**
+   * Human-readable event description: subscribed amount and subscription time.
+   * NOTE(review): Map.apply throws on a missing key — assumes "amount" and
+   * "update_time" are always present; confirm against the source table.
+   *
+   * Output tuple layout produced by handle below:
+   * cid, cname, table_type, flow_type, rta_desc, change_time, biz_id,
+   * info_risk_level, type, money
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""|认缴金额:${new_map("amount")}
+        |认缴出资时间:${new_map("update_time")}""".stripMargin
+
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
+    val rta_desc = get_rta_desc(old_map, new_map)
+    if (rta_desc == null) {
+      // Defensive guard: with the current interpolated-string implementation
+      // rta_desc can never be null, so this branch is effectively dead.
+      return Seq.empty
+    }
+
+    try {
+      // flag == true when no changed-field list was supplied; presumably this
+      // marks an insert-like change vs. an update — TODO confirm with caller.
+      var flag = false
+      if (change_fields == null) {
+        flag = true
+      }
+      Seq((cid
+        , cname
+        //, if (flag) "company_holder_add" else "company_holder_deleted"
+        // NOTE(review): both branches yield "10" — table_type is identical either
+        // way; looks intentional (see commented-out line above) but worth confirming.
+        , if (flag) "10" else "10"
+        , if (flag) "2" else "1"
+        , rta_desc
+        , get_change_time(bizDate, new_map)
+        , get_biz_id(rowkey, new_map)
+        , if (flag) "2" else "1"
+        , if (flag) "10" else "11"
+        , getMoney(new_map)
+      ))
+    } catch {
+      case e: Exception => {
+        logError(e.getMessage, e)
+      }
+        // Any failure while assembling the tuple drops the row silently (logged only).
+        Seq.empty
+    }
+  }
+
+  // Monetary value: the subscribed amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
+
+}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_announcement.scala

@@ -0,0 +1,31 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: land purchase information (购地信息)
+ */
+case class company_land_announcement() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: project name, area, transaction price
+   * and contract signing date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""项目名称:${new_map("project_name")}
+       |面积(公顷):${new_map("area")}
+       |成交价格(万元):${new_map("tran_price")}
+       |合同签订日期:${new_map("contract_date")}""".stripMargin
+  }
+
+  // Monetary value: the transaction price, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("tran_price"))
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: land mortgage, mortgagor side (土地抵押-抵押人)
+ */
+case class company_land_mortgage() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: administrative area, land/mortgaged
+   * areas and the appraised/mortgage amounts.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""所在行政区:${new_map("land_aministrative_area")}
+       |土地面积(公顷):${new_map("land_area")}
+       |抵押面积(公顷):${new_map("area")}
+       |评估金额(万元):${new_map("evaluate_amount")}
+       |抵押金额(万元):${new_map("mortgage_amount")}""".stripMargin
+  }
+
+  // Restrict to mortgagor-side rows.
+  override def get_conditional_filter(): String = {
+    "AND  data['type'] in ('bothone','mortgagor') "
+  }
+
+  // Monetary value: the mortgage amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("mortgage_amount"))
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_land_mortgage_v2.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: land mortgage, mortgagee side (土地抵押-抵押权人)
+ */
+case class company_land_mortgage_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: administrative area, land/mortgaged
+   * areas and the appraised/mortgage amounts.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""所在行政区:${new_map("land_aministrative_area")}
+       |土地面积(公顷):${new_map("land_area")}
+       |抵押面积(公顷):${new_map("area")}
+       |评估金额(万元):${new_map("evaluate_amount")}
+       |抵押金额(万元):${new_map("mortgage_amount")}""".stripMargin
+  }
+
+  // Restrict to mortgagee-side rows.
+  override def get_conditional_filter(): String = {
+    "AND  data['type'] in ('bothtwo','mortgagee') "
+  }
+
+  // Monetary value: the mortgage amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("mortgage_amount"))
+}

+ 31 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_mortgage_info.scala

@@ -0,0 +1,31 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: chattel mortgage (动产抵押)
+ */
+case class company_mortgage_info() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: registration number, secured amount,
+   * registration authority and status.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""登记编号:${new_map("reg_num")}
+       |被担保债权数额:${new_map("amount")}
+       |登记机关:${new_map("reg_department")}
+       |状态:${new_map("status")}""".stripMargin
+  }
+
+  // Monetary value: the secured amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("amount"))
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_patent_list.scala

@@ -0,0 +1,25 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: patent (专利)
+ */
+case class company_patent_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: application number, publication number,
+   * patent title and publication date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""申请号:${new_map("app_number")}
+       |公开公告号:${new_map("pub_number")}
+       |专利名称:${new_map("title")}
+       |公开公告日:${new_map("pub_date")}""".stripMargin
+  }
+}

+ 25 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_tm.scala

@@ -0,0 +1,25 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: trademark (商标)
+ */
+case class company_tm() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: trademark name, application date,
+   * registration number and applicant.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""商标名:${new_map("tm_name")}
+       |申请日期:${new_map("app_date")}
+       |注册号:${new_map("reg_no")}
+       |申请人:${new_map("applicant_cn")}""".stripMargin
+  }
+}

+ 36 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_list.scala

@@ -0,0 +1,36 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: company as judgment debtor (企业被执)
+ */
+case class company_zxr_list() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: update date, enforcement target amount,
+   * case number and filing date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""更新日期:${new_map("update_time")}
+       |执行标的:${new_map("exec_money")}
+       |案号:${new_map("case_no")}
+       |立案日期:${new_map("case_create_time")}""".stripMargin
+
+  /**
+   * exec_money converted to 万元 with two decimals.
+   * Fix: guard against a null exec_money value (same pattern used by
+   * auction_tracking_list) — previously processMoney(null)/toDouble could
+   * throw and the whole row would be dropped by the caller's catch.
+   */
+  override protected def getMoney(new_map: Map[String, String]): String = {
+    val raw = new_map("exec_money")
+    if (raw == null) "0"
+    else (processMoney(raw).toDouble / 10000).formatted("%.2f")
+  }
+}

+ 27 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/company_zxr_restrict.scala

@@ -0,0 +1,27 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: consumption restriction order (限制消费令)
+ */
+case class company_zxr_restrict() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, company name, restricted
+   * person, enforcing court and filing date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案号:${new_map("case_no")}
+       |企业名称:${new_map("company_name")}
+       |限制消费人名称:${new_map("name")}
+       |执行法院名称:${new_map("court_name")}
+       |立案日期:${new_map("case_create_time")}""".stripMargin
+  }
+}

+ 32 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/increase_registered_capital_info.scala

@@ -0,0 +1,32 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/12/14
+ * @Description: capital increase record (增资记录)
+ */
+case class increase_registered_capital_info() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: capital after and before the change,
+   * plus the change time.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""变更后资本:${new_map("content_after")}
+       |变更前资本:${new_map("content_before")}
+       |变更时间:${new_map("change_time")}""".stripMargin
+  }
+
+  // Monetary value: the post-change capital, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("content_after"))
+}

+ 47 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine.scala

@@ -0,0 +1,47 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: resumed-enforcement case, judgment documents (有恢复执行案件-文书)
+ */
+case class wenshu_detail_combine() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case title, number, court and judgment date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案件名称:${new_map("title")}
+       |案号:${new_map("case_no")}
+       |法院:${new_map("court_name")}
+       |判决日期:${new_map("judge_date")}""".stripMargin
+  }
+
+  // Restrict to case numbers containing 恢 (resumed-enforcement cases).
+  override def get_conditional_filter(): String = {
+    "AND  data['case_no'] like concat('%','恢','%')"
+  }
+
+  /**
+   * Business id: the case_id field, not the rowkey.
+   *
+   * @param rowkey
+   * @return
+   */
+  override def get_biz_id(rowkey: String, new_map: Map[String, String]): String = {
+    new_map("case_id")
+  }
+
+  // Monetary value: the case amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("case_amt"))
+}

+ 51 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/wenshu_detail_combine_v2.scala

@@ -0,0 +1,51 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2020/8/12 18:50
+ * @Description: newly won lawsuit as plaintiff (新增胜诉案件(原告))
+ */
+case class wenshu_detail_combine_v2() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case title, number, court and judgment date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案件名称:${new_map("title")}
+       |案号:${new_map("case_no")}
+       |法院:${new_map("court_name")}
+       |判决日期:${new_map("judge_date")}""".stripMargin
+  }
+
+  // Restrict to won (胜), first-instance (一审), civil (民事案件) cases
+  // where the monitored party is the plaintiff (name_type = 'y').
+  override def get_conditional_filter(): String = {
+    "AND data['is_success'] = '胜'  AND data['case_stage']= '一审'  AND  data['case_type'] = '民事案件' AND  data['name_type'] = 'y'"
+  }
+
+  /**
+   * Business id: the case_id field, not the rowkey.
+   *
+   * @param rowkey
+   * @return
+   */
+  override def get_biz_id(rowkey: String, new_map: Map[String, String]): String = {
+    new_map("case_id")
+  }
+
+  // Monetary value: the case amount, normalized by processMoney.
+  override protected def getMoney(new_map: Map[String, String]): String =
+    processMoney(new_map("case_amt"))
+}

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: asset inquiry/appraisal (询价评估)
+ */
+case class zxr_evaluate() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, asset type, asset name and date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案号:${new_map("case_no")}
+       |财产类型:${new_map("asset_type")}
+       |财产名称:${new_map("asset_name")}
+       |日期:${new_map("insert_time")}""".stripMargin
+  }
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/alldata/monitor/tables/zxr_evaluate_results.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.alldata.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: asset inquiry/appraisal result (询价评估结果)
+ */
+case class zxr_evaluate_results() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, asset type, asset name and date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""案号:${new_map("case_no")}
+       |财产类型:${new_map("asset_type")}
+       |财产名称:${new_map("asset_name")}
+       |日期:${new_map("publish_time")}""".stripMargin
+
+  /**
+   * Average of the values in the JSON-array-like "money" string, in 万元 with
+   * two decimals; "0" when no usable value exists.
+   * Fix: tolerate a missing key, an empty string and unparseable entries —
+   * previously "".toDouble (or a missing "money" key) threw and the row was lost.
+   */
+  override protected def getMoney(new_map: Map[String, String]): String = {
+    import scala.util.Try
+    val raw = new_map.getOrElse("money", null)
+    if (raw == null) {
+      "0"
+    } else {
+      val values = raw.replaceAll("\"|\\[|\\]", "")
+        .split(",")
+        .flatMap(s => Try(s.trim.toDouble).toOption)
+      if (values.isEmpty) "0"
+      else (values.sum / values.length / 10000).formatted("%.2f")
+    }
+  }
+}

+ 12 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/auction_tracking_list.scala

@@ -22,4 +22,16 @@ case class auction_tracking_list() extends CompanyMonitorHandle {
 
   //override def org_type(): Seq[String] = Seq("insert","update")
 
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("query_price")
+    if (amtstr == null){
+      "0"
+    }else {
+      amtstr = processMoney(amtstr)
+      ((amtstr.toDouble)/ 10000).formatted("%.2f")
+    }
+  }
+
 }

+ 2 - 1
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company.scala

@@ -21,7 +21,7 @@ case class company() extends CompanyMonitorHandle {
        |变更时间:${new_map("update_time")}""".stripMargin
 
   //注册资本上升
-  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String)] = {
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
     if (!compareAmount(old_map, new_map)) {
       return Seq.empty
     }
@@ -35,6 +35,7 @@ case class company() extends CompanyMonitorHandle {
         , get_biz_id(rowkey, new_map)
         , get_info_risk_level(old_map, new_map)
         , get_type()
+        , getMoney(new_map)
       ))
     } catch {
       case e: Exception => {

+ 7 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_equity_info_list.scala

@@ -24,4 +24,11 @@ case class company_equity_info_list() extends CompanyMonitorHandle {
     "AND data['type'] = '1'"
   }
 
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("equity_amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
 }

+ 11 - 2
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_holder_v2.scala

@@ -34,7 +34,7 @@ case class company_holder_v2() extends CompanyMonitorHandle {
     s"""|认缴金额:${new_map("amount")}
         |认缴出资时间:${new_map("update_time")}""".stripMargin
 
-  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String)] = {
+  override def handle(rowkey: String, bizDate: String, cid: String, change_fields: Seq[String], old_map: Map[String, String], new_map: Map[String, String], cname: String = null): Seq[(String, String, String, String, String, String, String, String, String, String)] = {
     val rta_desc = get_rta_desc(old_map, new_map)
     if (rta_desc == null) {
       return Seq.empty
@@ -49,12 +49,13 @@ case class company_holder_v2() extends CompanyMonitorHandle {
         , cname
         //, if (flag) "company_holder_add" else "company_holder_deleted"
         , if (flag) "10" else "10"
-        , if (flag) "0" else "1"
+        , if (flag) "2" else "1"
         , rta_desc
         , get_change_time(bizDate, new_map)
         , get_biz_id(rowkey, new_map)
         , if (flag) "2" else "1"
         , if (flag) "10" else "11"
+        , getMoney(new_map)
       ))
     } catch {
       case e: Exception => {
@@ -64,4 +65,12 @@ case class company_holder_v2() extends CompanyMonitorHandle {
     }
   }
 
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
+
 }

+ 7 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_announcement.scala

@@ -21,4 +21,11 @@ case class company_land_announcement() extends CompanyMonitorHandle {
        |合同签订日期:${new_map("contract_date")}""".stripMargin
 
 
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("tran_price")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
+
 }

+ 7 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_mortgage.scala

@@ -25,4 +25,11 @@ case class company_land_mortgage() extends CompanyMonitorHandle {
   override def get_conditional_filter(): String = {
     "AND  data['type'] in ('bothone','mortgagor') "
   }
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("mortgage_amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
 }

+ 7 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_land_mortgage_v2.scala

@@ -25,4 +25,11 @@ case class company_land_mortgage_v2() extends CompanyMonitorHandle {
   override def get_conditional_filter(): String = {
     "AND  data['type'] in ('bothtwo','mortgagee') "
   }
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("mortgage_amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
 }

+ 7 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_mortgage_info.scala

@@ -21,4 +21,11 @@ case class company_mortgage_info() extends CompanyMonitorHandle {
        |状态:${new_map("status")}""".stripMargin
 
 
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("amount")
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
 }

+ 11 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_zxr_list.scala

@@ -24,4 +24,15 @@ case class company_zxr_list() extends CompanyMonitorHandle {
   override def org_type() = {
     Seq("update")
   }
+
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("exec_money")
+    amtstr = processMoney(amtstr)
+    amtstr = (amtstr.toDouble/10000 ).formatted("%.2f")
+    amtstr
+
+
+  }
 }

+ 2 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/company_zxr_restrict.scala

@@ -24,4 +24,6 @@ case class company_zxr_restrict() extends CompanyMonitorHandle {
   override def org_type() = {
     Seq("update")
   }
+
+
 }

+ 6 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/increase_registered_capital_info.scala

@@ -19,5 +19,11 @@ case class increase_registered_capital_info() extends CompanyMonitorHandle {
        |变更前资本:${new_map("content_before")}
        |变更时间:${new_map("change_time")}""".stripMargin
 
+  override protected def getMoney(new_map: Map[String, String]): String = {
 
+    var amtstr = new_map("content_after")
+
+    amtstr = processMoney(amtstr)
+    amtstr
+  }
 }

+ 10 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/wenshu_detail_combine.scala

@@ -34,4 +34,14 @@ case class wenshu_detail_combine() extends CompanyMonitorHandle {
   override def get_biz_id(rowkey: String, new_map: Map[String, String]): String = {
     new_map("case_id")
   }
+
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("case_amt")
+
+    amtstr = processMoney(amtstr)
+    amtstr
+
+  }
 }

+ 12 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/wenshu_detail_combine_v2.scala

@@ -36,4 +36,16 @@ case class wenshu_detail_combine_v2() extends CompanyMonitorHandle {
     new_map("case_id")
   }
 
+
+  override protected def getMoney(new_map: Map[String, String]): String = {
+
+    var amtstr = new_map("case_amt")
+
+    amtstr = processMoney(amtstr)
+    amtstr
+
+
+
+  }
+
 }

+ 24 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/zxr_evaluate.scala

@@ -0,0 +1,24 @@
+package com.winhc.bigdata.spark.jobs.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: asset inquiry/appraisal (询价评估)
+ */
+case class zxr_evaluate() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, asset type, asset name and date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String = {
+    s"""案号:${new_map("case_no")}
+       |财产类型:${new_map("asset_type")}
+       |财产名称:${new_map("asset_name")}
+       |日期:${new_map("insert_time")}""".stripMargin
+  }
+}

+ 35 - 0
src/main/scala/com/winhc/bigdata/spark/jobs/monitor/tables/zxr_evaluate_results.scala

@@ -0,0 +1,35 @@
+package com.winhc.bigdata.spark.jobs.monitor.tables
+
+import com.winhc.bigdata.spark.jobs.monitor.CompanyMonitorHandle
+
+/**
+ * @Date: 2021/1/07 18:50
+ * @Description: asset inquiry/appraisal result (询价评估结果)
+ */
+case class zxr_evaluate_results() extends CompanyMonitorHandle {
+
+  /**
+   * Human-readable event description: case number, asset type, asset name and date.
+   */
+  override protected def get_rta_desc(old_map: Map[String, String], new_map: Map[String, String]): String =
+    s"""案号:${new_map("case_no")}
+       |财产类型:${new_map("asset_type")}
+       |财产名称:${new_map("asset_name")}
+       |日期:${new_map("publish_time")}""".stripMargin
+
+  /**
+   * Average of the values in the JSON-array-like "money" string, in 万元 with
+   * two decimals; "0" when no usable value exists.
+   * Fix: tolerate a missing key, an empty string and unparseable entries —
+   * previously "".toDouble (or a missing "money" key) threw and the row was lost.
+   */
+  override protected def getMoney(new_map: Map[String, String]): String = {
+    import scala.util.Try
+    val raw = new_map.getOrElse("money", null)
+    if (raw == null) {
+      "0"
+    } else {
+      val values = raw.replaceAll("\"|\\[|\\]", "")
+        .split(",")
+        .flatMap(s => Try(s.trim.toDouble).toOption)
+      if (values.isEmpty) "0"
+      else (values.sum / values.length / 10000).formatted("%.2f")
+    }
+  }
+}
+}