---
# Crawlab configuration (config.yaml).
# NOTE(review): reconstructed from a garbled copy — the source had lost all
# indentation and carried per-line number prefixes; nesting of `register` and
# `lang` under `server` follows the standard Crawlab 0.4.x layout — confirm
# against the deployed file.

api:
  address: "http://localhost:8000"  # API address

mongo:
  host: localhost  # MongoDB host
  port: 27017  # MongoDB port
  db: crawlab_test  # MongoDB database
  username: ""  # MongoDB username
  password: ""  # MongoDB password
  authSource: "admin"  # MongoDB auth source database

redis:
  address: localhost  # Redis host
  password: ""  # Redis password
  database: 1  # Redis database
  port: 6379  # Redis port

log:
  level: info  # log level
  path: "/var/logs/crawlab"  # log path
  isDeletePeriodically: "N"  # whether to delete logs periodically (default: no)
  deleteFrequency: "@hourly"  # log deletion frequency

server:
  host: 0.0.0.0  # backend listen address
  port: 8000  # backend listen port
  master: "Y"  # whether this node is the master node
  secret: "crawlab"  # JWT secret
  register:
    # node identity: register by MAC address or by IP address;
    # if "ip", the ip field below must be set manually
    type: "mac"
    ip: ""
  # language runtimes to install: "Y" = install, "N" = do not install
  lang:
    python: "Y"
    node: "N"
    java: "N"
    dotnet: "N"

spider:
  path: "/app/spiders"  # spider scripts directory

task:
  workers: 4  # number of executors running tasks concurrently

other:
  tmppath: "/tmp"  # temporary files directory

version: "0.4.7"  # version number (quoted so it always parses as a string)

setting:
  allowRegister: "N"  # whether user registration is allowed
  enableTutorial: "N"  # whether the tutorial is enabled
  runOnMaster: "Y"  # whether tasks may run on the master node
  demoSpiders: "N"  # whether to load demo spiders
  checkScrapy: "Y"  # whether to auto-detect Scrapy

notification:  # message notifications
  mail:  # email notification
    server: ""  # SMTP server address
    port: ""  # SMTP server port
    senderEmail: ""  # sender email address
    senderIdentity: ""  # sender identity
    smtp:  # SMTP credentials
      user: ""  # SMTP username
      password: ""  # SMTP password