api:
  address: "http://localhost:8000"  # API address
mongo:
  host: localhost        # MongoDB host
  port: 27017            # MongoDB port
  db: crawlab_test       # MongoDB database
  username: ""           # MongoDB username
  password: ""           # MongoDB password
  authSource: "admin"    # MongoDB auth source database
redis:
  address: localhost     # Redis host
  password: ""           # Redis password
  database: 1            # Redis database
  port: 6379             # Redis port
log:
  level: info                    # log level
  path: "/var/logs/crawlab"      # log path
  isDeletePeriodically: "N"      # whether to delete logs periodically (default: no)
  deleteFrequency: "@hourly"     # log deletion frequency
server:
  host: 0.0.0.0          # backend listen address
  port: 8000             # backend listen port
  master: "Y"            # whether this is the master node
  secret: "crawlab"      # JWT secret
  register:
    # MAC address or IP address; if IP is used, it must be specified manually
    type: "mac"
    ip: ""
  lang:
    # language environments to install; Y = install, N = do not install
    python: "Y"
    node: "N"
    java: "N"
    dotnet: "N"
spider:
  path: "/app/spiders"   # spider path
task:
  workers: 4             # number of executors running tasks concurrently
other:
  tmppath: "/tmp"        # temporary file directory
version: 0.4.7           # version number
setting:
  allowRegister: "N"     # whether to allow registration
  enableTutorial: "N"    # whether to enable the tutorial
  runOnMaster: "Y"       # whether to run tasks on the master node
  demoSpiders: "N"       # whether to load demo spiders
  checkScrapy: "Y"       # whether to check Scrapy automatically
notification:            # notifications
  mail:                  # email notifications
    server: ''           # SMTP server address
    port: ''             # SMTP server port
    senderEmail: ''      # sender email address
    senderIdentity: ''   # sender identity
    smtp:                # SMTP
      user: ''           # SMTP username
      password: ''       # SMTP password
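
To sanity-check edits to this file before restarting the service, a minimal sketch (assuming the config is saved as `config.yml` in the current directory and PyYAML is installed) that loads it and reads back a few of the settings documented above:

```python
# check_config.py -- hedged sketch, not part of Crawlab itself.
# Assumes ./config.yml holds the configuration shown above.
import yaml

with open("config.yml", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

# Read a few values, falling back to the documented defaults.
mongo_host = cfg.get("mongo", {}).get("host", "localhost")
redis_port = cfg.get("redis", {}).get("port", 6379)
is_master = cfg.get("server", {}).get("master", "Y") == "Y"
workers = cfg.get("task", {}).get("workers", 4)

print(f"MongoDB host : {mongo_host}")
print(f"Redis port   : {redis_port}")
print(f"Master node  : {is_master}")
print(f"Task workers : {workers}")
```

Note that flags such as `master`, `allowRegister`, and `isDeletePeriodically` are the quoted strings "Y"/"N" rather than YAML booleans, so any script inspecting them should compare against those strings as shown.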