I set this up myself on an internal network for testing.
Frontend configuration:
NODE_ENV='production'
VUE_APP_BASE_URL='192.168.0.30:8000'
VUE_APP_CRAWLAB_BASE_URL=https://api.crawlab.cn
VUE_APP_DOC_URL=http://docs.crawlab.cn
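As a quick sanity check (just a sketch, not part of the actual setup; the http:// scheme and the use of plain urllib are assumptions on my side), this can be run on the machine the browser sits on to confirm that the address in VUE_APP_BASE_URL is reachable at all:

# Reachability sketch for the frontend's API address.
# Assumption: the backend answers on plain HTTP at this host:port.
import urllib.request
import urllib.error

API_BASE = "http://192.168.0.30:8000"  # scheme added here as an assumption

try:
    with urllib.request.urlopen(API_BASE, timeout=5) as resp:
        print("reachable, HTTP status:", resp.status)
except urllib.error.HTTPError as e:
    # Any HTTP response (even 404) still proves the port is reachable.
    print("reachable, HTTP status:", e.code)
except (urllib.error.URLError, OSError) as e:
    print("NOT reachable:", e)

If even this fails from the browser machine, the problem is at the network or firewall level rather than inside Crawlab itself.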
Backend configuration:
api:
  address: "192.168.0.30:8000"
mongo:
  host: 192.168.0.20
  port: 27017
  db: Crawlab
  username: "admin"
  password: "xxxxxx"
  authSource: "admin"
redis:
  address: 192.168.0.22
  password: "xxxxxx"
  database: 1
  port: 6379
log:
  level: info
  path: "/var/log/crawlab"
  isDeletePeriodically: "N"
  deleteFrequency: "@hourly"
server:
  host: 0.0.0.0
  port: 8000
  master: "Y"
  secret: "crawlab"
  register:
    # type takes mac/ip/customName; if ip, the IP must be specified manually; if customName, your customNodeName must be filled in
lang: # Install language environments: Y = install, N = do not install
  python: "N"
  node: "N"
  java: "N"
  dotnet: "N"
  php: "N"
scripts: "/app/backend/scripts"
spider:
  path: "/app/spiders"
task:
  workers: 16
other:
  tmppath: "/tmp"
version: 0.5.1
setting:
  crawlabLogToES: "N" # Send Crawlab runtime logs to ES; to enable this, set it to "Y" and remember to set esClient
  crawlabLogIndex: "crawlab-log"
  allowRegister: "N"
  enableTutorial: "N"
  runOnMaster: "Y"
  demoSpiders: "N"
  checkScrapy: "Y"
  autoInstall: "Y"
  esClient: "" # Your ES client, e.g. http://192.168.1.1:9200 or http://your-domain.com; leave empty if ES is not used
  spiderLogIndex: "spider-log" # Index pattern for Kibana; needs to be configured in Kibana
notification:
  mail:
    server: ''
    port: ''
    senderEmail: ''
    senderIdentity: ''
    smtp:
      user: ''
      password: ''
repo:
  apiUrl: "https://center.crawlab.cn/api"
  ossUrl: "https://repo.crawlab.cn"
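To rule out the data stores themselves, here is a minimal connectivity sketch run from the Crawlab master host, reusing the values from the config above (pymongo and redis-py as client libraries are my own choice; "xxxxxx" stands for the redacted passwords):

# MongoDB check (host/port/user/authSource taken from the config above)
from pymongo import MongoClient
import redis

mongo = MongoClient(
    host="192.168.0.20",
    port=27017,
    username="admin",
    password="xxxxxx",
    authSource="admin",
    serverSelectionTimeoutMS=5000,
)
print("mongo ping:", mongo.admin.command("ping"))

# Redis check (address/port/database taken from the config above)
r = redis.Redis(host="192.168.0.22", port=6379, password="xxxxxx", db=1,
                socket_connect_timeout=5)
print("redis ping:", r.ping())

Both calls return without raising on my side; a timeout or auth error here would point at MongoDB/Redis rather than at the Crawlab server itself.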
All machines on the LAN can ping each other, both Redis and MongoDB are reachable, and the firewall ports are open, yet the login page still says it cannot connect to the server.