Closed Schr0dingerCat closed 2 years ago
一样的问题,master镜像,develop镜像都不行
context deadline exceeded
/go/pkg/mod/github.com/crawlab-team/go-trace@v0.1.0/trace.go:11 github.com/crawlab-team/go-trace.TraceError()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/grpc/client/client.go:240 github.com/crawlab-team/crawlab-core/grpc/client.(*Client)._connect()
/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.1.0/retry.go:55 github.com/cenkalti/backoff/v4.RetryNotifyWithTimer()
/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.1.0/retry.go:34 github.com/cenkalti/backoff/v4.RetryNotify()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/grpc/client/client.go:220 github.com/crawlab-team/crawlab-core/grpc/client.(*Client).connect()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/grpc/client/client.go:57 github.com/crawlab-team/crawlab-core/grpc/client.(*Client).Start()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/node/service/worker_service.go:43 github.com/crawlab-team/crawlab-core/node/service.(*WorkerService).Start()
/usr/local/go/src/runtime/asm_amd64.s:1374 runtime.goexit()
grpc error: client failed to start
/go/pkg/mod/github.com/crawlab-team/go-trace@v0.1.0/trace.go:6 github.com/crawlab-team/go-trace.PrintError()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/utils/backoff.go:13 github.com/crawlab-team/crawlab-core/utils.BackoffErrorNotify.func1()
/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.1.0/retry.go:69 github.com/cenkalti/backoff/v4.RetryNotifyWithTimer()
/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.1.0/retry.go:34 github.com/cenkalti/backoff/v4.RetryNotify()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/grpc/client/client.go:220 github.com/crawlab-team/crawlab-core/grpc/client.(*Client).connect()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/grpc/client/client.go:57 github.com/crawlab-team/crawlab-core/grpc/client.(*Client).Start()
/go/pkg/mod/github.com/crawlab-team/crawlab-core@v0.6.0-beta.20210802.1344/node/service/worker_service.go:43 github.com/crawlab-team/crawlab-core/node/service.(*WorkerService).Start()
/usr/local/go/src/runtime/asm_amd64.s:1374 runtime.goexit()
2021/12/02 17:53:53 error grpc client connect error: grpc error: client failed to start. reattempt in 29.3 seconds...
0.6 beta 的 docker compose 还有问题,用回 0.5 版本的就没问题。
version: '3.3'
services:
  master:
    image: tikazyq/crawlab:0.5.1
    container_name: master
    environment:
      # NOTE(review): on Crawlab 0.6+ images the master flag is reportedly
      # CRAWLAB_NODE_MASTER, not CRAWLAB_SERVER_MASTER (per the thread above) —
      # confirm against the 0.6 docs before bumping the image tag.
      # CRAWLAB_API_ADDRESS: "https://<your_api_ip>:<your_api_port>" # backend API address (for https or source-code deployments)
      CRAWLAB_SERVER_MASTER: "Y" # whether this node is the master ("Y") or a worker ("N")
      CRAWLAB_MONGO_HOST: "mongo" # MongoDB host; inside the compose network, reference the service name directly
      # CRAWLAB_MONGO_PORT: "27017" # MongoDB port
      # CRAWLAB_MONGO_DB: "crawlab_test" # MongoDB database
      # CRAWLAB_MONGO_USERNAME: "username" # MongoDB username
      # CRAWLAB_MONGO_PASSWORD: "password" # MongoDB password
      # CRAWLAB_MONGO_AUTHSOURCE: "admin" # MongoDB auth source
      CRAWLAB_REDIS_ADDRESS: "redis" # Redis host; inside the compose network, reference the service name directly
      # CRAWLAB_REDIS_PORT: "6379" # Redis port
      # CRAWLAB_REDIS_DATABASE: "1" # Redis database
      # CRAWLAB_REDIS_PASSWORD: "password" # Redis password
      # CRAWLAB_LOG_LEVEL: "info" # log level (default: info)
      # CRAWLAB_LOG_ISDELETEPERIODICALLY: "N" # whether to periodically delete log files (default: no)
      # CRAWLAB_LOG_DELETEFREQUENCY: "@hourly" # frequency of deleting log files (default: hourly)
      # CRAWLAB_TASK_WORKERS: 8 # number of task executors (tasks run in parallel)
      # CRAWLAB_SERVER_REGISTER_TYPE: "mac" # node register type; defaults to MAC address, set to "ip" to avoid MAC collisions
      # CRAWLAB_SERVER_REGISTER_IP: "127.0.0.1" # node register IP (unique node id); only effective when CRAWLAB_SERVER_REGISTER_TYPE is "ip"
      # CRAWLAB_SERVER_LANG_NODE: "Y" # whether to pre-install Node.js
      CRAWLAB_SERVER_LANG_JAVA: "Y" # whether to pre-install Java
      # CRAWLAB_SERVER_LANG_DOTNET: "Y" # whether to pre-install .NET Core
      # CRAWLAB_SERVER_LANG_PHP: "Y" # whether to pre-install PHP
      # CRAWLAB_SERVER_LANG_GO: "Y" # whether to pre-install Golang
      # CRAWLAB_SETTING_ALLOWREGISTER: "N" # whether to allow user registration
      # CRAWLAB_SETTING_ENABLETUTORIAL: "N" # whether to enable the tutorial
      CRAWLAB_SETTING_RUNONMASTER: "N" # whether to run tasks on the master node
      # CRAWLAB_SETTING_DEMOSPIDERS: "Y" # whether to initialize demo spiders
      # CRAWLAB_SETTING_CHECKSCRAPY: "Y" # whether to automatically detect Scrapy spiders
      # CRAWLAB_NOTIFICATION_MAIL_SERVER: smtp.example.com # SMTP server address
      # CRAWLAB_NOTIFICATION_MAIL_PORT: 465 # SMTP server port
      # CRAWLAB_NOTIFICATION_MAIL_SENDEREMAIL: admin@example.com # sender email
      # CRAWLAB_NOTIFICATION_MAIL_SENDERIDENTITY: admin@example.com # sender identity
      # CRAWLAB_NOTIFICATION_MAIL_SMTP_USER: username # SMTP username
      # CRAWLAB_NOTIFICATION_MAIL_SMTP_PASSWORD: password # SMTP password
    ports:
      - "8080:8080" # frontend port mapping
    depends_on:
      - mongo
      - redis
    # volumes:
    #   - "/var/crawlab/log:/var/logs/crawlab" # log persistence
  worker:
    image: tikazyq/crawlab:0.5.1
    container_name: worker
    environment:
      CRAWLAB_SERVER_MASTER: "N"
      CRAWLAB_MONGO_HOST: "mongo"
      CRAWLAB_REDIS_ADDRESS: "redis"
      CRAWLAB_SERVER_LANG_JAVA: "Y" # whether to pre-install Java
    depends_on:
      - mongo
      - redis
    # volumes:
    #   - "/var/crawlab/log:/var/logs/crawlab" # log persistence
  mongo:
    image: mongo:latest
    # environment:
    #   MONGO_INITDB_ROOT_USERNAME: username
    #   MONGO_INITDB_ROOT_PASSWORD: password
    # volumes:
    #   - "/opt/crawlab/mongo/data/db:/data/db" # data persistence
    # ports:
    #   - "27017:27017" # expose port to the host machine
  redis:
    image: redis:latest
    # command: redis-server --requirepass "password" # set Redis password
    # volumes:
    #   - "/opt/crawlab/redis/data:/data" # data persistence
    # ports:
    #   - "6379:6379" # expose port to the host machine
  # splash: # use Splash to run spiders on dynamic pages
  #   image: scrapinghub/splash
  #   container_name: splash
  #   ports:
  #     - "8050:8050"
@ding112 I solved this error by replacing CRAWLAB_SERVER_MASTER: Y with CRAWLAB_NODE_MASTER: "Y"
CRAWLAB_NODE_MASTER
OMG!! You just discovered an ultimate secret in the universe.
系统环境 虚拟机中 Linux a 5.11.0-40-generic #44-Ubuntu SMP Wed Oct 20 16:16:42 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux Bug 描述 使用代码中默认的docker-compose.yml文件,使用 docker-compose up 运行,输出提示报错: crawlab_master | 2021/11/22 11:45:19 error grpc client connect error: grpc error: client failed to start. reattempt in 1.1 seconds... 复现步骤 该 Bug 复现步骤如下
期望结果 docker 镜像 能工作。
输出信息