# Install docker, crawlab, and docker-compose
https://docs.docker.com/engine/install/debian/  // official Docker docs
curl -sSL https://get.docker.com/ | sh
pip3 install docker-compose
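A quick sanity check that both tools are installed and on the PATH (the exact version numbers will differ):
docker --version
docker-compose --version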
# Create the docker-compose.yml file
touch docker-compose.yml
vim docker-compose.yml
# Start, stop, and tear down the docker-compose stack
docker-compose up -d      # start all services in detached mode
docker-compose stop       # stop the services without removing them
docker-compose down -v    # stop and remove containers, networks, and volumes
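Once the stack is up, two standard docker-compose commands show whether the services started and what they are logging:
docker-compose ps        # list the services in this stack and their state
docker-compose logs -f   # follow the combined service logs (Ctrl+C to stop)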
# Copy files and directories into a container with docker cp
docker cp -a /root/assets/index-44aaddda.js <container id>:/app/public/assets/index-44aaddda.js
docker cp -a /root/assets/index-680ea157.js <container id>:/app/public/assets/index-680ea157.js
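If the app has already served the old assets, restarting the container (same placeholder id as above) should make the replaced files take effect:
docker restart <container id>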
# Enter the mongo container and enable remote access
docker-compose ps
docker-compose exec mongo bash     # enter via docker-compose, or:
docker ps
docker exec -it <container ID> sh  # enter directly with docker
apt-get update
apt-get install vim
vim /etc/mongod.conf.orig
#bindIp: 127.0.0.1                 # comment out bindIp so mongod is not limited to localhost
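For reference, a minimal sketch of the relevant net section after the edit (assuming the stock config layout shipped in the mongo image); either comment the bindIp line out as above or widen it, then restart the service:
net:
  port: 27017
  bindIp: 0.0.0.0    # was 127.0.0.1; connections from any interface are now accepted
docker-compose restart mongo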
# Enter the master container and adjust its Python environment
docker-compose exec master bash
pip3 install requests==2.22.0
pip3 install scrapy==2.2.0
pip3 install pymongo==3.10.1
pip3 install cloudscraper
pip3 install gevent
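These packages exist only inside the running container and are lost if it is recreated, so it is worth confirming the pinned versions landed:
pip3 list 2>/dev/null | grep -Ei 'requests|scrapy|pymongo|cloudscraper|gevent'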
# docker-compose.yml file:
version: '3.3'
services:
  master:
    image: crawlabteam/crawlab:0.6.0
    container_name: crawlab_example_master
    environment:
      CRAWLAB_NODE_MASTER: "Y"
      CRAWLAB_MONGO_HOST: "mongo"
      CRAWLAB_MONGO_PORT: "27017" # mongo port
      CRAWLAB_MONGO_DB: "crawlab" # mongo database
      CRAWLAB_MONGO_USERNAME: "username" # mongo username
      CRAWLAB_MONGO_PASSWORD: "password" # mongo password
      CRAWLAB_MONGO_AUTHSOURCE: "admin" # mongo auth source
    volumes:
      - "./.crawlab/master:/root/.crawlab"
    ports:
      - "8080:8080"
    depends_on:
      - mongo
  mongo:
    image: mongo:4.2
    container_name: crawlab_example_mongo
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: "username" # mongo username
      MONGO_INITDB_ROOT_PASSWORD: "password" # mongo password
    ports:
      - "27017:27017" # expose the mongo port to the host
# Chanzhaoyu/chatgpt-web configuration
version: '3'
services:
  app:
    image: chenzhaoyu94/chatgpt-web:v2.10.8 # to update, just pull this tag's image again
    ports:
      - 3002:3002
    environment:
      # set one of the two
      OPENAI_API_KEY: sk-xxx
      # set one of the two
      #OPENAI_ACCESS_TOKEN: xxx
      # API base URL, optional, only used when OPENAI_API_KEY is set
      #OPENAI_API_BASE_URL: xxx
      # API model, optional, only used when OPENAI_API_KEY is set
      #OPENAI_API_MODEL: xxx
      # reverse proxy, optional
      #API_REVERSE_PROXY: xxx
      # access secret key, optional
      #AUTH_SECRET_KEY: xxx
      # maximum requests per hour, optional, unlimited by default
      MAX_REQUEST_PER_HOUR: 0
      # timeout in milliseconds, optional
      TIMEOUT_MS: 60000
      # SOCKS proxy host, optional, takes effect together with SOCKS_PROXY_PORT
      #SOCKS_PROXY_HOST: xxx
      # SOCKS proxy port, optional, takes effect together with SOCKS_PROXY_HOST
      #SOCKS_PROXY_PORT: xxx
      # HTTPS_PROXY, optional
      #HTTPS_PROXY: http://xxx:7890
# Chanzhaoyu/chatgpt-web configuration (v2.11.0 variant, published only on 127.0.0.1:3001 and fronted by the nginx proxy below)
version: '3'
services:
  app:
    image: chenzhaoyu94/chatgpt-web:v2.11.0 # to update, just pull this tag's image again
    ports:
      - 127.0.0.1:3001:3002
    environment:
      # set one of the two
      OPENAI_API_KEY: sk-xxx
      # set one of the two
      #OPENAI_ACCESS_TOKEN: xxx
      # API base URL, optional, only used when OPENAI_API_KEY is set
      #OPENAI_API_BASE_URL: xxx
      # API model, optional, only used when OPENAI_API_KEY is set, see https://platform.openai.com/docs/models
      # gpt-4, gpt-4-0314, gpt-4-32k, gpt-4-32k-0314, gpt-3.5-turbo, gpt-3.5-turbo-0301, text-davinci-003, text-davinci-002, code-davinci-002
      OPENAI_API_MODEL: gpt-3.5-turbo
      # reverse proxy, optional
      #API_REVERSE_PROXY: xxx
      # access secret key, optional
      #AUTH_SECRET_KEY: xxx
      # maximum requests per hour, optional, unlimited by default
      MAX_REQUEST_PER_HOUR: 0
      # timeout in milliseconds, optional
      TIMEOUT_MS: 60000
      # SOCKS proxy host, optional, takes effect together with SOCKS_PROXY_PORT
      #SOCKS_PROXY_HOST: xxx
      # SOCKS proxy port, optional, takes effect together with SOCKS_PROXY_HOST
      #SOCKS_PROXY_PORT: xxx
      # HTTPS proxy, optional, supports http, https, socks5
      #HTTPS_PROXY: http://xxx:7890
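A quick plain-HTTP check that the container answers on the loopback port before putting nginx in front of it:
curl -I http://127.0.0.1:3001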
# nginx port forwarding (listen on 3002, proxy to the app on 127.0.0.1:3001)
user root;
worker_processes auto;
pid /run/nginx.pid;

events {
    worker_connections 768;
    # multi_accept on;
}

http {
    ##
    # Basic Settings
    ##
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    ##
    # SSL Settings
    ##
    ssl_protocols TLSv1.2 TLSv1.3; # Dropping SSLv3, ref: POODLE
    ssl_prefer_server_ciphers on;

    ##
    # Logging Settings
    ##
    access_log /var/log/nginx/access.log;
    error_log /var/log/nginx/error.log;

    ##
    # Gzip Settings
    ##
    gzip on;

    ##
    # Server Blocks
    ##
    server {
        listen 3002 default_server;
        listen [::]:3002 default_server;

        location / {
            proxy_pass http://127.0.0.1:3001;
            proxy_buffering off;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }
    }
}
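Before relying on the proxy, validate the configuration and reload nginx (assuming nginx is managed by systemd):
nginx -t                  # check the configuration for syntax errors
systemctl reload nginx    # apply it without dropping existing connections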
# Start the stack automatically on boot
vim /lib/systemd/system/chatgpt.service
[Unit]
Description=Docker Compose Service
After=docker.service network-online.target
Requires=docker.service
[Service]
Type=forking
RemainAfterExit=yes
Restart=always
WorkingDirectory=/root/
ExecStart=/usr/local/bin/docker-compose up -d
ExecStop=/usr/local/bin/docker-compose stop
[Install]
WantedBy=multi-user.target
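After writing the unit file, reload systemd and enable the service; this assumes docker-compose really lives at /usr/local/bin/docker-compose and that the compose file sits in the WorkingDirectory set above:
systemctl daemon-reload
systemctl enable --now chatgpt.service
systemctl status chatgpt.service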