Deploying Scrapyd and SpiderKeeper with Docker
Dockerfile
FROM python:3.5

RUN cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && pip install --upgrade pip \
    && pip install scrapy scrapyd sqlalchemy scrapy-redis mysql_connector scrapyd-client spiderkeeper

COPY scrapyd.conf /etc/scrapyd/
COPY config.txt /

VOLUME /data
VOLUME /images

EXPOSE 6800
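Build the image with the tag the compose file below expects (scrapy), from the directory holding the Dockerfile, scrapyd.conf and config.txt:

docker build -t scrapy .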
config.txt
[mysql]
db_host = root:123456@localhost:3306/scrapy

[redis]
db_host = localhost
db_port = 7501

[img]
path = /images/
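The spider code that consumes config.txt is not shown in this post, but since the file is copied to / inside the image, a spider can read it with Python's standard configparser. A minimal sketch, assuming only the section and key names from the file above (how the values are used is up to your project):

import configparser

# Read the connection settings baked into the image at /config.txt.
config = configparser.ConfigParser()
config.read('/config.txt')

mysql_dsn  = config['mysql']['db_host']    # e.g. 'root:123456@localhost:3306/scrapy'
redis_host = config['redis']['db_host']
redis_port = config['redis'].getint('db_port')
image_path = config['img']['path']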
scrapyd.conf
[scrapyd]
bind_address = 0.0.0.0
eggs_dir = /data/eggs
logs_dir = /data/logs
dbs_dir = /data/dbs
http_port = 6800
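With bind_address set to 0.0.0.0, Scrapyd accepts connections from other containers. You can check that a worker is up through Scrapyd's standard JSON API (the response below is illustrative):

curl http://localhost:6800/daemonstatus.json
# {"node_name": "worker_1", "status": "ok", "pending": 0, "running": 0, "finished": 0}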
docker-compose.yml
version: '2'
services:
  worker_1:
    image: scrapy
    hostname: worker_1
    volumes:
      - /Project/docker/worker_1:/data
      - /Project/docker/images:/images
    command: scrapyd
  worker_2:
    image: scrapy
    hostname: worker_2
    volumes:
      - /Project/docker/worker_2:/data
      - /Project/docker/images:/images
    command: scrapyd
  spiderkeeper:
    image: scrapy
    hostname: spiderkeeper
    ports:
      - '5000:5000'
    volumes:
      - /Project/docker/spiderkeeper:/data
    depends_on:
      - worker_1
      - worker_2
    command: spiderkeeper --database-url=sqlite:////data/SpiderKeeper.db --server=http://worker_1:6800 --server=http://worker_2:6800 --username=admin --password=admin
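Bring the stack up with docker-compose; the SpiderKeeper UI then listens on port 5000 with the admin/admin credentials set in the command above. The egg-building step is a sketch: it assumes you run it inside a Scrapy project directory, and the output filename is arbitrary.

docker-compose up -d
# SpiderKeeper web UI: http://localhost:5000 (login admin/admin)

# Build an egg of your Scrapy project to upload through the SpiderKeeper UI:
scrapyd-deploy --build-egg output.egg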