• scrapyd spiderkeeper docker部署


    Dockerfile

    # Image that can run either a scrapyd worker or the SpiderKeeper
    # dashboard — the docker-compose file chooses via `command:`.
    FROM python:3.5
    # Set the container timezone to Asia/Shanghai, then install scrapyd,
    # SpiderKeeper and the scraping/storage dependencies in one layer.
    RUN cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && pip install --upgrade pip && pip install scrapy scrapyd sqlalchemy scrapy-redis mysql_connector scrapyd-client spiderkeeper
    # scrapyd reads its configuration from /etc/scrapyd/scrapyd.conf
    COPY scrapyd.conf /etc/scrapyd/
    COPY config.txt /
    # /data holds scrapyd eggs/logs/dbs (see scrapyd.conf); /images is the
    # shared image store referenced by config.txt.
    VOLUME /data
    VOLUME /images
    # 6800: scrapyd HTTP API; 5000: SpiderKeeper web UI (the compose file
    # publishes 5000, so the image should declare it too).
    EXPOSE 6800
    EXPOSE 5000
    

    config.txt

    # Application settings bundled into the image (COPY config.txt / in the
    # Dockerfile); presumably read by the project's spiders — not shown here.
    [mysql]
    # MySQL DSN fragment: user:password@host:port/database
    # NOTE(review): "localhost" inside a container refers to the container
    # itself, not the Docker host — verify this is reachable at runtime.
    db_host=root:123456@localhost:3306/scrapy
    [redis]
    # NOTE(review): the same localhost caveat applies to the Redis host.
    db_host=localhost
    db_port=7501
    [img]
    # Image storage directory — matches the /images volume in the Dockerfile.
    path = /images/
    

    scrapyd.conf

    # scrapyd daemon configuration (copied to /etc/scrapyd/ by the Dockerfile).
    [scrapyd]
    # Listen on all interfaces so other containers can reach the daemon.
    bind_address = 0.0.0.0
    # Keep eggs, logs and job databases under /data, the mounted volume,
    # so worker state survives container recreation.
    eggs_dir    = /data/eggs
    logs_dir    = /data/logs
    dbs_dir     = /data/dbs
    # Matches the port exposed in the Dockerfile.
    http_port   = 6800
    

    docker-compose.yml

    # Two scrapyd workers plus one SpiderKeeper dashboard, all running the
    # same "scrapy" image; SpiderKeeper schedules jobs on the workers over HTTP.
    version: '2'
    services:
        worker_1:
            image: scrapy
            hostname: worker_1
            volumes:
                # per-worker scrapyd state (eggs/logs/dbs)
                - /Project/docker/worker_1:/data
                # image store shared by all workers
                - /Project/docker/images:/images
            command: scrapyd

        worker_2:
            image: scrapy
            hostname: worker_2
            volumes:
                - /Project/docker/worker_2:/data
                - /Project/docker/images:/images
            command: scrapyd

        spiderkeeper:
            image: scrapy
            hostname: spiderkeeper
            ports:
                # SpiderKeeper web UI
                - '5000:5000'
            volumes:
                - /Project/docker/spiderkeeper:/data
            # The workers are services in THIS compose file, so they are already
            # resolvable by service name on the project's default network;
            # `external_links` is only for containers created outside the
            # project. `depends_on` expresses the intended start order instead.
            depends_on:
                - worker_1
                - worker_2
            command: spiderkeeper --database-url=sqlite:////data/SpiderKeeper.db --server=http://worker_1:6800 --server=http://worker_2:6800 --username=admin --password=admin
    

      

  • 相关阅读:
    第一课基础知识
    Linux基础命令
    IO&Process基础知识
    caffe-windows 运行matlab 接口的例子
    process 3d image using caffe
    caffe-windows配置 cuda6.5+vs2012
    cuda7.0安装windows+vs2012
    SGD步长
    Exercise: Convolutional Neural NetworkCNN的反向求导及练习
    Create sparse matrix:sparse
  • 原文地址:https://www.cnblogs.com/qy-brother/p/9076422.html
Copyright © 2020-2023  润新知