mirror of https://github.com/vimagick/dockerfiles.git
synced 2025-12-21 13:23:02 +01:00
delete ghost
@@ -1,19 +0,0 @@
FROM ubuntu:14.04
MAINTAINER kev

WORKDIR /tmp
ADD https://deb.nodesource.com/setup /tmp/
ADD https://ghost.org/zip/ghost-latest.zip /tmp/
RUN bash setup
RUN apt-get install -y nodejs unzip
RUN unzip ghost-latest.zip -d /ghost

WORKDIR /ghost
RUN sed -e 's@127.0.0.1@0.0.0.0@' config.example.js > config.js
RUN npm install --production

ENV NODE_ENV production
VOLUME /ghost
EXPOSE 2368

CMD npm start
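For reference, building and running the image this Dockerfile describes would look roughly like the following; the `ghost` image tag is an assumption, and Ghost itself listens on 2368:

    $ docker build -t ghost .
    $ docker run -d -p 2368:2368 ghost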
@@ -1,5 +0,0 @@
ghost:
  image: ghost:latest
  ports:
    - "9000:2368"
  restart: always
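A minimal sketch of using this Compose file (saved as docker-compose.yml) and checking that Ghost answers on the published port:

    $ docker-compose up -d
    $ curl -I http://localhost:9000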
@@ -1,58 +0,0 @@
#
# Dockerfile for scrapyd
#

FROM debian:jessie
MAINTAINER kev <noreply@datageek.info>

ADD ./scrapyd.conf /etc/scrapyd/
ADD ./requirements.txt /etc/scrapyd/

RUN apt-get update \
    && apt-get install -y autoconf \
                          build-essential \
                          curl \
                          git \
                          libffi-dev \
                          libssl-dev \
                          libtool \
                          python \
                          python-dev \
                          vim-tiny \
    && mkdir libxml2 \
    && curl -sSL ftp://xmlsoft.org/libxml2/libxml2-2.9.2.tar.gz | tar xz --strip 1 -C libxml2 \
    && cd libxml2 \
    && ./configure --prefix=/usr \
    && make \
    && make install \
    && ldconfig \
    && cd .. \
    && rm -rf libxml2 \
    && mkdir libxslt \
    && curl -sSL https://git.gnome.org/browse/libxslt/snapshot/libxslt-1.1.28.tar.xz | tar xJ --strip 1 -C libxslt \
    && cd libxslt \
    && ./autogen.sh --prefix=/usr \
    && make \
    && make install \
    && ldconfig \
    && cd .. \
    && rm -rf libxslt \
    && curl -sSL https://bootstrap.pypa.io/get-pip.py | python \
    && pip install scrapy==0.24.6 scrapyd==1.0.1 \
    && pip install -r /etc/scrapyd/requirements.txt \
    && curl -sSL https://github.com/scrapy/scrapy/raw/master/extras/scrapy_bash_completion -o /etc/bash_completion.d/scrapy_bash_completion \
    && echo 'source /etc/bash_completion.d/scrapy_bash_completion' >> /root/.bashrc \
    && apt-get remove -y autoconf \
                         build-essential \
                         libffi-dev \
                         libssl-dev \
                         libtool \
                         python-dev \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

VOLUME /etc/scrapyd/ /var/lib/scrapyd/

EXPOSE 6800

CMD ["scrapyd"]
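A rough usage sketch for this image; the `scrapyd` tag and the project/egg names are placeholders, and the egg upload goes through the addversion.json endpoint configured in scrapyd.conf further down:

    $ docker build -t scrapyd .
    $ docker run -d -p 6800:6800 -v $(pwd)/data:/var/lib/scrapyd scrapyd
    $ curl http://localhost:6800/addversion.json -F project=myproject -F version=r1 -F egg=@myproject.egg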
@@ -1,42 +0,0 @@
#
# Dockerfile for scrapyd
#
# References:
# - http://docs.docker.com/reference/builder/
# - http://doc.scrapy.org/en/latest/topics/ubuntu.html#topics-ubuntu
# - https://github.com/scrapy/scrapyd/blob/master/debian/scrapyd.upstart#L9-L11
# - http://pip.readthedocs.org/en/latest/installing.html
# - http://supervisord.org/index.html
#

FROM ubuntu:14.04
MAINTAINER kev <noreply@datageek.info>

ADD ./requirements.txt /tmp/

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 627220E7 \
    && echo 'deb http://archive.scrapy.org/ubuntu scrapy main' > /etc/apt/sources.list.d/scrapy.list \
    && apt-get update \
    && apt-get install -y git \
                          libpq-dev \
                          build-essential \
                          python-dev \
                          python-pip \
                          python-numpy \
                          python-txzmq \
                          scrapy-0.24 \
                          scrapyd \
                          supervisor \
    && pip install -r /tmp/requirements.txt \
    && apt-get remove -y build-essential \
    && apt-get autoremove -y \
    && apt-get clean -y \
    && rm -rf /var/lib/apt/lists/*

ADD ./001-scrapyd /etc/scrapyd/conf.d/
ADD ./scrapyd.conf /etc/supervisor/conf.d/

EXPOSE 6800 9001

CMD supervisord -n -c /etc/supervisor/supervisord.conf
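Since this variant runs scrapyd under supervisord (port 9001 presumably serving supervisor's web interface), a quick health check after starting a container might look like this; the container name and the supervisor program name are assumptions based on the files added above:

    $ docker run -d --name scrapyd -p 6800:6800 -p 9001:9001 scrapyd
    $ docker exec scrapyd supervisorctl -c /etc/supervisor/supervisord.conf status
    $ curl http://localhost:6800/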
@@ -1,15 +0,0 @@
docker-scrapyd
==============

Dockerfile for building an image that runs [scrapyd][1].

## Building

    $ docker build -t scrapyd .

## Running

    $ docker run -p 6800:6800 scrapyd

[1]: https://github.com/scrapy/scrapyd
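Once the container is running, spiders are driven through scrapyd's HTTP API; the project and spider names below are placeholders:

    $ curl http://localhost:6800/schedule.json -d project=myproject -d spider=myspider
    $ curl http://localhost:6800/listjobs.json?project=myproject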
@@ -1,21 +0,0 @@
--allow-all-external
--allow-unverified jsonpath

# parser
git+https://github.com/scrapy/scrapely
jsonpath
jsonschema
pyparsing
pyquery
pyyaml

# database
pymongo
redis

# others
chardet
requests
service_identity
toolz
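This is an ordinary pip requirements file, installed inside the images with pip install -r; outside a container the equivalent would be the line below (note that the --allow-* options at the top were only honored by the old pip releases these images ship):

    $ pip install -r requirements.txt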
@@ -1,26 +0,0 @@
[scrapyd]
eggs_dir = /var/lib/scrapyd/eggs
logs_dir = /var/lib/scrapyd/logs
items_dir = /var/lib/scrapyd/items
dbs_dir = /var/lib/scrapyd/dbs
jobs_to_keep = 5
max_proc = 0
max_proc_per_cpu = 4
finished_to_keep = 100
poll_interval = 5
http_port = 6800
debug = off
runner = scrapyd.runner
application = scrapyd.app.application
launcher = scrapyd.launcher.Launcher

[services]
schedule.json = scrapyd.webservice.Schedule
cancel.json = scrapyd.webservice.Cancel
addversion.json = scrapyd.webservice.AddVersion
listprojects.json = scrapyd.webservice.ListProjects
listversions.json = scrapyd.webservice.ListVersions
listspiders.json = scrapyd.webservice.ListSpiders
delproject.json = scrapyd.webservice.DeleteProject
delversion.json = scrapyd.webservice.DeleteVersion
listjobs.json = scrapyd.webservice.ListJobs
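Each entry in [services] maps a URL path to a webservice handler, so every line above corresponds to an HTTP endpoint on http_port (6800); for example, with placeholder project and job values:

    $ curl http://localhost:6800/listprojects.json
    $ curl http://localhost:6800/listspiders.json?project=myproject
    $ curl http://localhost:6800/cancel.json -d project=myproject -d job=$JOB_ID   # $JOB_ID taken from listjobs.json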