Compare commits

...

3 Commits

Author SHA1 Message Date
370cd1536f fixes 2021-01-20 13:54:47 +01:00
96bde590ad slim 2021-01-20 13:54:02 +01:00
92b9f8d489 z 2021-01-20 13:53:38 +01:00
4 changed files with 14 additions and 10 deletions

.dockerignore  Normal file (+2)

@@ -0,0 +1,2 @@
+venv
+websucker.egg-info

Dockerfile

@@ -1,14 +1,16 @@
-FROM python:3.8.0-alpine
+FROM python:3.8-slim
-RUN apk add --update --no-cache git curl curl-dev vim py3-lxml gcc make libxml2-dev libxslt-dev libc-dev
+RUN apt-get update && apt-get install -y git curl libcurl4-openssl-dev build-essential vim libssl-dev
-RUN addgroup -S appgroup -g 1000 && \
-    adduser -u 1000 -S appuser -G appgroup
-RUN mkdir /app
+RUN addgroup appgroup && \
+    adduser appuser && adduser appuser appgroup
-ADD requirements.txt /
-RUN CASS_DRIVER_BUILD_CONCURRENCY=4 pip install -r /requirements.txt
-RUN pip install https://git.kemt.fei.tuke.sk/dano/websucker-pip/archive/master.zip
+RUN mkdir /app /src
+ADD requirements.txt /src
+RUN CASS_DRIVER_BUILD_CONCURRENCY=4 pip install -r /src/requirements.txt
+ADD . /src
+RUN pip install /src
 WORKDIR /app
 ENTRYPOINT ["websuck"]


@@ -430,7 +430,7 @@ def visit_sitemap(domain,connection,parser,db):
     return True
-def visit_links(links,connection,parser,db,is_online):
+def visit_links(links,connection,parser,db,is_online=True):
     """
     if the site is not online, then just check links
     """


@@ -132,7 +132,7 @@ def start(ctx, link):
     p = ctx.obj["parser"]
     c = Connection()
     visit_links([link],c,p,db)
-    db.check_domain(domain)
+    #db.check_domain(domain)
 @cli.command(help="Continue crawling of seen links from a domain")
 @click.pass_context
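
For context, a sketch of how the start command body reads after this change; only the lines visible in the hunk are certain, the db/ctx wiring around them is an assumption:

def start(ctx, link):
    db = ctx.obj["db"]     # assumed: taken from the click context like the parser
    p = ctx.obj["parser"]
    c = Connection()
    visit_links([link], c, p, db)  # works with the new is_online=True default
    # db.check_domain(domain) is now commented out, so the domain record is no
    # longer re-checked after visiting the single link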