diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 00000000..838f75d4
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,94 @@
+on:
+  pull_request:
+    paths:
+      - "Dockerfile"
+
+  push:
+    paths:
+      - "Dockerfile"
+
+  workflow_dispatch:
+
+env:
+  registry: ghcr.io
+  username: ${{ github.actor }}
+  password: ${{ github.token }}
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+
+    env:
+      context: .
+      file: Dockerfile
+      image: grab-site
+
+    permissions:
+      contents: read
+      id-token: write
+      packages: write
+
+    steps:
+      - name: Extract Docker metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: |
+            ${{ env.registry }}/${{ env.username }}/${{ env.image }}
+          tags: |
+            type=raw,value=latest,enable={{is_default_branch}}
+            type=ref,event=branch
+            type=ref,event=pr
+            type=ref,event=tag
+            type=schedule,pattern=nightly
+            type=semver,pattern={{major}}
+            type=semver,pattern={{major}}.{{minor}}
+            type=semver,pattern={{version}}
+            type=sha,format=long
+
+      - name: Setup QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Setup Docker Buildx
+        id: builder
+        uses: docker/setup-buildx-action@v2
+
+      - name: Log into registry ${{ env.registry }}
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          registry: ${{ env.registry }}
+          username: ${{ env.username }}
+          password: ${{ env.password }}
+
+      - name: Checkout repository
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 1
+
+      - name: Build ${{ env.image }} image
+        id: build
+        uses: docker/build-push-action@v4
+        with:
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          context: ${{ env.context }}
+          file: ${{ env.context }}/${{ env.file }}
+          labels: ${{ steps.meta.outputs.labels }}
+          platforms: linux/amd64
+          provenance: false
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}
+
+      - name: Install Cosign
+        if: github.event_name != 'pull_request'
+        uses: sigstore/cosign-installer@main
+
+      - name: Sign the Docker images
+        if: github.event_name != 'pull_request'
+        run: |
+          echo "${{ steps.meta.outputs.tags }}" | xargs -I {} cosign sign --yes \
+            -a "ref=${{ github.sha }}" \
+            -a "repo=${{ github.repository }}" \
+            -a "workflow=${{ github.workflow }}" \
+            "{}@${{ steps.build.outputs.digest }}"
diff --git a/.gitignore b/.gitignore
index bee8a64b..63186c73 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,4 @@
+.venv/
+build/
 __pycache__
+*.egg-info
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..03f577e0
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,85 @@
+# syntax=docker/dockerfile:1.6-labs
+
+ARG VARIANT="3.12-slim"
+ARG WORKDIR="/usr/src/grab-site"
+
+# ------------------------------------------------------------------------------
+
+FROM python:${VARIANT} as host-deps
+
+ARG VARIANT
+ARG WORKDIR
+
+WORKDIR ${WORKDIR}
+
+RUN --mount=type=cache,target=/var/cache/apt,sharing=locked <--/wpull.db" done
+```
-   ```
-   wget https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer
-   chmod +x pyenv-installer
-   ./pyenv-installer
-   ~/.pyenv/bin/pyenv install 3.8.15
-   ~/.pyenv/versions/3.8.15/bin/python -m venv ~/gs-venv
-   ~/gs-venv/bin/pip install --no-binary lxml --upgrade git+https://github.com/ArchiveTeam/grab-site
-   ```
-
-   `--no-binary lxml` is necessary for the html5-parser build.
+Install on Debian
+---
-3. Add this to your `~/.bashrc` or `~/.zshrc`:
+
+```
+sudo apt-get -y update
+sudo apt-get -y install --no-install-recommends build-essential libre2-dev libxml2-dev libxslt-dev pkg-config zlib1g-dev
+python3 -m venv .venv
+. .venv/bin/activate
+pip install --no-binary lxml .
+```
-   ```
-   PATH="$PATH:$HOME/gs-venv/bin"
-   ```
+
+`--no-binary lxml` is necessary for the html5-parser build.
-
-   and then restart your shell (e.g. by opening a new terminal tab/window).
 
 Install on NixOS
@@ -110,32 +103,7 @@
 nix-env -f https://github.com/NixOS/nixpkgs/archive/release-23.05.tar.gz -iA grab-site
 
-Install on another distribution lacking Python 3.7.x or 3.8.x
----
-
-grab-site and its dependencies are available in [nixpkgs](https://github.com/NixOS/nixpkgs), which can be used on any Linux distribution.
-
-1. As root:
-
-   Where `USER` is your non-root username:
-
-   ```
-   mkdir /nix
-   chown USER:USER /nix
-   ```
-
-2. As the **non-root** user, install Nix: https://nixos.org/nix/download.html
-
-3. As the **non-root** user:
-
-   ```
-   nix-env -f https://github.com/NixOS/nixpkgs/archive/release-23.05.tar.gz -iA grab-site
-   ```
-
-   and then restart your shell (e.g. by opening a new terminal tab/window).
-
-
-
+
 Install on macOS
 ---
@@ -274,14 +242,14 @@ Options can come before or after the URL.
 
 * `--igsets=IGSET1,IGSET2`: use ignore sets `IGSET1` and `IGSET2`.
   Ignore sets are used to avoid requesting junk URLs using a pre-made set of
-  regular expressions. See [the full list of available ignore sets](https://github.com/ArchiveTeam/grab-site/tree/master/libgrabsite/ignore_sets).
+  regular expressions. See [the full list of available ignore sets](https://github.com/ArchiveTeam/grab-site/tree/master/src/grab_site/ignore_sets).
 
-  The [global](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/global)
+  The [global](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/global)
   ignore set is implied and enabled unless `--no-global-igset` is used.
 
   The ignore sets can be changed during the crawl by editing the `DIR/igsets` file.
 
-* `--no-global-igset`: don't add the [global](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/global) ignore set.
+* `--no-global-igset`: don't add the [global](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/global) ignore set.
 
 * `--no-offsite-links`: avoid following links to a depth of 1 on other domains.
@@ -431,7 +399,7 @@ Either don't crawl from Europe (because tumblr redirects to a GDPR `/privacy/consent` page
 --ua "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:64.0) Gecko/20100101 Firefox/70.0 but not really nor Googlebot/2.1"
 ```
 
-Use [`--igsets=singletumblr`](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/singletumblr)
+Use [`--igsets=singletumblr`](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/singletumblr)
 to avoid crawling the homepages of other tumblr blogs.
 
 If you don't care about who liked or reblogged a post, add `\?from_c=` to the
@@ -443,7 +411,7 @@
 hiding the page content with CSS. You are still likely to get a complete crawl.
 
 #### Subreddits
 
-Use [`--igsets=reddit`](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/reddit)
+Use [`--igsets=reddit`](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/reddit)
 and add a `/` at the end of the URL to avoid crawling all subreddits.
 When crawling a subreddit, you **must** get the casing of the subreddit right
@@ -476,18 +444,18 @@ Use `--concurrency=1 --delay=500-1500`.
 
 #### MediaWiki sites with English language
 
-Use [`--igsets=mediawiki`](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/mediawiki).
+Use [`--igsets=mediawiki`](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/mediawiki).
 Note that this ignore set ignores old page revisions.
 
 #### MediaWiki sites with non-English language
 
 You will probably have to add ignores with translated `Special:*` URLs based on
-[ignore_sets/mediawiki](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/mediawiki).
+[ignore_sets/mediawiki](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/mediawiki).
 
 #### Forums that aren't Discourse
 
 Forums require more manual intervention with ignore patterns.
-[`--igsets=forums`](https://github.com/ArchiveTeam/grab-site/blob/master/libgrabsite/ignore_sets/forums)
+[`--igsets=forums`](https://github.com/ArchiveTeam/grab-site/blob/master/src/grab_site/ignore_sets/forums)
 is often useful for non-SMF forums, but you will have to add other ignore
 patterns, including one to ignore individual-forum-post pages if there are
 too many posts to crawl. (Generally, crawling the thread pages is enough.)
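The ignore-set machinery referenced above is small: `wpull_hooks.py` (renamed later in this diff) loads each named file from `ignore_sets/`, drops comments and blank lines, and folds everything into one combined regular expression. Below is a rough sketch; `compile_combined_regexp`'s body is not shown in this PR, so the `"|"`-join is an assumption, and `re` stands in for the `fb-re2` that grab-site actually uses:

```python
# Sketch of the ignore-set mechanics, reusing the helper names visible in
# src/grab_site/wpull_hooks.py later in this diff. Requires grab-site (as
# patched by this PR) to be installed.
import os
import re

import grab_site

ignore_sets_path = os.path.join(os.path.dirname(grab_site.__file__), "ignore_sets")

def include_ignore_line(line):
    # comment lines and blank lines in an ignore-set file are skipped
    return line and not line.startswith("#")

def get_patterns_for_ignore_set(name):
    with open(os.path.join(ignore_sets_path, name), "r", encoding="utf-8") as f:
        return [line for line in f.read().splitlines() if include_ignore_line(line)]

def compile_combined_regexp(patterns):
    # assumed implementation: one alternation, so each queued URL is tested
    # against a single compiled regex instead of once per pattern
    return re.compile("|".join("(?:%s)" % p for p in patterns))

ignores = compile_combined_regexp(get_patterns_for_ignore_set("reddit"))
print(bool(ignores.search("https://www.reddit.com/login?dest=/r/Archiveteam/")))
```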
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 00000000..cff8d9e3
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,68 @@
+# TODO
+
+## html5lib>=1
+
+- [x] [_tokenizer](https://github.com/html5lib/html5lib-python/pull/270)
+
+```python
+import importlib, sys
+sys.modules["html5lib.tokenizer"] = importlib.import_module("html5lib._tokenizer")
+```
+
+## python>=3.7
+
+- [x] [async def](https://github.com/python/cpython/issues/74591)
+
+```patch
+diff --git a/.venv/lib/python3.7/site-packages/wpull/driver/process.py b/.venv/lib/python3.7/site-packages/wpull/driver/process.py
+index e370538..48d1d39 100644
+--- a/.venv/lib/python3.7/site-packages/wpull/driver/process.py
++++ b/.venv/lib/python3.7/site-packages/wpull/driver/process.py
+@@ -53,8 +53,8 @@ class Process(object):
+         )
+         self._process = yield from process_future
+
+-        self._stderr_reader = asyncio.async(self._read_stderr())
+-        self._stdout_reader = asyncio.async(self._read_stdout())
++        self._stderr_reader = asyncio.ensure_future(self._read_stderr())
++        self._stdout_reader = asyncio.ensure_future(self._read_stdout())
+
+         if use_atexit:
+             atexit.register(self.close)
+```
+
+## python>=3.10
+
+- [x] [collections.abc](https://github.com/python/cpython/issues/81505)
+
+```python
+import collections
+from collections.abc import Hashable, Mapping, MutableMapping
+from typing import Callable
+collections.Callable = Callable
+collections.Hashable = Hashable
+collections.Mapping = Mapping
+collections.MutableMapping = MutableMapping
+```
+
+## python>=3.11
+
+- [x] [@asyncio.coroutine](https://github.com/python/cpython/issues/87382)
+
+```python
+import asyncio
+# https://github.com/python/cpython/blob/68b34a720485f399e8699235b8f4e08f227dd43b/Lib/asyncio/coroutines.py#L105
+def coroutine(func): return func
+_is_coroutine = object()
+asyncio.coroutine = coroutine
+```
+
+## tornado>=5
+
+- [x] [SSL](https://github.com/tornadoweb/tornado/pull/2177)
+
+```python
+from ssl import CertificateError
+from tornado import netutil
+netutil.SSLCertificateError = CertificateError
+```
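The four shims above are independent monkeypatches. For reference, a consolidated module applying them all before wpull, html5lib consumers, or tornado are imported might look like the following; the module name `compat.py` and the ordering are illustrative, not part of this PR:

```python
# compat.py -- illustrative consolidation of the four TODO shims above.
# Must be imported before wpull and its dependencies.
import asyncio
import collections
import importlib
import sys
from collections.abc import Hashable, Mapping, MutableMapping
from ssl import CertificateError
from typing import Callable

# html5lib>=1: the tokenizer module moved to a private name
sys.modules["html5lib.tokenizer"] = importlib.import_module("html5lib._tokenizer")

# python>=3.10: ABC aliases were removed from the collections namespace
collections.Callable = Callable
collections.Hashable = Hashable
collections.Mapping = Mapping
collections.MutableMapping = MutableMapping

# python>=3.11: @asyncio.coroutine was removed; identity stand-in, enough
# for wpull's generator-based coroutines, which are driven with yield from
def coroutine(func):
    return func

asyncio.coroutine = coroutine

# tornado>=5: SSLCertificateError is no longer re-exported by tornado.netutil
from tornado import netutil

netutil.SSLCertificateError = CertificateError
```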
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..7302db3c
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,14 @@
+version: "3.9"
+
+services:
+  app:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    image: ghcr.io/archiveteam/grab-site:latest
+    ports:
+      - 29000:29000
+    pull_policy: if_not_present
+
+volumes:
+  app_data:
diff --git a/grab-site b/grab-site
deleted file mode 100755
index 70b660c1..00000000
--- a/grab-site
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-
-from libgrabsite import main
-main.main()
diff --git a/gs-dump-urls b/gs-dump-urls
deleted file mode 100755
index ecf408c2..00000000
--- a/gs-dump-urls
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-
-from libgrabsite import dump_urls
-dump_urls.main()
diff --git a/gs-server b/gs-server
deleted file mode 100755
index d20fdec7..00000000
--- a/gs-server
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-
-from libgrabsite import server
-server.main()
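Each of the three deleted wrappers was a two-line launcher; `[project.scripts]` in the new `pyproject.toml` (below) makes pip generate equivalent console scripts at install time. For comparison, the old `grab-site` wrapper expressed against the new package layout would read:

```python
#!/usr/bin/env python3
# Equivalent of the deleted grab-site wrapper under the renamed package;
# pip now generates this launcher from [project.scripts] instead.
from grab_site import main

if __name__ == "__main__":
    main.main()
```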
"lmdb>=0.89", - "autobahn>=0.12.1", - "fb-re2>=1.0.6", - "websockets>=6.0", -] - -if 'GRAB_SITE_NO_CCHARDET' not in os.environ: - install_requires.append("cchardet>=1.0.0") - -setup( - name="grab-site", - version=libgrabsite.__version__, - description="The archivist's web crawler: WARC output, dashboard for all crawls, dynamic ignore patterns", - url="https://ludios.org/grab-site/", - author="Ivan Kozik", - author_email="ivan@ludios.org", - classifiers=[ - "Programming Language :: Python :: 3", - "Development Status :: 5 - Production/Stable", - "Intended Audience :: End Users/Desktop", - "License :: OSI Approved :: MIT License", - "Topic :: Internet :: WWW/HTTP", - ], - scripts=["grab-site", "gs-server", "gs-dump-urls"], - packages=["libgrabsite"], - package_data={"libgrabsite": ["*.html", "*.ico", "*.txt", "ignore_sets/*"]}, - install_requires=install_requires, -) diff --git a/libgrabsite/404.html b/src/grab_site/404.html similarity index 100% rename from libgrabsite/404.html rename to src/grab_site/404.html diff --git a/libgrabsite/__init__.py b/src/grab_site/__init__.py similarity index 100% rename from libgrabsite/__init__.py rename to src/grab_site/__init__.py diff --git a/libgrabsite/dashboard.html b/src/grab_site/dashboard.html similarity index 100% rename from libgrabsite/dashboard.html rename to src/grab_site/dashboard.html diff --git a/libgrabsite/dashboard_client.py b/src/grab_site/dashboard_client.py similarity index 100% rename from libgrabsite/dashboard_client.py rename to src/grab_site/dashboard_client.py diff --git a/libgrabsite/default_cookies.txt b/src/grab_site/default_cookies.txt similarity index 100% rename from libgrabsite/default_cookies.txt rename to src/grab_site/default_cookies.txt diff --git a/libgrabsite/dump_urls.py b/src/grab_site/dump_urls.py similarity index 95% rename from libgrabsite/dump_urls.py rename to src/grab_site/dump_urls.py index b5d41153..0b1e1b75 100644 --- a/libgrabsite/dump_urls.py +++ b/src/grab_site/dump_urls.py @@ -1,11 +1,11 @@ import click import sqlite3 -import libgrabsite +import grab_site def print_version(ctx, param, value): if not value or ctx.resilient_parsing: return - click.echo(libgrabsite.__version__) + click.echo(grab_site.__version__) ctx.exit() diff --git a/libgrabsite/dupes.py b/src/grab_site/dupes.py similarity index 100% rename from libgrabsite/dupes.py rename to src/grab_site/dupes.py diff --git a/libgrabsite/dupespotter.py b/src/grab_site/dupespotter.py similarity index 100% rename from libgrabsite/dupespotter.py rename to src/grab_site/dupespotter.py diff --git a/libgrabsite/favicon.ico b/src/grab_site/favicon.ico similarity index 100% rename from libgrabsite/favicon.ico rename to src/grab_site/favicon.ico diff --git a/libgrabsite/ignore_sets/blogs b/src/grab_site/ignore_sets/blogs similarity index 100% rename from libgrabsite/ignore_sets/blogs rename to src/grab_site/ignore_sets/blogs diff --git a/libgrabsite/ignore_sets/coppermine b/src/grab_site/ignore_sets/coppermine similarity index 100% rename from libgrabsite/ignore_sets/coppermine rename to src/grab_site/ignore_sets/coppermine diff --git a/libgrabsite/ignore_sets/facebook b/src/grab_site/ignore_sets/facebook similarity index 100% rename from libgrabsite/ignore_sets/facebook rename to src/grab_site/ignore_sets/facebook diff --git a/libgrabsite/ignore_sets/forums b/src/grab_site/ignore_sets/forums similarity index 100% rename from libgrabsite/ignore_sets/forums rename to src/grab_site/ignore_sets/forums diff --git a/libgrabsite/ignore_sets/global 
diff --git a/libgrabsite/404.html b/src/grab_site/404.html
similarity index 100%
rename from libgrabsite/404.html
rename to src/grab_site/404.html
diff --git a/libgrabsite/__init__.py b/src/grab_site/__init__.py
similarity index 100%
rename from libgrabsite/__init__.py
rename to src/grab_site/__init__.py
diff --git a/libgrabsite/dashboard.html b/src/grab_site/dashboard.html
similarity index 100%
rename from libgrabsite/dashboard.html
rename to src/grab_site/dashboard.html
diff --git a/libgrabsite/dashboard_client.py b/src/grab_site/dashboard_client.py
similarity index 100%
rename from libgrabsite/dashboard_client.py
rename to src/grab_site/dashboard_client.py
diff --git a/libgrabsite/default_cookies.txt b/src/grab_site/default_cookies.txt
similarity index 100%
rename from libgrabsite/default_cookies.txt
rename to src/grab_site/default_cookies.txt
diff --git a/libgrabsite/dump_urls.py b/src/grab_site/dump_urls.py
similarity index 95%
rename from libgrabsite/dump_urls.py
rename to src/grab_site/dump_urls.py
index b5d41153..0b1e1b75 100644
--- a/libgrabsite/dump_urls.py
+++ b/src/grab_site/dump_urls.py
@@ -1,11 +1,11 @@
 import click
 import sqlite3
-import libgrabsite
+import grab_site
 
 def print_version(ctx, param, value):
     if not value or ctx.resilient_parsing:
         return
-    click.echo(libgrabsite.__version__)
+    click.echo(grab_site.__version__)
     ctx.exit()
diff --git a/libgrabsite/dupes.py b/src/grab_site/dupes.py
similarity index 100%
rename from libgrabsite/dupes.py
rename to src/grab_site/dupes.py
diff --git a/libgrabsite/dupespotter.py b/src/grab_site/dupespotter.py
similarity index 100%
rename from libgrabsite/dupespotter.py
rename to src/grab_site/dupespotter.py
diff --git a/libgrabsite/favicon.ico b/src/grab_site/favicon.ico
similarity index 100%
rename from libgrabsite/favicon.ico
rename to src/grab_site/favicon.ico
diff --git a/libgrabsite/ignore_sets/blogs b/src/grab_site/ignore_sets/blogs
similarity index 100%
rename from libgrabsite/ignore_sets/blogs
rename to src/grab_site/ignore_sets/blogs
diff --git a/libgrabsite/ignore_sets/coppermine b/src/grab_site/ignore_sets/coppermine
similarity index 100%
rename from libgrabsite/ignore_sets/coppermine
rename to src/grab_site/ignore_sets/coppermine
diff --git a/libgrabsite/ignore_sets/facebook b/src/grab_site/ignore_sets/facebook
similarity index 100%
rename from libgrabsite/ignore_sets/facebook
rename to src/grab_site/ignore_sets/facebook
diff --git a/libgrabsite/ignore_sets/forums b/src/grab_site/ignore_sets/forums
similarity index 100%
rename from libgrabsite/ignore_sets/forums
rename to src/grab_site/ignore_sets/forums
diff --git a/libgrabsite/ignore_sets/global b/src/grab_site/ignore_sets/global
similarity index 100%
rename from libgrabsite/ignore_sets/global
rename to src/grab_site/ignore_sets/global
diff --git a/libgrabsite/ignore_sets/imdb b/src/grab_site/ignore_sets/imdb
similarity index 100%
rename from libgrabsite/ignore_sets/imdb
rename to src/grab_site/ignore_sets/imdb
diff --git a/libgrabsite/ignore_sets/mediawiki b/src/grab_site/ignore_sets/mediawiki
similarity index 100%
rename from libgrabsite/ignore_sets/mediawiki
rename to src/grab_site/ignore_sets/mediawiki
diff --git a/libgrabsite/ignore_sets/meetupeverywhere b/src/grab_site/ignore_sets/meetupeverywhere
similarity index 100%
rename from libgrabsite/ignore_sets/meetupeverywhere
rename to src/grab_site/ignore_sets/meetupeverywhere
diff --git a/libgrabsite/ignore_sets/nogravatar b/src/grab_site/ignore_sets/nogravatar
similarity index 100%
rename from libgrabsite/ignore_sets/nogravatar
rename to src/grab_site/ignore_sets/nogravatar
diff --git a/libgrabsite/ignore_sets/noonion b/src/grab_site/ignore_sets/noonion
similarity index 100%
rename from libgrabsite/ignore_sets/noonion
rename to src/grab_site/ignore_sets/noonion
diff --git a/libgrabsite/ignore_sets/nosortedindex b/src/grab_site/ignore_sets/nosortedindex
similarity index 100%
rename from libgrabsite/ignore_sets/nosortedindex
rename to src/grab_site/ignore_sets/nosortedindex
diff --git a/libgrabsite/ignore_sets/pinterest b/src/grab_site/ignore_sets/pinterest
similarity index 100%
rename from libgrabsite/ignore_sets/pinterest
rename to src/grab_site/ignore_sets/pinterest
diff --git a/libgrabsite/ignore_sets/reddit b/src/grab_site/ignore_sets/reddit
similarity index 100%
rename from libgrabsite/ignore_sets/reddit
rename to src/grab_site/ignore_sets/reddit
diff --git a/libgrabsite/ignore_sets/singletumblr b/src/grab_site/ignore_sets/singletumblr
similarity index 100%
rename from libgrabsite/ignore_sets/singletumblr
rename to src/grab_site/ignore_sets/singletumblr
diff --git a/libgrabsite/ignore_sets/twitter b/src/grab_site/ignore_sets/twitter
similarity index 100%
rename from libgrabsite/ignore_sets/twitter
rename to src/grab_site/ignore_sets/twitter
diff --git a/libgrabsite/ignore_sets/youtube b/src/grab_site/ignore_sets/youtube
similarity index 100%
rename from libgrabsite/ignore_sets/youtube
rename to src/grab_site/ignore_sets/youtube
diff --git a/libgrabsite/main.py b/src/grab_site/main.py
similarity index 99%
rename from libgrabsite/main.py
rename to src/grab_site/main.py
index cfc62a5d..0a8bde29 100644
--- a/libgrabsite/main.py
+++ b/src/grab_site/main.py
@@ -10,12 +10,12 @@ import datetime
 import shlex
 
 import click
-import libgrabsite
+import grab_site
 
 def print_version(ctx, param, value):
     if not value or ctx.resilient_parsing:
         return
-    click.echo(libgrabsite.__version__)
+    click.echo(grab_site.__version__)
     ctx.exit()
 
 def replace_2arg(args, arg, replacement):
@@ -220,7 +220,7 @@ def main(concurrency, concurrent, delay, recursive, offsite_links, igsets,
     else:
         working_dir = os.path.abspath(dir)
 
-    LIBGRABSITE = os.path.dirname(libgrabsite.__file__)
+    LIBGRABSITE = os.path.dirname(grab_site.__file__)
     args = [
         "--debug" if debug else "--quiet",
         "-U", ua,
diff --git a/libgrabsite/server.py b/src/grab_site/server.py
similarity index 100%
rename from libgrabsite/server.py
rename to src/grab_site/server.py
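With these renames, `libgrabsite` disappears as an import name. Any third-party script that still does `import libgrabsite` needs either an update or an alias; a hypothetical shim (not included in this PR), in the same spirit as the `sys.modules` trick TODO.md uses for `html5lib.tokenizer`:

```python
# Hypothetical back-compat alias, not part of this PR: lets legacy
# "import libgrabsite" callers resolve to the renamed package.
import sys

import grab_site

sys.modules["libgrabsite"] = grab_site
```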
diff --git a/libgrabsite/wpull_hooks.py b/src/grab_site/wpull_hooks.py
similarity index 99%
rename from libgrabsite/wpull_hooks.py
rename to src/grab_site/wpull_hooks.py
index eba5cf68..5eeaf185 100644
--- a/libgrabsite/wpull_hooks.py
+++ b/src/grab_site/wpull_hooks.py
@@ -17,8 +17,8 @@ from wpull.pipeline.session import ItemSession
 from wpull.url import URLInfo
 
-from libgrabsite import wpull_tweaks, dashboard_client
-import libgrabsite
+from grab_site import wpull_tweaks, dashboard_client
+import grab_site
 
 working_dir = os.environ["GRAB_SITE_WORKING_DIR"]
@@ -46,7 +46,7 @@ def compile_combined_regexp(patterns):
 def include_ignore_line(line):
     return line and not line.startswith("#")
 
-ignore_sets_path = os.path.join(os.path.dirname(libgrabsite.__file__), "ignore_sets")
+ignore_sets_path = os.path.join(os.path.dirname(grab_site.__file__), "ignore_sets")
 
 def get_patterns_for_ignore_set(name: str):
     assert name != "", name
     with open(os.path.join(ignore_sets_path, name), "r", encoding="utf-8") as f:
diff --git a/libgrabsite/wpull_tweaks.py b/src/grab_site/wpull_tweaks.py
similarity index 95%
rename from libgrabsite/wpull_tweaks.py
rename to src/grab_site/wpull_tweaks.py
index 65de15da..6d0fe3cb 100644
--- a/libgrabsite/wpull_tweaks.py
+++ b/src/grab_site/wpull_tweaks.py
@@ -6,8 +6,8 @@ from wpull.document.html import HTMLReader
 from wpull.processor.rule import ProcessingRule
 
-from libgrabsite import dupespotter, __version__
-from libgrabsite.dupes import DupesOnDisk
+from grab_site import dupespotter, __version__
+from grab_site.dupes import DupesOnDisk
 
 def response_body_size(response) -> int:
diff --git a/tests/offline-tests b/tests/offline-tests
index aadacfa4..fa5294b7 100755
--- a/tests/offline-tests
+++ b/tests/offline-tests
@@ -5,4 +5,4 @@ set -eu -o pipefail -o verbose
 grab-site --help
 grab-site --version
 gs-dump-urls --help
-python -c 'import libgrabsite.server'
+python -c 'import grab_site.server'
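`tests/offline-tests` exercises the new entry points from the shell; the same imports can be smoke-tested from Python directly. A minimal sketch, deliberately skipping `grab_site.wpull_hooks`, which requires `GRAB_SITE_WORKING_DIR` to be set:

```python
# Import smoke test mirroring tests/offline-tests; requires no network,
# only a completed install of the renamed package.
import importlib

for name in ("grab_site", "grab_site.dump_urls", "grab_site.main", "grab_site.server"):
    importlib.import_module(name)
    print("ok", name)
```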