-rw-r--r--   .travis.yml                 22
-rw-r--r--   pyproject.toml             151
-rw-r--r--   test/test_validations.py    45
-rw-r--r--   wikiget/__init__.py         11
-rw-r--r--   wikiget/dl.py               99
-rw-r--r--   wikiget/validations.py       8
-rw-r--r--   wikiget/version.py           2
-rw-r--r--   wikiget/wikiget.py         135
8 files changed, 310 insertions, 163 deletions
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index c1ca29c..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-language: python
-dist: xenial
-python:
- - '3.6'
- - '3.7'
- - '3.8'
- - '3.9'
- - '3.10-dev'
-install:
- - pip install -U pip
- - pip install .
-script:
- - python setup.py test
-deploy:
- provider: pypi
- user: clpo13
- password:
- secure: KvcviHqqT4YprtmzAtf9w8BkKPfwJ53LIOXmUFotzT1Qjt3FSE7bWVzDItFjy54zZM+tqKAniL91R+2tM5uQFn4fVS/yykN1Akts6ZnkJdq99Lgdb1V3gEv366K5AWoYKgjZX+PRvmOk8BXSrqbVtXN0lhmoemmeJVDqDHg2HJZNYFwvmr/g64amm2d/cdfLxKHpduwciNY6xUhOFIdlbrJ1T767mpC+gnqfzmJeNF7K95pmyBF6Wvl4AkKzwJJkyZULQF2VFtIT6bzSuM6G26ZT6H7UyoP+8+CvI4Fe6h8Ol7sWSuVC5gz+5istRORUy8RQ22HWW1ZZKOw1+8/dHuBPvIZOnfcTvNw07e7267KUoO4FfGLvTxU2likorr5gZh1YaCNut6XJkjzwNddkutCXv65H7zOhSn2gl7vMFkUUf+kEM9pSBcA1zf7Y9+7U3HgyD1OH+a5jRIOe0Vy9r3PPaMXuDgsHxZkrVlsr3LgtGwFD0jWMDZtROXds6OXW6/n6cN30IPSf/qWdgduNIq3wj0JbALI5AB0rugNNPhePMVOfF90W9WLPFlxQCjLji8NpvM5341bS8aLhFIgIfRGDgG9AN+I/dZNIwD2J0vfw/BDaLVNc2XUAGLa359Lbz9bFYUp0J5B8hdMMyR6YULaN5alz2VsUC5sD6kPiqTo=
- on:
- tags: true
- distributions: sdist bdist_wheel
- skip_existing: true
diff --git a/pyproject.toml b/pyproject.toml
index eed7728..8b906c6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,8 +1,8 @@
[build-system]
-requires = ["setuptools"]
-build-backend = "setuptools.build_meta"
-#requires = ["hatchling"]
-#build-backend = "hatchling.build"
+#requires = ["setuptools"]
+#build-backend = "setuptools.build_meta"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
#requires = ["pdm-backend"]
#build-backend = "pdm.backend"
@@ -54,15 +54,142 @@ wikiget = "wikiget.wikiget:main"
[tool.setuptools.dynamic]
version = {attr = "wikiget.version.__version__"}
-#[tool.hatch.version]
-#path = "src/wikiget/version.py"
+[tool.hatch.version]
+path = "wikiget/version.py"
#[tool.pdm]
-#version = { source = "file", path = "src/wikiget/version.py" }
+#version = { source = "file", path = "wikiget/version.py" }
-[tool.pytest.ini_options]
-addopts = [
- "--import-mode=importlib",
- "--cov=wikiget",
+#[tool.pytest.ini_options]
+#addopts = [
+# "--import-mode=importlib",
+# "--cov=wikiget",
+#]
+#testpaths = ["test"]
+
+[tool.hatch.envs.default]
+dependencies = [
+ "coverage[toml]>=6.5",
+ "pytest",
+]
+[tool.hatch.envs.default.scripts]
+test = "pytest {args:test}"
+test-cov = "coverage run -m pytest {args:test}"
+cov-report = [
+ "- coverage combine",
+ "coverage report",
+]
+cov = [
+ "test-cov",
+ "cov-report",
]
-testpaths = ["test"]
+
+[[tool.hatch.envs.all.matrix]]
+python = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+
+[tool.hatch.envs.lint]
+detached = true
+dependencies = [
+ "black",
+ "mypy",
+ "ruff",
+]
+[tool.hatch.envs.lint.scripts]
+typing = "mypy --install-types --non-interactive {args:wikiget test}"
+style = [
+ "ruff {args:.}",
+ "black --check --diff {args:.}",
+]
+fmt = [
+ "black {args:.}",
+ "ruff --fix {args:.}",
+ "style",
+]
+all = [
+ "style",
+ "typing",
+]
+
+[tool.black]
+target-version = ["py37"]
+line-length = 88
+
+[tool.ruff]
+target-version = "py37"
+line-length = 88
+select = [
+ "A",
+ "ARG",
+ "B",
+ "C",
+ "DTZ",
+ "E",
+ "EM",
+ "F",
+ "FBT",
+ "I",
+ "ICN",
+ "ISC",
+ "N",
+ "PLC",
+ "PLE",
+ "PLR",
+ "PLW",
+ "Q",
+ "RUF",
+ "S",
+ "T",
+ "TID",
+ "UP",
+ "W",
+ "YTT",
+]
+ignore = [
+ # Allow non-abstract empty methods in abstract base classes
+ "B027",
+ # Allow boolean positional values in function calls, like `dict.get(... True)`
+ "FBT003",
+ # Ignore checks for possible passwords
+ "S105", "S106", "S107",
+ # Ignore complexity
+ "C901", "PLR0911", "PLR0912", "PLR0913", "PLR0915",
+ # FIXME: temporarily ignore usage of `print()`
+ "T201",
+]
+unfixable = [
+ # Don't touch unused imports
+ "F401",
+]
+
+[tool.ruff.isort]
+known-first-party = ["wikiget"]
+
+[tool.ruff.flake8-tidy-imports]
+ban-relative-imports = "all"
+
+[tool.ruff.per-file-ignores]
+# Tests can use magic values, assertions, and relative imports
+"test/**/*" = ["PLR2004", "S101", "TID252"]
+
+[tool.coverage.run]
+source_pkgs = ["wikiget"]
+branch = true
+parallel = true
+omit = [
+ "wikiget/version.py",
+]
+
+[tool.coverage.paths]
+hatchtest = ["wikiget"]
+tests = ["test"]
+
+[tool.coverage.report]
+exclude_lines = [
+ "no cov",
+ "if __name__ == .__main__.:",
+ "if TYPE_CHECKING:",
+]
+
+[[tool.mypy.overrides]]
+module = ["mwclient"]
+ignore_missing_imports = true
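For context, a rough Python sketch (not hatchling's actual implementation) of what the new [tool.hatch.version] table does: at build time the backend reads the version string out of wikiget/version.py rather than from a static field in pyproject.toml.

    import re
    from pathlib import Path

    # Rough approximation of dynamic versioning; hatchling's real parsing
    # and error handling differ.
    text = Path("wikiget/version.py").read_text()
    match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", text)
    print(match.group(1) if match else None)  # -> 0.5.1, per the version.py diff below
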
diff --git a/test/test_validations.py b/test/test_validations.py
index 5b7d4fc..1abd96a 100644
--- a/test/test_validations.py
+++ b/test/test_validations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# wikiget - CLI tool for downloading files from Wikimedia sites
# Copyright (C) 2018-2021 Cody Logan
# SPDX-License-Identifier: GPL-3.0-or-later
@@ -23,8 +22,12 @@ def test_invalid_site_input():
"""
Invalid site strings should not return regex match objects.
"""
- invalid_input = ['example.com', 'vim.wikia.com',
- 'en.wikipedia.com', 'en.wikimpedia.org']
+ invalid_input = [
+ "example.com",
+ "vim.wikia.com",
+ "en.wikipedia.com",
+ "en.wikimpedia.org",
+ ]
for i in invalid_input:
site_match = valid_site(i)
assert site_match is None
@@ -34,8 +37,12 @@ def test_valid_site_input():
"""
Valid site strings should return regex match objects.
"""
- valid_input = ['en.wikipedia.org', 'commons.wikimedia.org',
- 'de.wikipedia.org', 'meta.wikimedia.org']
+ valid_input = [
+ "en.wikipedia.org",
+ "commons.wikimedia.org",
+ "de.wikipedia.org",
+ "meta.wikimedia.org",
+ ]
for i in valid_input:
site_match = valid_site(i)
assert site_match is not None
@@ -46,20 +53,19 @@ def test_file_regex():
File regex should return a match object with match groups corresponding
to the file prefix and name.
"""
- i = 'File:Example.jpg'
+ i = "File:Example.jpg"
file_match = valid_file(i)
assert file_match is not None
- assert file_match.group(0) == 'File:Example.jpg' # entire match
- assert file_match.group(1) == 'File:' # first group
- assert file_match.group(2) == 'Example.jpg' # second group
+ assert file_match.group(0) == "File:Example.jpg" # entire match
+ assert file_match.group(1) == "File:" # first group
+ assert file_match.group(2) == "Example.jpg" # second group
def test_invalid_file_input():
"""
Invalid file strings should not return regex match objects.
"""
- invalid_input = ['file:example', 'example.jpg', 'Foo Bar.gif',
- 'Fil:Example.jpg']
+ invalid_input = ["file:example", "example.jpg", "Foo Bar.gif", "Fil:Example.jpg"]
for i in invalid_input:
file_match = valid_file(i)
assert file_match is None
@@ -69,9 +75,14 @@ def test_valid_file_input():
"""
Valid file strings should return regex match objects.
"""
- valid_input = ['Image:example.jpg', 'file:example.jpg',
- 'File:example.file-01.jpg', 'FILE:FOO.BMP',
- 'File:ß handwritten sample.gif', 'File:A (1).jpeg']
+ valid_input = [
+ "Image:example.jpg",
+ "file:example.jpg",
+ "File:example.file-01.jpg",
+ "FILE:FOO.BMP",
+ "File:ß handwritten sample.gif",
+ "File:A (1).jpeg",
+ ]
for i in valid_input:
file_match = valid_file(i)
assert file_match is not None
@@ -81,9 +92,9 @@ def test_verify_hash(tmp_path):
"""
Confirm that verify_hash returns the proper SHA1 hash.
"""
- file_name = 'testfile'
- file_contents = 'foobar'
- file_sha1 = '8843d7f92416211de9ebb963ff4ce28125932878'
+ file_name = "testfile"
+ file_contents = "foobar"
+ file_sha1 = "8843d7f92416211de9ebb963ff4ce28125932878"
tmp_file = tmp_path / file_name
tmp_file.write_text(file_contents)
diff --git a/wikiget/__init__.py b/wikiget/__init__.py
index 126f04d..b68b0ec 100644
--- a/wikiget/__init__.py
+++ b/wikiget/__init__.py
@@ -22,7 +22,10 @@ from wikiget.version import __version__ as wikiget_version
# set some global constants
BLOCKSIZE = 65536
CHUNKSIZE = 1024
-DEFAULT_SITE = 'commons.wikimedia.org'
-DEFAULT_PATH = '/w/'
-USER_AGENT = ('wikiget/{} (https://github.com/clpo13/wikiget) '
- 'mwclient/{}'.format(wikiget_version, mwclient_version))
+DEFAULT_SITE = "commons.wikimedia.org"
+DEFAULT_PATH = "/w/"
+USER_AGENT = "wikiget/{} (https://github.com/clpo13/wikiget) mwclient/{}".format(
+ wikiget_version, mwclient_version
+)
+STD_VERBOSE = 1
+VERY_VERBOSE = 2
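The new STD_VERBOSE and VERY_VERBOSE constants name the verbosity thresholds that dl.py and wikiget.py previously compared as bare 1 and 2. A minimal usage sketch (the mwclient version shown is illustrative):

    from wikiget import STD_VERBOSE, VERY_VERBOSE, USER_AGENT

    # With wikiget 0.5.1 and, say, mwclient 0.10.1 installed, USER_AGENT
    # renders as: wikiget/0.5.1 (https://github.com/clpo13/wikiget) mwclient/0.10.1
    print(USER_AGENT)

    verbose = 2  # e.g. the result of passing -vv on the command line
    if verbose >= VERY_VERBOSE:   # replaces the bare `>= 2` comparisons
        print("debug-level detail enabled")
    elif verbose >= STD_VERBOSE:  # replaces the bare `>= 1` comparisons
        print("standard detail enabled")
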
diff --git a/wikiget/dl.py b/wikiget/dl.py
index f05061e..949f09e 100644
--- a/wikiget/dl.py
+++ b/wikiget/dl.py
@@ -23,7 +23,7 @@ from mwclient import APIError, InvalidResponse, LoginError, Site
from requests import ConnectionError, HTTPError
from tqdm import tqdm
-from wikiget import CHUNKSIZE, DEFAULT_SITE, USER_AGENT
+import wikiget
from wikiget.validations import valid_file, verify_hash
@@ -33,10 +33,9 @@ def download(dl, args):
if url.netloc:
filename = url.path
site_name = url.netloc
- if args.site is not DEFAULT_SITE and not args.quiet:
+ if args.site is not wikiget.DEFAULT_SITE and not args.quiet:
# this will work even if the user specifies 'commons.wikimedia.org'
- print('Warning: target is a URL, '
- 'ignoring site specified with --site')
+ print("Warning: target is a URL, ignoring site specified with --site")
else:
filename = dl
site_name = args.site
@@ -56,30 +55,32 @@ def download(dl, args):
dest = args.output or filename
- if args.verbose >= 2:
- print(f'User agent: {USER_AGENT}')
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print(f"User agent: {wikiget.USER_AGENT}")
# connect to site and identify ourselves
- if args.verbose >= 1:
- print(f'Site name: {site_name}')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(f"Site name: {site_name}")
try:
- site = Site(site_name, path=args.path, clients_useragent=USER_AGENT)
+ site = Site(site_name, path=args.path, clients_useragent=wikiget.USER_AGENT)
if args.username and args.password:
site.login(args.username, args.password)
except ConnectionError as e:
# usually this means there is no such site, or there's no network
# connection, though it could be a certificate problem
print("Error: couldn't connect to specified site.")
- if args.verbose >= 2:
- print('Full error message:')
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
print(e)
sys.exit(1)
except HTTPError as e:
# most likely a 403 forbidden or 404 not found error for api.php
- print("Error: couldn't find the specified wiki's api.php. "
- "Check the value of --path.")
- if args.verbose >= 2:
- print('Full error message:')
+ print(
+ "Error: couldn't find the specified wiki's api.php. "
+ "Check the value of --path."
+ )
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
print(e)
sys.exit(1)
except (InvalidResponse, LoginError) as e:
@@ -95,10 +96,12 @@ def download(dl, args):
except APIError as e:
# an API error at this point likely means access is denied,
# which could happen with a private wiki
- print('Error: access denied. Try providing credentials with '
- '--username and --password.')
- if args.verbose >= 2:
- print('Full error message:')
+ print(
+ "Error: access denied. Try providing credentials with "
+ "--username and --password."
+ )
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
for i in e.args:
print(i)
sys.exit(1)
@@ -106,59 +109,63 @@ def download(dl, args):
if file.imageinfo != {}:
# file exists either locally or at a common repository,
# like Wikimedia Commons
- file_url = file.imageinfo['url']
- file_size = file.imageinfo['size']
- file_sha1 = file.imageinfo['sha1']
-
- if args.verbose >= 1:
- print(f"Info: downloading '{filename}' "
- f"({file_size} bytes) from {site.host}",
- end='')
+ file_url = file.imageinfo["url"]
+ file_size = file.imageinfo["size"]
+ file_sha1 = file.imageinfo["sha1"]
+
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(
+ f"Info: downloading '{filename}' "
+ f"({file_size} bytes) from {site.host}",
+ end="",
+ )
if args.output:
print(f" to '{dest}'")
else:
- print('\n', end='')
- print(f'Info: {file_url}')
+ print("\n", end="")
+ print(f"Info: {file_url}")
if os.path.isfile(dest) and not args.force:
- print(f"File '{dest}' already exists, skipping download "
- "(use -f to ignore)")
+ print(f"File '{dest}' already exists, skipping download (use -f to ignore)")
else:
try:
- fd = open(dest, 'wb')
- except IOError as e:
- print('File could not be written. '
- 'The following error was encountered:')
+ fd = open(dest, "wb")
+ except OSError as e:
+ print("File could not be written. The following error was encountered:")
print(e)
sys.exit(1)
else:
# download the file(s)
- if args.verbose >= 1:
+ if args.verbose >= wikiget.STD_VERBOSE:
leave_bars = True
else:
leave_bars = False
- with tqdm(leave=leave_bars, total=file_size,
- unit='B', unit_scale=True,
- unit_divisor=CHUNKSIZE) as progress_bar:
+ with tqdm(
+ leave=leave_bars,
+ total=file_size,
+ unit="B",
+ unit_scale=True,
+ unit_divisor=wikiget.CHUNKSIZE,
+ ) as progress_bar:
with fd:
res = site.connection.get(file_url, stream=True)
progress_bar.set_postfix(file=dest, refresh=False)
- for chunk in res.iter_content(CHUNKSIZE):
+ for chunk in res.iter_content(wikiget.CHUNKSIZE):
fd.write(chunk)
progress_bar.update(len(chunk))
# verify file integrity and optionally print details
dl_sha1 = verify_hash(dest)
- if args.verbose >= 1:
- print(f'Info: downloaded file SHA1 is {dl_sha1}')
- print(f'Info: server file SHA1 is {file_sha1}')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(f"Info: downloaded file SHA1 is {dl_sha1}")
+ print(f"Info: server file SHA1 is {file_sha1}")
if dl_sha1 == file_sha1:
- if args.verbose >= 1:
- print('Info: hashes match!')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print("Info: hashes match!")
# at this point, we've successfully downloaded the file
else:
- print('Error: hash mismatch! Downloaded file may be corrupt.')
+ print("Error: hash mismatch! Downloaded file may be corrupt.")
sys.exit(1)
else:
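Condensed from the download loop above, a self-contained sketch of the streaming pattern (the URL and output filename are illustrative, and plain requests stands in for site.connection): with unit="B", unit_scale=True, and unit_divisor=1024, tqdm displays binary-prefixed progress (KiB/MiB).

    import requests
    from tqdm import tqdm

    CHUNKSIZE = 1024
    url = "https://upload.wikimedia.org/wikipedia/commons/a/a9/Example.jpg"  # illustrative
    res = requests.get(url, stream=True)
    total = int(res.headers.get("content-length", 0))
    with open("Example.jpg", "wb") as fd, tqdm(
        total=total, unit="B", unit_scale=True, unit_divisor=CHUNKSIZE
    ) as progress_bar:
        for chunk in res.iter_content(CHUNKSIZE):
            fd.write(chunk)
            progress_bar.update(len(chunk))
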
diff --git a/wikiget/validations.py b/wikiget/validations.py
index 5e7213f..dc70df4 100644
--- a/wikiget/validations.py
+++ b/wikiget/validations.py
@@ -31,7 +31,7 @@ def valid_file(search_string):
"""
# second group could also restrict to file extensions with three or more
# letters with ([^/\r\n\t\f\v]+\.\w{3,})
- file_regex = re.compile(r'(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$', re.I)
+ file_regex = re.compile(r"(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$", re.I)
return file_regex.search(search_string)
@@ -44,7 +44,7 @@ def valid_site(search_string):
:param search_string: string to validate
:returns: a regex Match object if there's a match or None otherwise
"""
- site_regex = re.compile(r'wiki[mp]edia\.org$', re.I)
+ site_regex = re.compile(r"wiki[mp]edia\.org$", re.I)
return site_regex.search(search_string)
@@ -55,8 +55,8 @@ def verify_hash(filename):
:param filename: name of the file to calculate a hash for
:return: hash digest
"""
- hasher = hashlib.sha1()
- with open(filename, 'rb') as dl:
+ hasher = hashlib.sha1() # noqa: S324
+ with open(filename, "rb") as dl:
buf = dl.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
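Taken together with test_validations.py above, the three helpers behave roughly like this (a usage sketch; "testfile" is assumed to contain the string "foobar"):

    from wikiget.validations import valid_file, valid_site, verify_hash

    m = valid_file("File:Example.jpg")
    assert m is not None
    assert m.group(1) == "File:" and m.group(2) == "Example.jpg"

    assert valid_site("commons.wikimedia.org") is not None
    assert valid_site("example.com") is None  # only wiki[mp]edia.org hosts match

    # Per test_verify_hash, a file containing "foobar" hashes to:
    assert verify_hash("testfile") == "8843d7f92416211de9ebb963ff4ce28125932878"
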
diff --git a/wikiget/version.py b/wikiget/version.py
index 93b60a1..dd9b22c 100644
--- a/wikiget/version.py
+++ b/wikiget/version.py
@@ -1 +1 @@
-__version__ = '0.5.1'
+__version__ = "0.5.1"
diff --git a/wikiget/wikiget.py b/wikiget/wikiget.py
index da60037..ba36766 100644
--- a/wikiget/wikiget.py
+++ b/wikiget/wikiget.py
@@ -19,7 +19,7 @@ import argparse
import logging
import sys
-from wikiget import DEFAULT_SITE, DEFAULT_PATH, wikiget_version
+import wikiget
from wikiget.dl import download
@@ -29,81 +29,102 @@ def main():
when installed with `pip install` or `python setup.py install`.
"""
- parser = argparse.ArgumentParser(description="""
- A tool for downloading files from
- MediaWiki sites using the file name or
- description page URL
- """,
- epilog="""
- Copyright (C) 2018-2021 Cody Logan
- and contributors.
- License GPLv3+: GNU GPL version 3 or later
- <http://www.gnu.org/licenses/gpl.html>.
- This is free software; you are free to
- change and redistribute it under certain
- conditions. There is NO WARRANTY, to the
- extent permitted by law.
- """)
- parser.add_argument('FILE', help="""
- name of the file to download with the File:
- prefix, or the URL of its file description page
- """)
- parser.add_argument('-V', '--version', action='version',
- version=f'%(prog)s {wikiget_version}')
+ parser = argparse.ArgumentParser(
+ description="""
+ A tool for downloading files from
+ MediaWiki sites using the file name or
+ description page URL
+ """,
+ epilog="""
+ Copyright (C) 2018-2023 Cody Logan
+ and contributors.
+ License GPLv3+: GNU GPL version 3 or later
+ <http://www.gnu.org/licenses/gpl.html>.
+ This is free software; you are free to
+ change and redistribute it under certain
+ conditions. There is NO WARRANTY, to the
+ extent permitted by law.
+ """,
+ )
+ parser.add_argument(
+ "FILE",
+ help="""
+ name of the file to download with the File:
+ prefix, or the URL of its file description page
+ """,
+ )
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=f"%(prog)s {wikiget.wikiget_version}",
+ )
message_options = parser.add_mutually_exclusive_group()
- message_options.add_argument('-q', '--quiet',
- help='suppress warning messages',
- action='store_true')
- message_options.add_argument('-v', '--verbose',
- help='print detailed information; '
- 'use -vv for even more detail',
- action='count', default=0)
- parser.add_argument('-f', '--force',
- help='force overwriting existing files',
- action='store_true')
- parser.add_argument('-s', '--site', default=DEFAULT_SITE,
- help='MediaWiki site to download from '
- '(default: %(default)s)')
- parser.add_argument('-p', '--path', default=DEFAULT_PATH,
- help='MediaWiki site path, where api.php is located '
- '(default: %(default)s)')
- parser.add_argument('--username', default='',
- help='MediaWiki site username, for private wikis')
- parser.add_argument('--password', default='',
- help='MediaWiki site password, for private wikis')
+ message_options.add_argument(
+ "-q", "--quiet", help="suppress warning messages", action="store_true"
+ )
+ message_options.add_argument(
+ "-v",
+ "--verbose",
+ help="print detailed information; use -vv for even more detail",
+ action="count",
+ default=0,
+ )
+ parser.add_argument(
+ "-f", "--force", help="force overwriting existing files", action="store_true"
+ )
+ parser.add_argument(
+ "-s",
+ "--site",
+ default=wikiget.DEFAULT_SITE,
+ help="MediaWiki site to download from (default: %(default)s)",
+ )
+ parser.add_argument(
+ "-p",
+ "--path",
+ default=wikiget.DEFAULT_PATH,
+ help="MediaWiki site path, where api.php is located (default: %(default)s)",
+ )
+ parser.add_argument(
+ "--username", default="", help="MediaWiki site username, for private wikis"
+ )
+ parser.add_argument(
+ "--password", default="", help="MediaWiki site password, for private wikis"
+ )
output_options = parser.add_mutually_exclusive_group()
- output_options.add_argument('-o', '--output',
- help='write download to OUTPUT')
- output_options.add_argument('-a', '--batch',
- help='treat FILE as a textfile containing '
- 'multiple files to download, one URL or '
- 'filename per line', action='store_true')
+ output_options.add_argument("-o", "--output", help="write download to OUTPUT")
+ output_options.add_argument(
+ "-a",
+ "--batch",
+ help="treat FILE as a textfile containing "
+ "multiple files to download, one URL or "
+ "filename per line",
+ action="store_true",
+ )
args = parser.parse_args()
# print API and debug messages in verbose mode
- if args.verbose >= 2:
+ if args.verbose >= wikiget.VERY_VERBOSE:
logging.basicConfig(level=logging.DEBUG)
- elif args.verbose >= 1:
+ elif args.verbose >= wikiget.STD_VERBOSE:
logging.basicConfig(level=logging.WARNING)
if args.batch:
# batch download mode
input_file = args.FILE
- if args.verbose >= 1:
+ if args.verbose >= wikiget.STD_VERBOSE:
print(f"Info: using batch file '{input_file}'")
try:
- fd = open(input_file, 'r')
- except IOError as e:
- print('File could not be read. '
- 'The following error was encountered:')
+ fd = open(input_file)
+ except OSError as e:
+ print("File could not be read. The following error was encountered:")
print(e)
sys.exit(1)
else:
with fd:
for _, line in enumerate(fd):
- line = line.strip()
- download(line, args)
+ download(line.strip(), args)
else:
# single download mode
dl = args.FILE