author    clpo13 <clpo13@gmail.com>    2023-09-26 12:09:07 -0700
committer GitHub <noreply@github.com>  2023-09-26 12:09:07 -0700
commit    602bbbb7e387f5c126d5130c6e7193f5ae906d9c (patch)
tree      39d709c7117e05ed25cd4cc1ccd00cf15022448d /wikiget
parent    6d2acf3bba628f62fe91bb778b7bb92a1057969b (diff)
parent    fbac39b1423475345b99c2da6a02be110c660d7a (diff)
Merge pull request #6 from clpo13/style-and-lint
Run linters with Hatch.
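
The commit itself does not include the Hatch configuration, so the following
pyproject.toml snippet is only a sketch of a typical Hatch lint environment,
assuming ruff and black as the linters (the `# noqa: S324` added below points
to a flake8-bandit-style rule set; the env name and script names are
illustrative, not taken from this repository):

    [tool.hatch.envs.lint]
    detached = true
    dependencies = [
      "black",
      "ruff",
    ]

    [tool.hatch.envs.lint.scripts]
    # invoked as: hatch run lint:style
    style = [
      "ruff check .",
      "black --check --diff .",
    ]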
Diffstat (limited to 'wikiget')
-rw-r--r--  wikiget/__init__.py    |  11
-rw-r--r--  wikiget/dl.py          |  99
-rw-r--r--  wikiget/validations.py |   8
-rw-r--r--  wikiget/version.py     |   2
-rw-r--r--  wikiget/wikiget.py     | 135
5 files changed, 143 insertions(+), 112 deletions(-)
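
Most of the diff is mechanical requoting to double quotes, but one substantive
cleanup recurs throughout: the bare verbosity numbers 1 and 2 are replaced by
the named constants STD_VERBOSE and VERY_VERBOSE introduced in __init__.py
(first hunk below). A minimal, self-contained sketch of the pattern — not
wikiget's actual code — showing how argparse's count action feeds these
comparisons:

    import argparse

    STD_VERBOSE = 1   # mirrors wikiget.STD_VERBOSE
    VERY_VERBOSE = 2  # mirrors wikiget.VERY_VERBOSE

    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", action="count", default=0)
    args = parser.parse_args(["-vv"])  # each -v increments args.verbose

    if args.verbose >= VERY_VERBOSE:
        print("debug-level detail")    # reached with -vv or more
    elif args.verbose >= STD_VERBOSE:
        print("informational detail")  # reached with a single -v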
diff --git a/wikiget/__init__.py b/wikiget/__init__.py
index 126f04d..b68b0ec 100644
--- a/wikiget/__init__.py
+++ b/wikiget/__init__.py
@@ -22,7 +22,10 @@ from wikiget.version import __version__ as wikiget_version
# set some global constants
BLOCKSIZE = 65536
CHUNKSIZE = 1024
-DEFAULT_SITE = 'commons.wikimedia.org'
-DEFAULT_PATH = '/w/'
-USER_AGENT = ('wikiget/{} (https://github.com/clpo13/wikiget) '
- 'mwclient/{}'.format(wikiget_version, mwclient_version))
+DEFAULT_SITE = "commons.wikimedia.org"
+DEFAULT_PATH = "/w/"
+USER_AGENT = "wikiget/{} (https://github.com/clpo13/wikiget) mwclient/{}".format(
+ wikiget_version, mwclient_version
+)
+STD_VERBOSE = 1
+VERY_VERBOSE = 2
diff --git a/wikiget/dl.py b/wikiget/dl.py
index f05061e..949f09e 100644
--- a/wikiget/dl.py
+++ b/wikiget/dl.py
@@ -23,7 +23,7 @@ from mwclient import APIError, InvalidResponse, LoginError, Site
from requests import ConnectionError, HTTPError
from tqdm import tqdm
-from wikiget import CHUNKSIZE, DEFAULT_SITE, USER_AGENT
+import wikiget
from wikiget.validations import valid_file, verify_hash
@@ -33,10 +33,9 @@ def download(dl, args):
if url.netloc:
filename = url.path
site_name = url.netloc
- if args.site is not DEFAULT_SITE and not args.quiet:
+ if args.site is not wikiget.DEFAULT_SITE and not args.quiet:
# this will work even if the user specifies 'commons.wikimedia.org'
- print('Warning: target is a URL, '
- 'ignoring site specified with --site')
+ print("Warning: target is a URL, ignoring site specified with --site")
else:
filename = dl
site_name = args.site
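
The `is not` comparison in this hunk tests object identity, not string
equality: argparse stores the default object itself when the option is
omitted, while a value typed on the command line is always a distinct string
object. That is why the in-code comment says the warning still fires when the
user explicitly passes 'commons.wikimedia.org'. A hypothetical standalone
demo (the runtime join builds a fresh string the way a real argv entry would;
equal literals inside one module can be folded into a single object, which
would hide the effect):

    import argparse

    DEFAULT_SITE = "commons.wikimedia.org"

    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--site", default=DEFAULT_SITE)

    omitted = parser.parse_args([])
    user_input = ".".join(["commons", "wikimedia", "org"])  # fresh object
    typed = parser.parse_args(["--site", user_input])

    print(omitted.site is DEFAULT_SITE)  # True: the default object itself
    print(typed.site is DEFAULT_SITE)    # False: equal text, distinct object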
@@ -56,30 +55,32 @@ def download(dl, args):
dest = args.output or filename
- if args.verbose >= 2:
- print(f'User agent: {USER_AGENT}')
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print(f"User agent: {wikiget.USER_AGENT}")
# connect to site and identify ourselves
- if args.verbose >= 1:
- print(f'Site name: {site_name}')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(f"Site name: {site_name}")
try:
- site = Site(site_name, path=args.path, clients_useragent=USER_AGENT)
+ site = Site(site_name, path=args.path, clients_useragent=wikiget.USER_AGENT)
if args.username and args.password:
site.login(args.username, args.password)
except ConnectionError as e:
# usually this means there is no such site, or there's no network
# connection, though it could be a certificate problem
print("Error: couldn't connect to specified site.")
- if args.verbose >= 2:
- print('Full error message:')
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
print(e)
sys.exit(1)
except HTTPError as e:
# most likely a 403 forbidden or 404 not found error for api.php
- print("Error: couldn't find the specified wiki's api.php. "
- "Check the value of --path.")
- if args.verbose >= 2:
- print('Full error message:')
+ print(
+ "Error: couldn't find the specified wiki's api.php. "
+ "Check the value of --path."
+ )
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
print(e)
sys.exit(1)
except (InvalidResponse, LoginError) as e:
@@ -95,10 +96,12 @@ def download(dl, args):
except APIError as e:
# an API error at this point likely means access is denied,
# which could happen with a private wiki
- print('Error: access denied. Try providing credentials with '
- '--username and --password.')
- if args.verbose >= 2:
- print('Full error message:')
+ print(
+ "Error: access denied. Try providing credentials with "
+ "--username and --password."
+ )
+ if args.verbose >= wikiget.VERY_VERBOSE:
+ print("Full error message:")
for i in e.args:
print(i)
sys.exit(1)
@@ -106,59 +109,63 @@ def download(dl, args):
if file.imageinfo != {}:
# file exists either locally or at a common repository,
# like Wikimedia Commons
- file_url = file.imageinfo['url']
- file_size = file.imageinfo['size']
- file_sha1 = file.imageinfo['sha1']
-
- if args.verbose >= 1:
- print(f"Info: downloading '{filename}' "
- f"({file_size} bytes) from {site.host}",
- end='')
+ file_url = file.imageinfo["url"]
+ file_size = file.imageinfo["size"]
+ file_sha1 = file.imageinfo["sha1"]
+
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(
+ f"Info: downloading '{filename}' "
+ f"({file_size} bytes) from {site.host}",
+ end="",
+ )
if args.output:
print(f" to '{dest}'")
else:
- print('\n', end='')
- print(f'Info: {file_url}')
+ print("\n", end="")
+ print(f"Info: {file_url}")
if os.path.isfile(dest) and not args.force:
- print(f"File '{dest}' already exists, skipping download "
- "(use -f to ignore)")
+ print(f"File '{dest}' already exists, skipping download (use -f to ignore)")
else:
try:
- fd = open(dest, 'wb')
- except IOError as e:
- print('File could not be written. '
- 'The following error was encountered:')
+ fd = open(dest, "wb")
+ except OSError as e:
+ print("File could not be written. The following error was encountered:")
print(e)
sys.exit(1)
else:
# download the file(s)
- if args.verbose >= 1:
+ if args.verbose >= wikiget.STD_VERBOSE:
leave_bars = True
else:
leave_bars = False
- with tqdm(leave=leave_bars, total=file_size,
- unit='B', unit_scale=True,
- unit_divisor=CHUNKSIZE) as progress_bar:
+ with tqdm(
+ leave=leave_bars,
+ total=file_size,
+ unit="B",
+ unit_scale=True,
+ unit_divisor=wikiget.CHUNKSIZE,
+ ) as progress_bar:
with fd:
res = site.connection.get(file_url, stream=True)
progress_bar.set_postfix(file=dest, refresh=False)
- for chunk in res.iter_content(CHUNKSIZE):
+ for chunk in res.iter_content(wikiget.CHUNKSIZE):
fd.write(chunk)
progress_bar.update(len(chunk))
# verify file integrity and optionally print details
dl_sha1 = verify_hash(dest)
- if args.verbose >= 1:
- print(f'Info: downloaded file SHA1 is {dl_sha1}')
- print(f'Info: server file SHA1 is {file_sha1}')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print(f"Info: downloaded file SHA1 is {dl_sha1}")
+ print(f"Info: server file SHA1 is {file_sha1}")
if dl_sha1 == file_sha1:
- if args.verbose >= 1:
- print('Info: hashes match!')
+ if args.verbose >= wikiget.STD_VERBOSE:
+ print("Info: hashes match!")
# at this point, we've successfully downloaded the file
else:
- print('Error: hash mismatch! Downloaded file may be corrupt.')
+ print("Error: hash mismatch! Downloaded file may be corrupt.")
sys.exit(1)
else:
diff --git a/wikiget/validations.py b/wikiget/validations.py
index 5e7213f..dc70df4 100644
--- a/wikiget/validations.py
+++ b/wikiget/validations.py
@@ -31,7 +31,7 @@ def valid_file(search_string):
"""
# second group could also restrict to file extensions with three or more
# letters with ([^/\r\n\t\f\v]+\.\w{3,})
- file_regex = re.compile(r'(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$', re.I)
+ file_regex = re.compile(r"(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$", re.I)
return file_regex.search(search_string)
@@ -44,7 +44,7 @@ def valid_site(search_string):
:param search_string: string to validate
:returns: a regex Match object if there's a match or None otherwise
"""
- site_regex = re.compile(r'wiki[mp]edia\.org$', re.I)
+ site_regex = re.compile(r"wiki[mp]edia\.org$", re.I)
return site_regex.search(search_string)
@@ -55,8 +55,8 @@ def verify_hash(filename):
:param filename: name of the file to calculate a hash for
:return: hash digest
"""
- hasher = hashlib.sha1()
- with open(filename, 'rb') as dl:
+ hasher = hashlib.sha1() # noqa: S324
+ with open(filename, "rb") as dl:
buf = dl.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
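
The `# noqa: S324` added in this hunk suppresses a flake8-bandit-style rule
that flags SHA-1 as cryptographically weak; wikiget keeps SHA-1 because it
only mirrors the hash the MediaWiki API reports, for integrity checking
rather than security. For reference, a sketch of the chunked-hashing pattern
verify_hash() uses (illustrative names, not the verbatim function):

    import hashlib

    BLOCKSIZE = 65536  # same chunk size wikiget uses

    def sha1_of(path: str) -> str:
        """Hash a file in fixed-size chunks to keep memory use flat."""
        hasher = hashlib.sha1()  # noqa: S324 - integrity check, not crypto
        with open(path, "rb") as f:
            while chunk := f.read(BLOCKSIZE):
                hasher.update(chunk)
        return hasher.hexdigest()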
diff --git a/wikiget/version.py b/wikiget/version.py
index 93b60a1..dd9b22c 100644
--- a/wikiget/version.py
+++ b/wikiget/version.py
@@ -1 +1 @@
-__version__ = '0.5.1'
+__version__ = "0.5.1"
diff --git a/wikiget/wikiget.py b/wikiget/wikiget.py
index da60037..ba36766 100644
--- a/wikiget/wikiget.py
+++ b/wikiget/wikiget.py
@@ -19,7 +19,7 @@ import argparse
import logging
import sys
-from wikiget import DEFAULT_SITE, DEFAULT_PATH, wikiget_version
+import wikiget
from wikiget.dl import download
@@ -29,81 +29,102 @@ def main():
when installed with `pip install` or `python setup.py install`.
"""
- parser = argparse.ArgumentParser(description="""
- A tool for downloading files from
- MediaWiki sites using the file name or
- description page URL
- """,
- epilog="""
- Copyright (C) 2018-2021 Cody Logan
- and contributors.
- License GPLv3+: GNU GPL version 3 or later
- <http://www.gnu.org/licenses/gpl.html>.
- This is free software; you are free to
- change and redistribute it under certain
- conditions. There is NO WARRANTY, to the
- extent permitted by law.
- """)
- parser.add_argument('FILE', help="""
- name of the file to download with the File:
- prefix, or the URL of its file description page
- """)
- parser.add_argument('-V', '--version', action='version',
- version=f'%(prog)s {wikiget_version}')
+ parser = argparse.ArgumentParser(
+ description="""
+ A tool for downloading files from
+ MediaWiki sites using the file name or
+ description page URL
+ """,
+ epilog="""
+ Copyright (C) 2018-2023 Cody Logan
+ and contributors.
+ License GPLv3+: GNU GPL version 3 or later
+ <http://www.gnu.org/licenses/gpl.html>.
+ This is free software; you are free to
+ change and redistribute it under certain
+ conditions. There is NO WARRANTY, to the
+ extent permitted by law.
+ """,
+ )
+ parser.add_argument(
+ "FILE",
+ help="""
+ name of the file to download with the File:
+ prefix, or the URL of its file description page
+ """,
+ )
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=f"%(prog)s {wikiget.wikiget_version}",
+ )
message_options = parser.add_mutually_exclusive_group()
- message_options.add_argument('-q', '--quiet',
- help='suppress warning messages',
- action='store_true')
- message_options.add_argument('-v', '--verbose',
- help='print detailed information; '
- 'use -vv for even more detail',
- action='count', default=0)
- parser.add_argument('-f', '--force',
- help='force overwriting existing files',
- action='store_true')
- parser.add_argument('-s', '--site', default=DEFAULT_SITE,
- help='MediaWiki site to download from '
- '(default: %(default)s)')
- parser.add_argument('-p', '--path', default=DEFAULT_PATH,
- help='MediaWiki site path, where api.php is located '
- '(default: %(default)s)')
- parser.add_argument('--username', default='',
- help='MediaWiki site username, for private wikis')
- parser.add_argument('--password', default='',
- help='MediaWiki site password, for private wikis')
+ message_options.add_argument(
+ "-q", "--quiet", help="suppress warning messages", action="store_true"
+ )
+ message_options.add_argument(
+ "-v",
+ "--verbose",
+ help="print detailed information; use -vv for even more detail",
+ action="count",
+ default=0,
+ )
+ parser.add_argument(
+ "-f", "--force", help="force overwriting existing files", action="store_true"
+ )
+ parser.add_argument(
+ "-s",
+ "--site",
+ default=wikiget.DEFAULT_SITE,
+ help="MediaWiki site to download from (default: %(default)s)",
+ )
+ parser.add_argument(
+ "-p",
+ "--path",
+ default=wikiget.DEFAULT_PATH,
+ help="MediaWiki site path, where api.php is located (default: %(default)s)",
+ )
+ parser.add_argument(
+ "--username", default="", help="MediaWiki site username, for private wikis"
+ )
+ parser.add_argument(
+ "--password", default="", help="MediaWiki site password, for private wikis"
+ )
output_options = parser.add_mutually_exclusive_group()
- output_options.add_argument('-o', '--output',
- help='write download to OUTPUT')
- output_options.add_argument('-a', '--batch',
- help='treat FILE as a textfile containing '
- 'multiple files to download, one URL or '
- 'filename per line', action='store_true')
+ output_options.add_argument("-o", "--output", help="write download to OUTPUT")
+ output_options.add_argument(
+ "-a",
+ "--batch",
+ help="treat FILE as a textfile containing "
+ "multiple files to download, one URL or "
+ "filename per line",
+ action="store_true",
+ )
args = parser.parse_args()
# print API and debug messages in verbose mode
- if args.verbose >= 2:
+ if args.verbose >= wikiget.VERY_VERBOSE:
logging.basicConfig(level=logging.DEBUG)
- elif args.verbose >= 1:
+ elif args.verbose >= wikiget.STD_VERBOSE:
logging.basicConfig(level=logging.WARNING)
if args.batch:
# batch download mode
input_file = args.FILE
- if args.verbose >= 1:
+ if args.verbose >= wikiget.STD_VERBOSE:
print(f"Info: using batch file '{input_file}'")
try:
- fd = open(input_file, 'r')
- except IOError as e:
- print('File could not be read. '
- 'The following error was encountered:')
+ fd = open(input_file)
+ except OSError as e:
+ print("File could not be read. The following error was encountered:")
print(e)
sys.exit(1)
else:
with fd:
for _, line in enumerate(fd):
- line = line.strip()
- download(line, args)
+ download(line.strip(), args)
else:
# single download mode
dl = args.FILE