From 4d60641d84670f5d0174f872a550a05b5effa01a Mon Sep 17 00:00:00 2001 From: Cody Logan Date: Tue, 26 Sep 2023 12:53:06 -0700 Subject: Fix typo in pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8b906c6..87759c4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -180,7 +180,7 @@ omit = [ ] [tool.coverage.paths] -hatchtest = ["wikiget"] +wikiget = ["wikiget"] tests = ["test"] [tool.coverage.report] -- cgit v1.2.3 From cc78716425b184fef0e8cda191c9a0cce53d26a9 Mon Sep 17 00:00:00 2001 From: Cody Logan Date: Tue, 26 Sep 2023 13:17:18 -0700 Subject: Reorganize code into src layout --- pyproject.toml | 39 ++++------ src/wikiget/__init__.py | 31 ++++++++ src/wikiget/dl.py | 174 +++++++++++++++++++++++++++++++++++++++++++++ src/wikiget/validations.py | 64 +++++++++++++++++ src/wikiget/version.py | 1 + src/wikiget/wikiget.py | 131 ++++++++++++++++++++++++++++++++++ test/test_validations.py | 102 -------------------------- tests/test_validations.py | 102 ++++++++++++++++++++++++++ wikiget/__init__.py | 31 -------- wikiget/dl.py | 174 --------------------------------------------- wikiget/validations.py | 64 ----------------- wikiget/version.py | 1 - wikiget/wikiget.py | 131 ---------------------------------- 13 files changed, 518 insertions(+), 527 deletions(-) create mode 100644 src/wikiget/__init__.py create mode 100644 src/wikiget/dl.py create mode 100644 src/wikiget/validations.py create mode 100644 src/wikiget/version.py create mode 100644 src/wikiget/wikiget.py delete mode 100644 test/test_validations.py create mode 100644 tests/test_validations.py delete mode 100644 wikiget/__init__.py delete mode 100644 wikiget/dl.py delete mode 100644 wikiget/validations.py delete mode 100644 wikiget/version.py delete mode 100644 wikiget/wikiget.py diff --git a/pyproject.toml b/pyproject.toml index 87759c4..aab4b3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,6 @@ [build-system] -#requires = ["setuptools"] -#build-backend = "setuptools.build_meta" requires = ["hatchling"] build-backend = "hatchling.build" -#requires = ["pdm-backend"] -#build-backend = "pdm.backend" [project] name = "wikiget" @@ -51,21 +47,19 @@ Repository = "https://github.com/clpo13/wikiget" [project.scripts] wikiget = "wikiget.wikiget:main" -[tool.setuptools.dynamic] -version = {attr = "wikiget.version.__version__"} - [tool.hatch.version] -path = "wikiget/version.py" +path = "src/wikiget/version.py" -#[tool.pdm] -#version = { source = "file", path = "wikiget/version.py" } +[tool.pytest.ini_options] +addopts = [ + "--import-mode=importlib", +] +testpaths = ["tests"] -#[tool.pytest.ini_options] -#addopts = [ -# "--import-mode=importlib", -# "--cov=wikiget", -#] -#testpaths = ["test"] +[tool.hatch.build.targets.sdist] +exclude = [ + "/.github", +] [tool.hatch.envs.default] dependencies = [ @@ -73,8 +67,8 @@ dependencies = [ "pytest", ] [tool.hatch.envs.default.scripts] -test = "pytest {args:test}" -test-cov = "coverage run -m pytest {args:test}" +test = "python -m pytest {args}" +test-cov = "coverage run -m pytest {args}" cov-report = [ "- coverage combine", "coverage report", @@ -95,7 +89,7 @@ dependencies = [ "ruff", ] [tool.hatch.envs.lint.scripts] -typing = "mypy --install-types --non-interactive {args:wikiget test}" +typing = "mypy --install-types --non-interactive {args:src/wikiget tests}" style = [ "ruff {args:.}", "black --check --diff {args:.}", @@ -169,19 +163,16 @@ ban-relative-imports = "all" [tool.ruff.per-file-ignores] # 
Tests can use magic values, assertions, and relative imports -"test/**/*" = ["PLR2004", "S101", "TID252"] +"tests/**/*" = ["PLR2004", "S101", "TID252"] [tool.coverage.run] source_pkgs = ["wikiget"] branch = true parallel = true -omit = [ - "wikiget/version.py", -] [tool.coverage.paths] wikiget = ["wikiget"] -tests = ["test"] +tests = ["tests"] [tool.coverage.report] exclude_lines = [ diff --git a/src/wikiget/__init__.py b/src/wikiget/__init__.py new file mode 100644 index 0000000..b68b0ec --- /dev/null +++ b/src/wikiget/__init__.py @@ -0,0 +1,31 @@ +# wikiget - CLI tool for downloading files from Wikimedia sites +# Copyright (C) 2018, 2019, 2020 Cody Logan and contributors +# SPDX-License-Identifier: GPL-3.0-or-later +# +# Wikiget is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Wikiget is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Wikiget. If not, see . + +from mwclient import __version__ as mwclient_version + +from wikiget.version import __version__ as wikiget_version + +# set some global constants +BLOCKSIZE = 65536 +CHUNKSIZE = 1024 +DEFAULT_SITE = "commons.wikimedia.org" +DEFAULT_PATH = "/w/" +USER_AGENT = "wikiget/{} (https://github.com/clpo13/wikiget) mwclient/{}".format( + wikiget_version, mwclient_version +) +STD_VERBOSE = 1 +VERY_VERBOSE = 2 diff --git a/src/wikiget/dl.py b/src/wikiget/dl.py new file mode 100644 index 0000000..949f09e --- /dev/null +++ b/src/wikiget/dl.py @@ -0,0 +1,174 @@ +# wikiget - CLI tool for downloading files from Wikimedia sites +# Copyright (C) 2018-2021 Cody Logan and contributors +# SPDX-License-Identifier: GPL-3.0-or-later +# +# Wikiget is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Wikiget is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Wikiget. If not, see . 
+ +import os +import sys +from urllib.parse import unquote, urlparse + +from mwclient import APIError, InvalidResponse, LoginError, Site +from requests import ConnectionError, HTTPError +from tqdm import tqdm + +import wikiget +from wikiget.validations import valid_file, verify_hash + + +def download(dl, args): + url = urlparse(dl) + + if url.netloc: + filename = url.path + site_name = url.netloc + if args.site is not wikiget.DEFAULT_SITE and not args.quiet: + # this will work even if the user specifies 'commons.wikimedia.org' + print("Warning: target is a URL, ignoring site specified with --site") + else: + filename = dl + site_name = args.site + + file_match = valid_file(filename) + + # check if this is a valid file + if file_match and file_match.group(1): + # has File:/Image: prefix and extension + filename = file_match.group(2) + else: + # no file extension and/or prefix, probably an article + print(f"Could not parse input '{filename}' as a file. ") + sys.exit(1) + + filename = unquote(filename) # remove URL encoding for special characters + + dest = args.output or filename + + if args.verbose >= wikiget.VERY_VERBOSE: + print(f"User agent: {wikiget.USER_AGENT}") + + # connect to site and identify ourselves + if args.verbose >= wikiget.STD_VERBOSE: + print(f"Site name: {site_name}") + try: + site = Site(site_name, path=args.path, clients_useragent=wikiget.USER_AGENT) + if args.username and args.password: + site.login(args.username, args.password) + except ConnectionError as e: + # usually this means there is no such site, or there's no network + # connection, though it could be a certificate problem + print("Error: couldn't connect to specified site.") + if args.verbose >= wikiget.VERY_VERBOSE: + print("Full error message:") + print(e) + sys.exit(1) + except HTTPError as e: + # most likely a 403 forbidden or 404 not found error for api.php + print( + "Error: couldn't find the specified wiki's api.php. " + "Check the value of --path." + ) + if args.verbose >= wikiget.VERY_VERBOSE: + print("Full error message:") + print(e) + sys.exit(1) + except (InvalidResponse, LoginError) as e: + # InvalidResponse: site exists, but we couldn't communicate with the + # API endpoint for some reason other than an HTTP error. + # LoginError: missing or invalid credentials + print(e) + sys.exit(1) + + # get info about the target file + try: + file = site.images[filename] + except APIError as e: + # an API error at this point likely means access is denied, + # which could happen with a private wiki + print( + "Error: access denied. Try providing credentials with " + "--username and --password." + ) + if args.verbose >= wikiget.VERY_VERBOSE: + print("Full error message:") + for i in e.args: + print(i) + sys.exit(1) + + if file.imageinfo != {}: + # file exists either locally or at a common repository, + # like Wikimedia Commons + file_url = file.imageinfo["url"] + file_size = file.imageinfo["size"] + file_sha1 = file.imageinfo["sha1"] + + if args.verbose >= wikiget.STD_VERBOSE: + print( + f"Info: downloading '{filename}' " + f"({file_size} bytes) from {site.host}", + end="", + ) + if args.output: + print(f" to '{dest}'") + else: + print("\n", end="") + print(f"Info: {file_url}") + + if os.path.isfile(dest) and not args.force: + print(f"File '{dest}' already exists, skipping download (use -f to ignore)") + else: + try: + fd = open(dest, "wb") + except OSError as e: + print("File could not be written. 
The following error was encountered:") + print(e) + sys.exit(1) + else: + # download the file(s) + if args.verbose >= wikiget.STD_VERBOSE: + leave_bars = True + else: + leave_bars = False + with tqdm( + leave=leave_bars, + total=file_size, + unit="B", + unit_scale=True, + unit_divisor=wikiget.CHUNKSIZE, + ) as progress_bar: + with fd: + res = site.connection.get(file_url, stream=True) + progress_bar.set_postfix(file=dest, refresh=False) + for chunk in res.iter_content(wikiget.CHUNKSIZE): + fd.write(chunk) + progress_bar.update(len(chunk)) + + # verify file integrity and optionally print details + dl_sha1 = verify_hash(dest) + + if args.verbose >= wikiget.STD_VERBOSE: + print(f"Info: downloaded file SHA1 is {dl_sha1}") + print(f"Info: server file SHA1 is {file_sha1}") + if dl_sha1 == file_sha1: + if args.verbose >= wikiget.STD_VERBOSE: + print("Info: hashes match!") + # at this point, we've successfully downloaded the file + else: + print("Error: hash mismatch! Downloaded file may be corrupt.") + sys.exit(1) + + else: + # no file information returned + print(f"Target '{filename}' does not appear to be a valid file.") + sys.exit(1) diff --git a/src/wikiget/validations.py b/src/wikiget/validations.py new file mode 100644 index 0000000..dc70df4 --- /dev/null +++ b/src/wikiget/validations.py @@ -0,0 +1,64 @@ +# wikiget - CLI tool for downloading files from Wikimedia sites +# Copyright (C) 2018, 2019, 2020 Cody Logan +# SPDX-License-Identifier: GPL-3.0-or-later +# +# Wikiget is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Wikiget is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Wikiget. If not, see . + +import hashlib +import re + +from wikiget import BLOCKSIZE + + +def valid_file(search_string): + """ + Determines if the given string contains a valid file name, defined as a + string ending with a '.' and at least one character, beginning with 'File:' + or 'Image:', the standard file prefixes in MediaWiki. + :param search_string: string to validate + :returns: a regex Match object if there's a match or None otherwise + """ + # second group could also restrict to file extensions with three or more + # letters with ([^/\r\n\t\f\v]+\.\w{3,}) + file_regex = re.compile(r"(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$", re.I) + return file_regex.search(search_string) + + +def valid_site(search_string): + """ + Determines if the given string contains a valid site name, defined as a + string ending with 'wikipedia.org' or 'wikimedia.org'. This covers all + subdomains of those domains. Eventually, it should be possible to support + any MediaWiki site, regardless of domain name. + :param search_string: string to validate + :returns: a regex Match object if there's a match or None otherwise + """ + site_regex = re.compile(r"wiki[mp]edia\.org$", re.I) + return site_regex.search(search_string) + + +def verify_hash(filename): + """ + Calculates the SHA1 hash of the given file for comparison with a known + value. 
+ :param filename: name of the file to calculate a hash for + :return: hash digest + """ + hasher = hashlib.sha1() # noqa: S324 + with open(filename, "rb") as dl: + buf = dl.read(BLOCKSIZE) + while len(buf) > 0: + hasher.update(buf) + buf = dl.read(BLOCKSIZE) + return hasher.hexdigest() diff --git a/src/wikiget/version.py b/src/wikiget/version.py new file mode 100644 index 0000000..dd9b22c --- /dev/null +++ b/src/wikiget/version.py @@ -0,0 +1 @@ +__version__ = "0.5.1" diff --git a/src/wikiget/wikiget.py b/src/wikiget/wikiget.py new file mode 100644 index 0000000..ba36766 --- /dev/null +++ b/src/wikiget/wikiget.py @@ -0,0 +1,131 @@ +# wikiget - CLI tool for downloading files from Wikimedia sites +# Copyright (C) 2018-2021 Cody Logan and contributors +# SPDX-License-Identifier: GPL-3.0-or-later +# +# Wikiget is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Wikiget is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Wikiget. If not, see . + +import argparse +import logging +import sys + +import wikiget +from wikiget.dl import download + + +def main(): + """ + Main entry point for console script. Automatically compiled by setuptools + when installed with `pip install` or `python setup.py install`. + """ + + parser = argparse.ArgumentParser( + description=""" + A tool for downloading files from + MediaWiki sites using the file name or + description page URL + """, + epilog=""" + Copyright (C) 2018-2023 Cody Logan + and contributors. + License GPLv3+: GNU GPL version 3 or later + . + This is free software; you are free to + change and redistribute it under certain + conditions. There is NO WARRANTY, to the + extent permitted by law. 
+ """, + ) + parser.add_argument( + "FILE", + help=""" + name of the file to download with the File: + prefix, or the URL of its file description page + """, + ) + parser.add_argument( + "-V", + "--version", + action="version", + version=f"%(prog)s {wikiget.wikiget_version}", + ) + message_options = parser.add_mutually_exclusive_group() + message_options.add_argument( + "-q", "--quiet", help="suppress warning messages", action="store_true" + ) + message_options.add_argument( + "-v", + "--verbose", + help="print detailed information; use -vv for even more detail", + action="count", + default=0, + ) + parser.add_argument( + "-f", "--force", help="force overwriting existing files", action="store_true" + ) + parser.add_argument( + "-s", + "--site", + default=wikiget.DEFAULT_SITE, + help="MediaWiki site to download from (default: %(default)s)", + ) + parser.add_argument( + "-p", + "--path", + default=wikiget.DEFAULT_PATH, + help="MediaWiki site path, where api.php is located (default: %(default)s)", + ) + parser.add_argument( + "--username", default="", help="MediaWiki site username, for private wikis" + ) + parser.add_argument( + "--password", default="", help="MediaWiki site password, for private wikis" + ) + output_options = parser.add_mutually_exclusive_group() + output_options.add_argument("-o", "--output", help="write download to OUTPUT") + output_options.add_argument( + "-a", + "--batch", + help="treat FILE as a textfile containing " + "multiple files to download, one URL or " + "filename per line", + action="store_true", + ) + + args = parser.parse_args() + + # print API and debug messages in verbose mode + if args.verbose >= wikiget.VERY_VERBOSE: + logging.basicConfig(level=logging.DEBUG) + elif args.verbose >= wikiget.STD_VERBOSE: + logging.basicConfig(level=logging.WARNING) + + if args.batch: + # batch download mode + input_file = args.FILE + if args.verbose >= wikiget.STD_VERBOSE: + print(f"Info: using batch file '{input_file}'") + try: + fd = open(input_file) + except OSError as e: + print("File could not be read. The following error was encountered:") + print(e) + sys.exit(1) + else: + with fd: + for _, line in enumerate(fd): + download(line.strip(), args) + else: + # single download mode + dl = args.FILE + download(dl, args) diff --git a/test/test_validations.py b/test/test_validations.py deleted file mode 100644 index 1abd96a..0000000 --- a/test/test_validations.py +++ /dev/null @@ -1,102 +0,0 @@ -# wikiget - CLI tool for downloading files from Wikimedia sites -# Copyright (C) 2018-2021 Cody Logan -# SPDX-License-Identifier: GPL-3.0-or-later -# -# Wikiget is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Wikiget is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Wikiget. If not, see . - -from wikiget.validations import valid_file, valid_site, verify_hash - - -def test_invalid_site_input(): - """ - Invalid site strings should not return regex match objects. 
- """ - invalid_input = [ - "example.com", - "vim.wikia.com", - "en.wikipedia.com", - "en.wikimpedia.org", - ] - for i in invalid_input: - site_match = valid_site(i) - assert site_match is None - - -def test_valid_site_input(): - """ - Valid site strings should return regex match objects. - """ - valid_input = [ - "en.wikipedia.org", - "commons.wikimedia.org", - "de.wikipedia.org", - "meta.wikimedia.org", - ] - for i in valid_input: - site_match = valid_site(i) - assert site_match is not None - - -def test_file_regex(): - """ - File regex should return a match object with match groups corresponding - to the file prefix and name. - """ - i = "File:Example.jpg" - file_match = valid_file(i) - assert file_match is not None - assert file_match.group(0) == "File:Example.jpg" # entire match - assert file_match.group(1) == "File:" # first group - assert file_match.group(2) == "Example.jpg" # second group - - -def test_invalid_file_input(): - """ - Invalid file strings should not return regex match objects. - """ - invalid_input = ["file:example", "example.jpg", "Foo Bar.gif", "Fil:Example.jpg"] - for i in invalid_input: - file_match = valid_file(i) - assert file_match is None - - -def test_valid_file_input(): - """ - Valid file strings should return regex match objects. - """ - valid_input = [ - "Image:example.jpg", - "file:example.jpg", - "File:example.file-01.jpg", - "FILE:FOO.BMP", - "File:ß handwritten sample.gif", - "File:A (1).jpeg", - ] - for i in valid_input: - file_match = valid_file(i) - assert file_match is not None - - -def test_verify_hash(tmp_path): - """ - Confirm that verify_hash returns the proper SHA1 hash. - """ - file_name = "testfile" - file_contents = "foobar" - file_sha1 = "8843d7f92416211de9ebb963ff4ce28125932878" - - tmp_file = tmp_path / file_name - tmp_file.write_text(file_contents) - - assert verify_hash(tmp_file) == file_sha1 diff --git a/tests/test_validations.py b/tests/test_validations.py new file mode 100644 index 0000000..1abd96a --- /dev/null +++ b/tests/test_validations.py @@ -0,0 +1,102 @@ +# wikiget - CLI tool for downloading files from Wikimedia sites +# Copyright (C) 2018-2021 Cody Logan +# SPDX-License-Identifier: GPL-3.0-or-later +# +# Wikiget is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Wikiget is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Wikiget. If not, see . + +from wikiget.validations import valid_file, valid_site, verify_hash + + +def test_invalid_site_input(): + """ + Invalid site strings should not return regex match objects. + """ + invalid_input = [ + "example.com", + "vim.wikia.com", + "en.wikipedia.com", + "en.wikimpedia.org", + ] + for i in invalid_input: + site_match = valid_site(i) + assert site_match is None + + +def test_valid_site_input(): + """ + Valid site strings should return regex match objects. 
+ """ + valid_input = [ + "en.wikipedia.org", + "commons.wikimedia.org", + "de.wikipedia.org", + "meta.wikimedia.org", + ] + for i in valid_input: + site_match = valid_site(i) + assert site_match is not None + + +def test_file_regex(): + """ + File regex should return a match object with match groups corresponding + to the file prefix and name. + """ + i = "File:Example.jpg" + file_match = valid_file(i) + assert file_match is not None + assert file_match.group(0) == "File:Example.jpg" # entire match + assert file_match.group(1) == "File:" # first group + assert file_match.group(2) == "Example.jpg" # second group + + +def test_invalid_file_input(): + """ + Invalid file strings should not return regex match objects. + """ + invalid_input = ["file:example", "example.jpg", "Foo Bar.gif", "Fil:Example.jpg"] + for i in invalid_input: + file_match = valid_file(i) + assert file_match is None + + +def test_valid_file_input(): + """ + Valid file strings should return regex match objects. + """ + valid_input = [ + "Image:example.jpg", + "file:example.jpg", + "File:example.file-01.jpg", + "FILE:FOO.BMP", + "File:ß handwritten sample.gif", + "File:A (1).jpeg", + ] + for i in valid_input: + file_match = valid_file(i) + assert file_match is not None + + +def test_verify_hash(tmp_path): + """ + Confirm that verify_hash returns the proper SHA1 hash. + """ + file_name = "testfile" + file_contents = "foobar" + file_sha1 = "8843d7f92416211de9ebb963ff4ce28125932878" + + tmp_file = tmp_path / file_name + tmp_file.write_text(file_contents) + + assert verify_hash(tmp_file) == file_sha1 diff --git a/wikiget/__init__.py b/wikiget/__init__.py deleted file mode 100644 index b68b0ec..0000000 --- a/wikiget/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# wikiget - CLI tool for downloading files from Wikimedia sites -# Copyright (C) 2018, 2019, 2020 Cody Logan and contributors -# SPDX-License-Identifier: GPL-3.0-or-later -# -# Wikiget is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Wikiget is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Wikiget. If not, see . - -from mwclient import __version__ as mwclient_version - -from wikiget.version import __version__ as wikiget_version - -# set some global constants -BLOCKSIZE = 65536 -CHUNKSIZE = 1024 -DEFAULT_SITE = "commons.wikimedia.org" -DEFAULT_PATH = "/w/" -USER_AGENT = "wikiget/{} (https://github.com/clpo13/wikiget) mwclient/{}".format( - wikiget_version, mwclient_version -) -STD_VERBOSE = 1 -VERY_VERBOSE = 2 diff --git a/wikiget/dl.py b/wikiget/dl.py deleted file mode 100644 index 949f09e..0000000 --- a/wikiget/dl.py +++ /dev/null @@ -1,174 +0,0 @@ -# wikiget - CLI tool for downloading files from Wikimedia sites -# Copyright (C) 2018-2021 Cody Logan and contributors -# SPDX-License-Identifier: GPL-3.0-or-later -# -# Wikiget is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# Wikiget is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Wikiget. If not, see . - -import os -import sys -from urllib.parse import unquote, urlparse - -from mwclient import APIError, InvalidResponse, LoginError, Site -from requests import ConnectionError, HTTPError -from tqdm import tqdm - -import wikiget -from wikiget.validations import valid_file, verify_hash - - -def download(dl, args): - url = urlparse(dl) - - if url.netloc: - filename = url.path - site_name = url.netloc - if args.site is not wikiget.DEFAULT_SITE and not args.quiet: - # this will work even if the user specifies 'commons.wikimedia.org' - print("Warning: target is a URL, ignoring site specified with --site") - else: - filename = dl - site_name = args.site - - file_match = valid_file(filename) - - # check if this is a valid file - if file_match and file_match.group(1): - # has File:/Image: prefix and extension - filename = file_match.group(2) - else: - # no file extension and/or prefix, probably an article - print(f"Could not parse input '{filename}' as a file. ") - sys.exit(1) - - filename = unquote(filename) # remove URL encoding for special characters - - dest = args.output or filename - - if args.verbose >= wikiget.VERY_VERBOSE: - print(f"User agent: {wikiget.USER_AGENT}") - - # connect to site and identify ourselves - if args.verbose >= wikiget.STD_VERBOSE: - print(f"Site name: {site_name}") - try: - site = Site(site_name, path=args.path, clients_useragent=wikiget.USER_AGENT) - if args.username and args.password: - site.login(args.username, args.password) - except ConnectionError as e: - # usually this means there is no such site, or there's no network - # connection, though it could be a certificate problem - print("Error: couldn't connect to specified site.") - if args.verbose >= wikiget.VERY_VERBOSE: - print("Full error message:") - print(e) - sys.exit(1) - except HTTPError as e: - # most likely a 403 forbidden or 404 not found error for api.php - print( - "Error: couldn't find the specified wiki's api.php. " - "Check the value of --path." - ) - if args.verbose >= wikiget.VERY_VERBOSE: - print("Full error message:") - print(e) - sys.exit(1) - except (InvalidResponse, LoginError) as e: - # InvalidResponse: site exists, but we couldn't communicate with the - # API endpoint for some reason other than an HTTP error. - # LoginError: missing or invalid credentials - print(e) - sys.exit(1) - - # get info about the target file - try: - file = site.images[filename] - except APIError as e: - # an API error at this point likely means access is denied, - # which could happen with a private wiki - print( - "Error: access denied. Try providing credentials with " - "--username and --password." 
- ) - if args.verbose >= wikiget.VERY_VERBOSE: - print("Full error message:") - for i in e.args: - print(i) - sys.exit(1) - - if file.imageinfo != {}: - # file exists either locally or at a common repository, - # like Wikimedia Commons - file_url = file.imageinfo["url"] - file_size = file.imageinfo["size"] - file_sha1 = file.imageinfo["sha1"] - - if args.verbose >= wikiget.STD_VERBOSE: - print( - f"Info: downloading '{filename}' " - f"({file_size} bytes) from {site.host}", - end="", - ) - if args.output: - print(f" to '{dest}'") - else: - print("\n", end="") - print(f"Info: {file_url}") - - if os.path.isfile(dest) and not args.force: - print(f"File '{dest}' already exists, skipping download (use -f to ignore)") - else: - try: - fd = open(dest, "wb") - except OSError as e: - print("File could not be written. The following error was encountered:") - print(e) - sys.exit(1) - else: - # download the file(s) - if args.verbose >= wikiget.STD_VERBOSE: - leave_bars = True - else: - leave_bars = False - with tqdm( - leave=leave_bars, - total=file_size, - unit="B", - unit_scale=True, - unit_divisor=wikiget.CHUNKSIZE, - ) as progress_bar: - with fd: - res = site.connection.get(file_url, stream=True) - progress_bar.set_postfix(file=dest, refresh=False) - for chunk in res.iter_content(wikiget.CHUNKSIZE): - fd.write(chunk) - progress_bar.update(len(chunk)) - - # verify file integrity and optionally print details - dl_sha1 = verify_hash(dest) - - if args.verbose >= wikiget.STD_VERBOSE: - print(f"Info: downloaded file SHA1 is {dl_sha1}") - print(f"Info: server file SHA1 is {file_sha1}") - if dl_sha1 == file_sha1: - if args.verbose >= wikiget.STD_VERBOSE: - print("Info: hashes match!") - # at this point, we've successfully downloaded the file - else: - print("Error: hash mismatch! Downloaded file may be corrupt.") - sys.exit(1) - - else: - # no file information returned - print(f"Target '{filename}' does not appear to be a valid file.") - sys.exit(1) diff --git a/wikiget/validations.py b/wikiget/validations.py deleted file mode 100644 index dc70df4..0000000 --- a/wikiget/validations.py +++ /dev/null @@ -1,64 +0,0 @@ -# wikiget - CLI tool for downloading files from Wikimedia sites -# Copyright (C) 2018, 2019, 2020 Cody Logan -# SPDX-License-Identifier: GPL-3.0-or-later -# -# Wikiget is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Wikiget is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Wikiget. If not, see . - -import hashlib -import re - -from wikiget import BLOCKSIZE - - -def valid_file(search_string): - """ - Determines if the given string contains a valid file name, defined as a - string ending with a '.' and at least one character, beginning with 'File:' - or 'Image:', the standard file prefixes in MediaWiki. 
- :param search_string: string to validate - :returns: a regex Match object if there's a match or None otherwise - """ - # second group could also restrict to file extensions with three or more - # letters with ([^/\r\n\t\f\v]+\.\w{3,}) - file_regex = re.compile(r"(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$", re.I) - return file_regex.search(search_string) - - -def valid_site(search_string): - """ - Determines if the given string contains a valid site name, defined as a - string ending with 'wikipedia.org' or 'wikimedia.org'. This covers all - subdomains of those domains. Eventually, it should be possible to support - any MediaWiki site, regardless of domain name. - :param search_string: string to validate - :returns: a regex Match object if there's a match or None otherwise - """ - site_regex = re.compile(r"wiki[mp]edia\.org$", re.I) - return site_regex.search(search_string) - - -def verify_hash(filename): - """ - Calculates the SHA1 hash of the given file for comparison with a known - value. - :param filename: name of the file to calculate a hash for - :return: hash digest - """ - hasher = hashlib.sha1() # noqa: S324 - with open(filename, "rb") as dl: - buf = dl.read(BLOCKSIZE) - while len(buf) > 0: - hasher.update(buf) - buf = dl.read(BLOCKSIZE) - return hasher.hexdigest() diff --git a/wikiget/version.py b/wikiget/version.py deleted file mode 100644 index dd9b22c..0000000 --- a/wikiget/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.5.1" diff --git a/wikiget/wikiget.py b/wikiget/wikiget.py deleted file mode 100644 index ba36766..0000000 --- a/wikiget/wikiget.py +++ /dev/null @@ -1,131 +0,0 @@ -# wikiget - CLI tool for downloading files from Wikimedia sites -# Copyright (C) 2018-2021 Cody Logan and contributors -# SPDX-License-Identifier: GPL-3.0-or-later -# -# Wikiget is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Wikiget is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Wikiget. If not, see . - -import argparse -import logging -import sys - -import wikiget -from wikiget.dl import download - - -def main(): - """ - Main entry point for console script. Automatically compiled by setuptools - when installed with `pip install` or `python setup.py install`. - """ - - parser = argparse.ArgumentParser( - description=""" - A tool for downloading files from - MediaWiki sites using the file name or - description page URL - """, - epilog=""" - Copyright (C) 2018-2023 Cody Logan - and contributors. - License GPLv3+: GNU GPL version 3 or later - . - This is free software; you are free to - change and redistribute it under certain - conditions. There is NO WARRANTY, to the - extent permitted by law. 
- """, - ) - parser.add_argument( - "FILE", - help=""" - name of the file to download with the File: - prefix, or the URL of its file description page - """, - ) - parser.add_argument( - "-V", - "--version", - action="version", - version=f"%(prog)s {wikiget.wikiget_version}", - ) - message_options = parser.add_mutually_exclusive_group() - message_options.add_argument( - "-q", "--quiet", help="suppress warning messages", action="store_true" - ) - message_options.add_argument( - "-v", - "--verbose", - help="print detailed information; use -vv for even more detail", - action="count", - default=0, - ) - parser.add_argument( - "-f", "--force", help="force overwriting existing files", action="store_true" - ) - parser.add_argument( - "-s", - "--site", - default=wikiget.DEFAULT_SITE, - help="MediaWiki site to download from (default: %(default)s)", - ) - parser.add_argument( - "-p", - "--path", - default=wikiget.DEFAULT_PATH, - help="MediaWiki site path, where api.php is located (default: %(default)s)", - ) - parser.add_argument( - "--username", default="", help="MediaWiki site username, for private wikis" - ) - parser.add_argument( - "--password", default="", help="MediaWiki site password, for private wikis" - ) - output_options = parser.add_mutually_exclusive_group() - output_options.add_argument("-o", "--output", help="write download to OUTPUT") - output_options.add_argument( - "-a", - "--batch", - help="treat FILE as a textfile containing " - "multiple files to download, one URL or " - "filename per line", - action="store_true", - ) - - args = parser.parse_args() - - # print API and debug messages in verbose mode - if args.verbose >= wikiget.VERY_VERBOSE: - logging.basicConfig(level=logging.DEBUG) - elif args.verbose >= wikiget.STD_VERBOSE: - logging.basicConfig(level=logging.WARNING) - - if args.batch: - # batch download mode - input_file = args.FILE - if args.verbose >= wikiget.STD_VERBOSE: - print(f"Info: using batch file '{input_file}'") - try: - fd = open(input_file) - except OSError as e: - print("File could not be read. The following error was encountered:") - print(e) - sys.exit(1) - else: - with fd: - for _, line in enumerate(fd): - download(line.strip(), args) - else: - # single download mode - dl = args.FILE - download(dl, args) -- cgit v1.2.3 From 3c720f269d69f8bc505efed3f86881e530f55949 Mon Sep 17 00:00:00 2001 From: Cody Logan Date: Tue, 26 Sep 2023 13:25:19 -0700 Subject: Update README --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 043f305..a0ccf8d 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ # wikiget +[![Python package](https://github.com/clpo13/wikiget/actions/workflows/python.yml/badge.svg?branch=master)](https://github.com/clpo13/wikiget/actions/workflows/python.yml) [![PyPI version](https://badge.fury.io/py/wikiget.svg)](https://badge.fury.io/py/wikiget) Something like wget for downloading a file from MediaWiki sites (like Wikipedia @@ -93,9 +94,13 @@ source venv/bin/activate Then run `pip install -e .` to invoke an ["editable" install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs), meaning any changes made to the source will be reflected immediately in the executable -script. Unit tests can be run with `pytest` (make sure to run `pip install pytest-cov` +script. Unit tests can be run with `pytest` (make sure to run `pip install pytest` in the virtual environment first.) 
+Alternatively, using [Hatch](https://hatch.pypa.io/latest/), simply clone the repository +and run `hatch run test` to create the environment and run pytest. Also try `hatch shell` +or `hatch run wikiget --help`. + ## License Copyright (C) 2018-2023 Cody Logan and contributors -- cgit v1.2.3
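As a quick illustration of the helpers relocated to `src/wikiget/validations.py` in the patch series above, here is a minimal usage sketch. It is not part of the patches themselves; it assumes the package has been installed as described in the README (for example with `pip install -e .` in a virtualenv, or inside `hatch shell`), and the file name passed to `verify_hash` is only a placeholder.

```python
# Minimal sketch, not part of the patch series: exercising the validation
# helpers exactly as the relocated src/wikiget/validations.py defines them.
from wikiget.validations import valid_file, valid_site, verify_hash

match = valid_file("File:Example.jpg")
if match:
    # group(1) is the "File:"/"Image:" prefix, group(2) the bare filename
    print(match.group(1), match.group(2))  # File: Example.jpg

# valid_site() only matches wikipedia.org / wikimedia.org subdomains
print(valid_site("commons.wikimedia.org") is not None)  # True
print(valid_site("example.com") is not None)            # False

# verify_hash() returns the SHA1 hex digest of a local file, which wikiget
# compares against the hash reported by the MediaWiki API after a download.
# "Example.jpg" here is a placeholder; it must exist on disk.
print(verify_hash("Example.jpg"))
```

Running a snippet like this inside the Hatch environment mentioned in the README (`hatch shell`) avoids managing the virtualenv and editable install by hand.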