aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorCody Logan <clpo13@gmail.com>2023-09-26 15:17:04 -0700
committerCody Logan <clpo13@gmail.com>2023-09-26 15:17:04 -0700
commita1995912ed24b37a990f3fcd5e91dbf7b46669fb (patch)
treeaa45974d0c999f2dd9a1684e43913b51b0184aa1 /src
parentbb0bf8f0c79c31114a615cb201505de3fae15044 (diff)
downloadwikiget-a1995912ed24b37a990f3fcd5e91dbf7b46669fb.tar.gz
wikiget-a1995912ed24b37a990f3fcd5e91dbf7b46669fb.zip
Reorganize file tree
Diffstat (limited to 'src')
-rw-r--r--src/wikiget/__init__.py28
-rw-r--r--src/wikiget/dl.py159
-rw-r--r--src/wikiget/validations.py64
-rw-r--r--src/wikiget/version.py1
-rw-r--r--src/wikiget/wikiget.py157
5 files changed, 409 insertions, 0 deletions
diff --git a/src/wikiget/__init__.py b/src/wikiget/__init__.py
new file mode 100644
index 0000000..4adcae3
--- /dev/null
+++ b/src/wikiget/__init__.py
@@ -0,0 +1,28 @@
+# wikiget - CLI tool for downloading files from Wikimedia sites
+# Copyright (C) 2018-2021 Cody Logan and contributors
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Wikiget is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Wikiget is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Wikiget. If not, see <https://www.gnu.org/licenses/>.
+
+from mwclient import __version__ as mwclient_version
+
+from .version import __version__ as wikiget_version
+
# set some global constants
BLOCKSIZE = 65536  # bytes read per chunk when hashing files (see validations.verify_hash)
CHUNKSIZE = 1024   # bytes per chunk when streaming a download (see dl.download)
DEFAULT_SITE = "commons.wikimedia.org"  # wiki used when --site is not given
DEFAULT_PATH = "/w/"  # default script path where api.php lives
# user agent sent with every API request: identifies wikiget (with project URL)
# and the mwclient library version in use
USER_AGENT = (f"wikiget/{wikiget_version} (https://github.com/clpo13/wikiget) "
              f"mwclient/{mwclient_version}")
diff --git a/src/wikiget/dl.py b/src/wikiget/dl.py
new file mode 100644
index 0000000..8f32218
--- /dev/null
+++ b/src/wikiget/dl.py
@@ -0,0 +1,159 @@
+# wikiget - CLI tool for downloading files from Wikimedia sites
+# Copyright (C) 2018-2021 Cody Logan and contributors
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Wikiget is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Wikiget is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Wikiget. If not, see <https://www.gnu.org/licenses/>.
+
+import logging
+import os
+import sys
+from urllib.parse import unquote, urlparse
+
+from mwclient import APIError, InvalidResponse, LoginError, Site
+from requests import ConnectionError, HTTPError
+from tqdm import tqdm
+
+from . import CHUNKSIZE, DEFAULT_SITE, USER_AGENT
+from .validations import valid_file, verify_hash
+
+
def download(dl, args):
    """
    Download a single file from a MediaWiki site.

    The target may be either a bare filename (with the File: or Image:
    prefix) or the URL of its file description page; in the latter case
    the site name is taken from the URL and any --site option is ignored.
    Exits the process with status 1 on any unrecoverable error.

    :param dl: filename or file description page URL to download
    :param args: parsed command-line options (argparse.Namespace)
    """
    url = urlparse(dl)

    if url.netloc:
        # target is a URL: filename and site both come from the URL itself
        filename = url.path
        site_name = url.netloc
        # compare by value, not identity: 'is not' on strings only worked
        # by accident of interning
        if args.site != DEFAULT_SITE:
            # this will work even if the user specifies 'commons.wikimedia.org'
            logging.warning("target is a URL, "
                            "ignoring site specified with --site")
    else:
        filename = dl
        site_name = args.site

    file_match = valid_file(filename)

    # check if this is a valid file
    if file_match and file_match.group(1):
        # has File:/Image: prefix and extension
        filename = file_match.group(2)
    else:
        # no file extension and/or prefix, probably an article
        logging.error(f"Could not parse input '{dl}' as a file.")
        sys.exit(1)

    filename = unquote(filename)  # remove URL encoding for special characters

    dest = args.output or filename

    logging.debug(f"User agent: {USER_AGENT}")

    # connect to site and identify ourselves
    logging.info(f"Site name: {site_name}")
    try:
        site = Site(site_name, path=args.path, clients_useragent=USER_AGENT)
        if args.username and args.password:
            site.login(args.username, args.password)
    except ConnectionError as e:
        # usually this means there is no such site, or there's no network
        # connection, though it could be a certificate problem
        logging.error("Couldn't connect to specified site.")
        logging.debug("Full error message:")
        logging.debug(e)
        sys.exit(1)
    except HTTPError as e:
        # most likely a 403 forbidden or 404 not found error for api.php
        logging.error("Couldn't find the specified wiki's api.php. "
                      "Check the value of --path.")
        logging.debug("Full error message:")
        logging.debug(e)
        sys.exit(1)
    except (InvalidResponse, LoginError) as e:
        # InvalidResponse: site exists, but we couldn't communicate with the
        # API endpoint for some reason other than an HTTP error.
        # LoginError: missing or invalid credentials
        logging.error(e)
        sys.exit(1)

    # get info about the target file
    try:
        file = site.images[filename]
    except APIError as e:
        # an API error at this point likely means access is denied,
        # which could happen with a private wiki
        logging.error("Access denied. Try providing credentials with "
                      "--username and --password.")
        logging.debug("Full error message:")
        for i in e.args:
            logging.debug(i)
        sys.exit(1)

    if file.imageinfo != {}:
        # file exists either locally or at a common repository,
        # like Wikimedia Commons
        file_url = file.imageinfo["url"]
        file_size = file.imageinfo["size"]
        file_sha1 = file.imageinfo["sha1"]

        filename_log = (f"Downloading '{filename}' ({file_size} bytes) "
                        f"from {site.host}")
        if args.output:
            filename_log += f" to '{dest}'"
        logging.info(filename_log)
        logging.info(file_url)

        if os.path.isfile(dest) and not args.force:
            logging.warning(f"File '{dest}' already exists, skipping download "
                            "(use -f to ignore)")
        else:
            try:
                fd = open(dest, "wb")
            except OSError as e:
                logging.error("File could not be written. "
                              "The following error was encountered:")
                logging.error(e)
                sys.exit(1)
            else:
                # download the file(s); keep finished progress bars on
                # screen only when the user asked for verbose output
                leave_bars = args.verbose >= 1
                with tqdm(leave=leave_bars, total=file_size,
                          unit="B", unit_scale=True,
                          unit_divisor=CHUNKSIZE) as progress_bar:
                    with fd:
                        res = site.connection.get(file_url, stream=True)
                        progress_bar.set_postfix(file=dest, refresh=False)
                        for chunk in res.iter_content(CHUNKSIZE):
                            fd.write(chunk)
                            progress_bar.update(len(chunk))

            # verify file integrity and optionally print details
            dl_sha1 = verify_hash(dest)

            logging.info(f"Downloaded file SHA1 is {dl_sha1}")
            logging.info(f"Server file SHA1 is {file_sha1}")
            if dl_sha1 == file_sha1:
                logging.info("Hashes match!")
                # at this point, we've successfully downloaded the file
            else:
                logging.error("Hash mismatch! Downloaded file may be corrupt.")
                sys.exit(1)

    else:
        # no file information returned
        logging.error(f"Target '{filename}' does not appear to be "
                      "a valid file.")
        sys.exit(1)
diff --git a/src/wikiget/validations.py b/src/wikiget/validations.py
new file mode 100644
index 0000000..bd99570
--- /dev/null
+++ b/src/wikiget/validations.py
@@ -0,0 +1,64 @@
+# wikiget - CLI tool for downloading files from Wikimedia sites
+# Copyright (C) 2018, 2019, 2020 Cody Logan
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Wikiget is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Wikiget is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Wikiget. If not, see <https://www.gnu.org/licenses/>.
+
+import hashlib
+import re
+
+from . import BLOCKSIZE
+
+
def valid_file(search_string):
    """
    Check whether a string names a valid MediaWiki file: it must begin with
    'File:' or 'Image:' (the standard file prefixes in MediaWiki) and end
    with a '.' followed by at least one word character (the extension).
    :param search_string: string to validate
    :returns: a regex Match object if there's a match or None otherwise
    """
    # second group could also restrict to file extensions with three or more
    # letters with ([^/\r\n\t\f\v]+\.\w{3,})
    pattern = re.compile(r"(File:|Image:)([^/\r\n\t\f\v]+\.\w+)$", re.IGNORECASE)
    return pattern.search(search_string)
+
+
def valid_site(search_string):
    """
    Check whether a string names a supported site: anything ending in
    'wikipedia.org' or 'wikimedia.org', which covers all subdomains of
    those domains. Eventually, it should be possible to support any
    MediaWiki site, regardless of domain name.
    :param search_string: string to validate
    :returns: a regex Match object if there's a match or None otherwise
    """
    pattern = re.compile(r"wiki[mp]edia\.org$", re.IGNORECASE)
    return pattern.search(search_string)
+
+
def verify_hash(filename):
    """
    Compute the SHA1 digest of a local file, reading it in BLOCKSIZE
    chunks, for comparison with a known value.
    :param filename: name of the file to calculate a hash for
    :return: hash digest
    """
    sha1 = hashlib.sha1()
    with open(filename, "rb") as infile:
        # iterate fixed-size reads until the sentinel b"" (end of file)
        for block in iter(lambda: infile.read(BLOCKSIZE), b""):
            sha1.update(block)
    return sha1.hexdigest()
diff --git a/src/wikiget/version.py b/src/wikiget/version.py
new file mode 100644
index 0000000..dd9b22c
--- /dev/null
+++ b/src/wikiget/version.py
@@ -0,0 +1 @@
# single source of truth for the package version, re-exported by __init__
__version__ = "0.5.1"
diff --git a/src/wikiget/wikiget.py b/src/wikiget/wikiget.py
new file mode 100644
index 0000000..a8679c9
--- /dev/null
+++ b/src/wikiget/wikiget.py
@@ -0,0 +1,157 @@
+# wikiget - CLI tool for downloading files from Wikimedia sites
+# Copyright (C) 2018-2021 Cody Logan and contributors
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Wikiget is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Wikiget is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Wikiget. If not, see <https://www.gnu.org/licenses/>.
+
+import argparse
+import logging
+import sys
+
+from . import DEFAULT_SITE, DEFAULT_PATH, wikiget_version
+from .dl import download
+
+
def main():
    """
    Main entry point for console script. Automatically compiled by setuptools
    when installed with `pip install` or `python setup.py install`.

    Parses command-line options, configures logging (console and, with
    --logfile, a file as well), then downloads either a single target or,
    with --batch, every line of the given batch file.
    """

    parser = argparse.ArgumentParser(description="""
                                     A tool for downloading files from
                                     MediaWiki sites using the file name or
                                     description page URL
                                     """,
                                     epilog="""
                                     Copyright (C) 2018-2021 Cody Logan
                                     and contributors.
                                     License GPLv3+: GNU GPL version 3 or later
                                     <http://www.gnu.org/licenses/gpl.html>.
                                     This is free software; you are free to
                                     change and redistribute it under certain
                                     conditions. There is NO WARRANTY, to the
                                     extent permitted by law.
                                     """)
    parser.add_argument("FILE", help="""
                        name of the file to download with the File:
                        prefix, or the URL of its file description page
                        """)
    parser.add_argument("-V", "--version", action="version",
                        version=f"%(prog)s {wikiget_version}")
    message_options = parser.add_mutually_exclusive_group()
    message_options.add_argument("-q", "--quiet",
                                 help="suppress warning messages",
                                 action="store_true")
    message_options.add_argument("-v", "--verbose",
                                 help="print detailed information; "
                                      "use -vv for even more detail",
                                 action="count", default=0)
    parser.add_argument("-f", "--force",
                        help="force overwriting existing files",
                        action="store_true")
    parser.add_argument("-s", "--site", default=DEFAULT_SITE,
                        help="MediaWiki site to download from "
                             "(default: %(default)s)")
    parser.add_argument("-p", "--path", default=DEFAULT_PATH,
                        help="MediaWiki site path, where api.php is located "
                             "(default: %(default)s)")
    parser.add_argument("--username", default="",
                        help="MediaWiki site username, for private wikis")
    parser.add_argument("--password", default="",
                        help="MediaWiki site password, for private wikis")
    output_options = parser.add_mutually_exclusive_group()
    output_options.add_argument("-o", "--output",
                                help="write download to OUTPUT")
    output_options.add_argument("-a", "--batch",
                                help="treat FILE as a textfile containing "
                                     "multiple files to download, one URL or "
                                     "filename per line", action="store_true")
    parser.add_argument("-l", "--logfile", default="",
                        help="save log output to LOGFILE")

    args = parser.parse_args()

    # map -v/-vv/-q onto a console log level; default shows warnings and up
    loglevel = logging.WARNING
    if args.verbose >= 2:
        # this includes API and library messages
        loglevel = logging.DEBUG
    elif args.verbose >= 1:
        loglevel = logging.INFO
    elif args.quiet:
        loglevel = logging.ERROR

    # configure logging:
    # console log level is set via -v, -vv, and -q options
    # file log level is always info (TODO: add debug option)
    if args.logfile:
        # log to console and file
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s [%(levelname)-7s] %(message)s",
            filename=args.logfile
        )

        console = logging.StreamHandler()
        # TODO: even when loglevel is set to logging.DEBUG,
        # debug messages aren't printing to console
        console.setLevel(loglevel)
        console.setFormatter(
            logging.Formatter("[%(levelname)s] %(message)s")
        )
        logging.getLogger("").addHandler(console)
    else:
        # log only to console
        logging.basicConfig(
            level=loglevel,
            format="[%(levelname)s] %(message)s"
        )

    # log events are appended to the file if it already exists,
    # so note the start of a new download session
    logging.info(f"Starting download session using wikiget {wikiget_version}")
    # logging.info(f"Log level is set to {loglevel}")

    if args.batch:
        # batch download mode
        input_file = args.FILE

        logging.info(f"Using batch file '{input_file}'.")

        try:
            fd = open(input_file, "r")
        except OSError as e:
            logging.error("File could not be read. "
                          "The following error was encountered:")
            logging.error(e)
            sys.exit(1)
        else:
            with fd:
                # store file contents in memory in case something
                # happens to the file while we're downloading
                dl_list = fd.readlines()

        # TODO: validate file contents before download process starts
        for line_num, url in enumerate(dl_list, start=1):
            url = url.strip()
            # keep track of batch file line numbers for
            # debugging/logging purposes
            logging.info(f"Downloading '{url}' at line {line_num}:")
            download(url, args)
    else:
        # single download mode
        dl = args.FILE
        download(dl, args)