#!/usr/bin/env python3
# -*- encoding: utf-8 -*-

import json
import math
import os
import pickle
import pprint
import shutil
import subprocess
import sys
import traceback
from collections import OrderedDict
from concurrent.futures import ProcessPoolExecutor as PoolExecutor
from io import StringIO
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Tuple, Type

import colored as clrlib

import gallery_dl
import gallery_dl.config
import gallery_dl.extractor
import gallery_dl.job
import gallery_dl.option
import gallery_dl.output
import gallery_dl.postprocessor.common
import gallery_dl.util

import reddit_imgs.sync

from .system.cmdline_parser import parse_cmdline
from .system.downloader.cache import get_normalized_link, get_path_for_caching
from .system.flattener import flatten_generator
from .system.urlmatcher import search_urls

# Keep a handle to the pristine PathFormat class: configure_gdl() swaps it for
# OverriddenPathFormat, and this guard keeps repeated calls from wrapping the
# override around itself.
gdl_pf: Type[gallery_dl.util.PathFormat] = (
    gallery_dl.util.PathFormat
    if not hasattr(gallery_dl.util, 'PathFormatOriginal') else
    gallery_dl.util.PathFormatOriginal)

STOP_JOBS_FLAG_PATH = Path('stop_jobs.flag')
FORBIDDEN_WORKER_SPLITS = {'deviantart'}
MAX_WORKERS = 12
SPLIT_WORKER_AFTER_N_LINKS = 10000
USE_FIREFOX_COOKIES = True
DEBUG_WORKER: Optional[str] = None
IGNORE_WORKERS: Set[str] = set()
REDOWNLOAD = False
REDOWNLOAD_EMPTIES = False
CUSTOM_WORKER_SPLITS: Dict[str, int] = {}
SKIP_INDEXED_FILES = True
RETRY_ERROR_MASK = 0
DOWNLOAD_SINGLE_MANUAL_LINK: Optional[str] = None

GDL_ERRORS = [
    'GENERIC_ERR',          # 1
    'UNKNOWN_ERR',          # 2
    'HTTP_ERR',             # 4
    '404_ERR',              # 8
    'AUTH_ERR',             # 16
    'FORMAT_ERR',           # 32
    'LACKS_EXTRACTOR_ERR',  # 64
    'OS_ERR',               # 128
    'NOT_IN_DISK_ERR',      # 256
]
GDL_ERRORS_DICT = {(1 << k): v for k, v in enumerate(GDL_ERRORS)}
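

# Hypothetical convenience (an addition, not used by the pipeline itself):
# expand a status bitmask into the error names above,
# e.g. describe_gdl_errors(12) -> ['HTTP_ERR', '404_ERR'].
def describe_gdl_errors(rc: int) -> List[str]:
    return [name for bit, name in GDL_ERRORS_DICT.items() if rc & bit]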

def cmdline(encoded_args: Optional[str] = None):
    if encoded_args is None:
        return run_with_config()
    else:
        return parse_cmdline(run_with_config, encoded_args)


def run_with_config(max_workers: Optional[int] = None,
                    debug_worker: Optional[str] = None,
                    manual_link: Optional[str] = None,
                    ):
    global DEBUG_WORKER
    global MAX_WORKERS
    global DOWNLOAD_SINGLE_MANUAL_LINK
    DOWNLOAD_SINGLE_MANUAL_LINK = manual_link
    DEBUG_WORKER = debug_worker
    if max_workers is not None:
        MAX_WORKERS = max_workers
    return main_loading_preprocessed()


def main_loading_preprocessed():
    print('Initial data loading...')
    kwargs = json.loads(Path('r_fetch_preprocessed.json').read_text())
    global SKIP_INDEXED_FILES, RETRY_ERROR_MASK, CUSTOM_WORKER_SPLITS, \
        SPLIT_WORKER_AFTER_N_LINKS, REDOWNLOAD_EMPTIES, REDOWNLOAD
    SKIP_INDEXED_FILES = kwargs.pop('SKIP_INDEXED_FILES')
    RETRY_ERROR_MASK = kwargs.pop('RETRY_ERROR_MASK')
    CUSTOM_WORKER_SPLITS = kwargs.pop('CUSTOM_WORKER_SPLITS')
    SPLIT_WORKER_AFTER_N_LINKS = kwargs.pop('SPLIT_WORKER_AFTER_N_LINKS')
    REDOWNLOAD_EMPTIES = kwargs.pop('REDOWNLOAD_EMPTIES')
    REDOWNLOAD = kwargs.pop('REDOWNLOAD')
    kwargs['workers_state_path'] = Path(kwargs['workers_state_path'])
    main(**kwargs)


def main(files_from_links: Dict[str, List[str]],
         links_no_files: List[str],
         files_sizes: Dict[str, int],
         link_statuses: Dict[str, int],
         workers_nicely_grouped: List[str],
         workers_state_path: Path,
         links_to_worker: Dict[str, List[str]],
         link_keys: Dict[str, str]):
    global DEBUG_WORKER
    # if DEBUG_WORKER is None:
    #     raise Exception('DEBUG_ONLY: preventing cron jobs from running')
    # The state directory is always recreated locally, overriding the
    # caller-provided path.
    workers_state_path = Path('i_gdl_w')
    workers_state_path.mkdir(exist_ok=True, parents=True)
    for wsp in workers_state_path.iterdir():
        wsp.unlink()
    print('Initial data loaded.')

    def save_ending_files():
        nonlocal links_no_files
        links_no_files2 = list(map(
            lambda a: a[0],
            filter(lambda a: len(a[1]) <= 0 and a[0] not in links_no_files,
                   files_from_links.items()))) + links_no_files
        files_from_links2 = dict(
            filter(lambda a: len(a[1]) > 0, files_from_links.items()))
        links_no_files2_sorted = sorted(links_no_files2)
        links_for_files = dict()
        for link, files in files_from_links2.items():
            for file in files:
                if file not in links_for_files:
                    links_for_files[file] = list()
                links_for_files[file].append(link)
            del file
            del link
            del files
        os.sync()
        Path('i_gdl_lnf.json').write_text(
            json.dumps(links_no_files2_sorted, indent=1))
        Path('i_gdl_ffl.json').write_text(json.dumps(
            files_from_links2, indent=1, sort_keys=True))
        Path('i_gdl_lff.json').write_text(json.dumps(
            links_for_files, indent=1, sort_keys=True))
        Path('i_gdl_fsz.json').write_text(
            json.dumps(files_sizes, indent=1, sort_keys=True))
        Path('i_gdl_spl.json').write_text(json.dumps(
            link_statuses, indent=1, sort_keys=True))
        os.sync()

    print('Performing partial save...')
    save_ending_files()
    print('Performed partial save.')
    configure_gdl()
    gallery_dl.output.select = lambda: ColoredLineOutput(False)
    totalfiles = 0
    thread_ids = workers_nicely_grouped.copy()
    for line, thread_id in enumerate(thread_ids):
        workers_state_path.joinpath(thread_id + '=line').write_text(str(line))
        linkcount = len(links_to_worker[thread_id])
        workers_state_path.joinpath(thread_id).write_text(
            f'waiting:{linkcount}:{linkcount}:0:0')
    do_fancy_multithreading_panel = False
    thread_id_count = len(thread_ids)
    if DOWNLOAD_SINGLE_MANUAL_LINK is not None or DEBUG_WORKER is not None:
        if DOWNLOAD_SINGLE_MANUAL_LINK is not None:
            DEBUG_WORKER = 'manual'
            links_to_worker[DEBUG_WORKER] = [DOWNLOAD_SINGLE_MANUAL_LINK]
        print(f'Will debug {repr(DEBUG_WORKER)}.')
        thread_id = DEBUG_WORKER
        links_list = links_to_worker[DEBUG_WORKER]
        download_link_list(
            links_list,
            thread_id,
            None,
            f'Debugging {repr(DEBUG_WORKER)}...',
            workers_state_path.joinpath(thread_id),
        )
        return
    if links_to_worker:
        with PoolExecutor(min(MAX_WORKERS, thread_id_count)) as pe:
            if do_fancy_multithreading_panel:
                print('\033[2J', end='', flush=True)
                print('\033[0;0H', end='', flush=True)
            print('Downloading...', flush=True)
            if do_fancy_multithreading_panel:
                print('\033[0;0H', end='', flush=True)
            largest_tid_size = max(map(len, thread_ids))
            line2tid = dict()

            def done_callback_generator(line):
                nonlocal totalfiles

                def terminate_process_pool():
                    os.system('sync')
                    os.system(
                        "bash -c \"ps -aux | grep './redditgetter.py' | "
                        "grep -v grep | sed -e 's/  */ /g' | "
                        "cut -d' ' -f2 | xargs -r -- kill -15\"")
                    sys.exit(0xFF)

                def done_callback(job):
                    nonlocal totalfiles
                    thread_id = line2tid[line]
                    links_list = links_to_worker[thread_id]
                    try:
                        workers_state_path.joinpath(thread_id).write_text(
                            f'finished:{len(links_list)}:0:0:0')
                        print(clrlib.stylize(
                            f'Received job #{line}: {thread_id}', [
                                clrlib.fg('white'),
                                clrlib.bg('green'),
                                clrlib.attr('bold'),
                            ]
                        ))
                        totalbytes = 0
                        thisfiles = 0
                        downloaded_links = job.result()
                        for link, files in downloaded_links:
                            statusdir = get_path_for_caching(
                                link, Path('i_gdl_s'))
                            statusdir.mkdir(parents=True, exist_ok=True)
                            statusfile = statusdir.joinpath(
                                '_gdl_status.json')
                            statuses = dict()
                            if statusfile.exists():
                                statuses = json.loads(statusfile.read_text())
                            link_statuses[link] = statuses.get(link, 0xFF)
                            if link not in files_from_links:
                                files_from_links[link] = list()
                            lenfiles = len(files)
                            totalfiles += lenfiles
                            for file in files:
                                filepath = Path(file)
                                thisfiles += 1
                                if filepath.exists():
                                    files_from_links[link].append(file)
                                    st_size = filepath.stat().st_size
                                    files_sizes[file] = st_size
                                    totalbytes += st_size
                        workers_state_path.joinpath(thread_id).write_text(
                            f'finished:{len(links_list)}:0:'
                            f'{totalbytes}:{thisfiles}')
                        save_ending_files()
                    except Exception:
                        sio = StringIO()
                        traceback.print_exc(file=sio)
                        excTxt = sio.getvalue()
                        try:
                            workers_state_path.joinpath(thread_id).write_text(
                                f'failed:{len(links_list)}:0:0:0')
                        except Exception:
                            pass
                        try:
                            workers_state_path.joinpath(
                                thread_id + '=exc').write_text(excTxt)
                        except Exception:
                            pass
                        try:
                            pe.shutdown(wait=False)
                        except Exception:
                            pass
                        print(excTxt)
                        terminate_process_pool()
                    return
                return done_callback

            for line, thread_id in enumerate(thread_ids):
                line2tid[line] = thread_id
                links_list = links_to_worker[thread_id]
                workers_state_path.joinpath(thread_id).write_text(
                    f'enqueued:{len(links_list)}:{len(links_list)}:0:0')
                print(clrlib.stylize(f'Starting job #{line}: {thread_id}', [
                    clrlib.fg('white'),
                    clrlib.bg('light_red'),
                    clrlib.attr('bold'),
                ]))
                jobstartedmsg = clrlib.stylize(
                    f'Starting job #{line}: {thread_id}', [
                        clrlib.fg('black'),
                        clrlib.bg('light_yellow'),
                        clrlib.attr('bold'),
                    ])
                thread_id_nmsz = len(thread_id)
                thread_id_display = thread_id + \
                    ' ' * (largest_tid_size - thread_id_nmsz)
                job = pe.submit(
                    download_link_list,
                    links_list,
                    thread_id_display,
                    line + 3 if do_fancy_multithreading_panel else None,
                    jobstartedmsg,
                    workers_state_path.joinpath(thread_id),
                )
                job.add_done_callback(done_callback_generator(line))
    save_ending_files()
    if (p := Path('latest_image_download.txt')).exists():
        p.unlink()
    if workers_state_path.exists():
        for p in workers_state_path.glob('*'):
            p.unlink()
        shutil.rmtree(workers_state_path)
    print(f'Downloaded {totalfiles} files')
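

# The worker state files written under i_gdl_w hold a colon-separated record:
# "<phase>:<total links>:<remaining>:<bytes>:<files>", with an optional
# trailing ":<current link>" while a link is in flight. A hypothetical reader
# for external monitoring tools (an addition, not called by this module):
def parse_worker_state(raw: str) -> Dict[str, Any]:
    phase, total, remaining, nbytes, nfiles, *rest = raw.split(':')
    return dict(phase=phase, total=int(total), remaining=int(remaining),
                bytes=int(nbytes), files=int(nfiles),
                # links contain ':' (e.g. 'https://'), so re-join the rest
                link=':'.join(rest) if rest else None)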

def download_link_list(links: List[str],
                       thread_id: str,
                       line: Optional[int] = None,
                       job_started_msg: Optional[str] = None,
                       thread_state_path: Optional[Path] = None,
                       ) -> List[Tuple[str, List[str]]]:
    '''Downloads a link list inside a ProcessPoolExecutor'''
    if STOP_JOBS_FLAG_PATH.exists():
        raise InterruptedError(STOP_JOBS_FLAG_PATH)
    if job_started_msg is not None:
        print(job_started_msg)
    has_its_own_line = line is not None
    link_count = len(links)
    remaining_links = link_count
    configure_gdl()
    if thread_state_path is not None:
        thread_state_path.write_text(
            f'running:{link_count}:{remaining_links}:0:0')

    def get_printer():
        return ColoredLineOutput(
            has_its_own_line,
            prefix=((f'\033[{line};0H' if has_its_own_line else '') +
                    clrlib.stylize('%9d' % remaining_links,
                                   [clrlib.fg('light_cyan')]) +
                    clrlib.stylize('@', [clrlib.fg('light_red')]) +
                    clrlib.stylize(thread_id, [clrlib.fg('yellow')]) +
                    clrlib.stylize('= ', [clrlib.fg('dark_gray')])),
            suffix=('\033[K\033[0;0H' if has_its_own_line else ''),
            prefixsz=len(('%9d' % 0) + ' ' + thread_id),
            suffixsz=0,
            write_successes_to=Path('latest_image_download.txt'),
        )

    gallery_dl.output.select = get_printer
    result = list()
    totalbytes = 0
    totalfiles = 0
    try:
        for link in links:
            scrubbing = True
            cachedir = get_path_for_caching(link, Path('i_gdl_c'))
            statusdir = get_path_for_caching(link, Path('i_gdl_s'))
            cachedir.mkdir(parents=True, exist_ok=True)
            statusdir.mkdir(parents=True, exist_ok=True)
            metafile = cachedir.joinpath('_gdl_meta.json')
            statusfile = statusdir.joinpath('_gdl_status.json')
            meta = dict()
            statuses = dict()
            link_already_downloaded = False
            if metafile.exists():
                try:
                    meta = json.loads(metafile.read_text())
                except json.JSONDecodeError:
                    pass
            if statusfile.exists():
                try:
                    statuses = json.loads(statusfile.read_text())
                except json.JSONDecodeError:
                    pass
            if link in meta and link in statuses:
                link_already_downloaded = True
                rc = statuses.get(link, 0xFF)
                if rc == 0:
                    for fl in meta[link]:
                        pth = Path(fl)
                        try:
                            if not pth.exists():
                                link_already_downloaded = False
                                break
                        except OSError:
                            link_already_downloaded = False
                            break
                if len(meta[link]) == 0 and REDOWNLOAD_EMPTIES:
                    link_already_downloaded = False
                if (rc & RETRY_ERROR_MASK) != 0:
                    link_already_downloaded = False
            if not link_already_downloaded or REDOWNLOAD:
                scrubbing = False
                if thread_state_path is not None:
                    thread_state_path.write_text(
                        f'running:{link_count}:{remaining_links}:'
                        f'{totalbytes}:{totalfiles}:{link}')
                job = DownloadJobWithCallSaverPostProcessor(link)
                job.out = get_printer()
                job.out.message(link, clrlib.fg('light_magenta'))
                rc = job.run()
                os.sync()
                # print('FINAL', job.cspp.calls)
                # raise Exception(job.cspp.calls)
                # files = job.cspp.calls['run_final'].copy()
                # ^ only brings the last element
                files = job.cspp.calls['prepare'].copy()
                files = list(filter(len, files))
                # Paths captured at 'prepare' time may have been renamed on
                # disk (e.g. a different extension); recover the closest
                # sibling that is not a partial or metadata file.
                has_changed = True
                while has_changed:
                    has_changed = False
                    for seq, fl in enumerate(files):
                        if not (pth := Path(fl)).exists():
                            candidates = sorted(
                                list(filter(
                                    lambda p: (p.name.startswith(pth.name)
                                               and p.suffix != '.part'
                                               and p.suffix != '.json'),
                                    pth.parent.iterdir())),
                                key=lambda p: len(p.name))
                            if len(candidates) > 0:
                                files[seq] = str(candidates[0])
                                has_changed = True
                                break
                            else:
                                rc |= 256  # NOT_IN_DISK_ERR
                                # raise Exception(pth.name, candidates, files)
                del has_changed
                meta[link] = files
                statuses[link] = rc
                metafile.write_text(json.dumps(meta, indent=1))
                statusfile.write_text(json.dumps(statuses, indent=1))
                os.sync()
            code = statuses[link]
            for fl in meta[link]:
                pth = Path(fl)
                if not pth.exists():
                    if code != 0:
                        continue
                    else:
                        raise FileNotFoundError(
                            (link, link_already_downloaded, meta[link]))
                else:
                    totalfiles += 1
                    totalbytes += pth.stat().st_size
            result.append((link, meta[link]))
            remaining_links -= 1
            if thread_state_path is not None:
                scrubbing_running = 'scrubbing' if scrubbing else 'running'
                thread_state_path.write_text(
                    f'{scrubbing_running}:{link_count}:{remaining_links}:'
                    f'{totalbytes}:{totalfiles}:{link}')
            if STOP_JOBS_FLAG_PATH.exists():
                raise InterruptedError(STOP_JOBS_FLAG_PATH)
    finally:
        print((f'\033[{line};0H' if has_its_own_line else '') +
              clrlib.stylize(thread_id.strip(),
                             [clrlib.fg('yellow'), clrlib.attr('bold')]) +
              clrlib.stylize('#', [clrlib.fg('light_red')]) +
              clrlib.stylize('Done', [clrlib.fg('light_green')]) +
              ('\033[K' if has_its_own_line else ''))
    return result
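

# Cache layout note: each link maps to a directory under i_gdl_c (metadata)
# and i_gdl_s (status) via get_path_for_caching. A hypothetical lookup helper
# mirroring the reads above (an illustrative addition, unused by the
# pipeline):
def read_cached_files_for(link: str) -> List[str]:
    metafile = get_path_for_caching(
        link, Path('i_gdl_c')).joinpath('_gdl_meta.json')
    if not metafile.exists():
        return []
    return json.loads(metafile.read_text()).get(link, [])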

def configure_gdl():
    '''Configures gallery-dl for usage.'''
    parser = gallery_dl.option.build_parser()
    args = parser.parse_args([
        *([] if USE_FIREFOX_COOKIES else ['--cookies=i_gdl/.cookies']),
        '--dest=i_gdl',
        '--write-metadata',
        # '--write-tags',
        # '--write-log=i_gdl_log.txt',
        '--write-unsupported=i_gdl_unsupported.txt',
        # '--quiet',
        *(['--verbose'] if DEBUG_WORKER else []),
        '--retries=1',
        # '--retries=7',
        # '--limit-rate=1500k',
    ])
    gallery_dl.output.initialize_logging(args.loglevel)

    # configuration
    if args.load_config:
        gallery_dl.config.load()
    if args.cfgfiles:
        gallery_dl.config.load(args.cfgfiles, strict=True)
    if args.yamlfiles:
        gallery_dl.config.load(args.yamlfiles, strict=True, fmt="yaml")
    if args.postprocessors:
        gallery_dl.config.set((), "postprocessors", args.postprocessors)
    if args.abort:
        gallery_dl.config.set((), "skip", "abort:" + str(args.abort))
    for opts in args.options:
        gallery_dl.config.set(*opts)

    # loglevels
    gallery_dl.output.configure_logging(args.loglevel)

    gallery_dl.output.select = ColoredLineOutput
    gallery_dl.util.PathFormatOriginal = gdl_pf
    gallery_dl.util.PathFormat = OverriddenPathFormat


class DownloadJobWithCallSaverPostProcessor(gallery_dl.job.DownloadJob):
    """DownloadJob that shares one CallSaverPostProcessor across the job
    tree, so child jobs record into the same call log."""

    def __init__(self, url, parent=None):
        super().__init__(url, parent)
        self.cspp = (CallSaverPostProcessor(self)
                     if parent is None else parent.cspp)

    def initialize(self, kwdict=None):
        super().initialize(kwdict)
        # hooks is only a dict once gallery-dl has set up post-processor
        # hooks; it stays an empty tuple otherwise.
        if not isinstance(self.hooks, tuple):
            self.hooks['prepare'].append(self.cspp.prepare)
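

# Note on the class swap done in configure_gdl(): gallery_dl.util.PathFormat
# is replaced with OverriddenPathFormat (defined below), while gdl_pf keeps a
# reference to the untouched class so that repeated configure_gdl() calls do
# not wrap the override around itself.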

class ColoredLineOutput(gallery_dl.output.TerminalOutput):
    def __init__(self, sameline=False, prefix="", suffix="",
                 prefixsz=0, suffixsz=0, write_successes_to=None):
        super().__init__()
        self.sameline = sameline
        self.eol = '\r' if sameline else '\n'
        self.prefix = prefix
        self.suffix = suffix
        self.prefixsz = prefixsz
        self.suffixsz = suffixsz
        self.write_successes_to = write_successes_to
        self._termsize_update()

    def start(self, path):
        self.message(path,
                     clrlib.fg("light_yellow"),
                     )

    def skip(self, path):
        self.message(path,
                     clrlib.attr('dim'),
                     )

    def success(self, path, tries):
        self.message(path,
                     clrlib.attr('bold'),
                     clrlib.fg('light_green'),
                     )
        if self.write_successes_to is not None:
            self.write_successes_to.write_text(path)

    def message(self, txt: str, *attrs: str, do_print: bool = True) -> str:
        """Prints a message with given formatters"""
        clrtxt = clrlib.stylize(self.shorten(txt), attrs)
        fmtd = f"{self.prefix}{clrtxt}{self.suffix}"
        if do_print:
            print(fmtd, flush=True, end=self.eol)
        return fmtd

    def shorten(self, txt):
        self._termsize_update()
        self.width = self.termsize - self.prefixsz - self.suffixsz - 1
        return super().shorten(txt)

    def _termsize_update(self):
        self.termsize = shutil.get_terminal_size().columns


class OverriddenPathFormat(gdl_pf):
    def __init__(self, extractor):
        super().__init__(extractor)
        self.clean_path = FixFileNameFormatterWrapper(self.clean_path)


class CallSaverPostProcessor(gallery_dl.postprocessor.common.PostProcessor):
    def __init__(self, job):
        super().__init__(job)
        self.calls = dict(
            prepare=list(),
            run=list(),
            run_metadata=list(),
            run_after=list(),
            run_final=list(),
        )

    def prepare(self, pathfmt: gallery_dl.util.PathFormat):
        """Update file paths, etc."""
        # The formatter and cleaner attributes hold callables that cannot be
        # pickled, so they are detached, the PathFormat is deep-cloned
        # through pickle, and then they are restored on both the original
        # and the clone before the clone builds its final path.
        directory_formatters = pathfmt.directory_formatters
        filename_formatter = pathfmt.filename_formatter
        clean_segment = pathfmt.clean_segment
        clean_path = pathfmt.clean_path
        kwdict_fallback = pathfmt.kwdict.get('_fallback', None)
        # pp = pprint.PrettyPrinter()
        # pp.pprint(pathfmt)
        # pp.pprint(pathfmt.__dict__)
        pathfmt.directory_formatters = None
        pathfmt.filename_formatter = None
        pathfmt.clean_segment = None
        pathfmt.clean_path = None
        if kwdict_fallback is not None:
            pathfmt.kwdict['_fallback'] = None
        cloned_pathfmt: gallery_dl.util.PathFormat = pickle.loads(
            pickle.dumps(pathfmt))
        pathfmt.directory_formatters = directory_formatters
        pathfmt.filename_formatter = filename_formatter
        pathfmt.clean_segment = clean_segment
        pathfmt.clean_path = clean_path
        if kwdict_fallback is not None:
            pathfmt.kwdict['_fallback'] = kwdict_fallback
        cloned_pathfmt.directory_formatters = directory_formatters
        cloned_pathfmt.filename_formatter = filename_formatter
        cloned_pathfmt.clean_segment = clean_segment
        cloned_pathfmt.clean_path = clean_path
        if kwdict_fallback is not None:
            cloned_pathfmt.kwdict['_fallback'] = kwdict_fallback
        cloned_pathfmt.build_path()
        # print(cloned_pathfmt.path)
        # print(cloned_pathfmt.filename)
        # print(cloned_pathfmt.kwdict)
        # print(cloned_pathfmt)
        self.calls['prepare'].append(cloned_pathfmt.path)
        return pathfmt

    def run(self, pathfmt: gallery_dl.util.PathFormat):
        """Execute the postprocessor for a file"""
        self.calls['run'].append(pathfmt.path)

    def run_metadata(self, pathfmt: gallery_dl.util.PathFormat):
        """Execute the postprocessor for a file"""
        self.calls['run_metadata'].append(pathfmt.path)

    def run_after(self, pathfmt: gallery_dl.util.PathFormat):
        """Execute postprocessor after moving a file to its target location"""
        self.calls['run_after'].append(pathfmt.path)

    def run_final(self, pathfmt: gallery_dl.util.PathFormat, status: int):
        """Postprocessor finalization after all files have been downloaded"""
        self.calls['run_final'].append((pathfmt.path, status))
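

# A minimal usage sketch of the call-saving job (illustrative values; the
# real pipeline drives this from download_link_list above):
#
#     configure_gdl()
#     job = DownloadJobWithCallSaverPostProcessor('https://example.com/a')
#     rc = job.run()
#     target_paths = job.cspp.calls['prepare']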

class FixFileNameFormatterWrapper:
    """Wraps file name formatter for ensuring a valid file name length"""

    def __init__(self, formatter: gallery_dl.util.Formatter):
        self.formatter = formatter

    def __call__(self, *args, **kwargs) -> str:
        path = self.formatter(*args, **kwargs)
        parts = list(map(fix_filename_ending_extension,
                         map(fix_filename_length,
                             map(fix_filename_ending_extension,
                                 Path(path).parts))))
        return str(Path(*parts))


def fix_filename_length(filename: str) -> str:
    """Ensures a segment has a valid file name length"""
    # 240 bytes keeps a safety margin below the 255-byte file name limit
    # common to most filesystems.
    if len(filename.encode()) > 240:
        extension = Path(filename).suffix
        extension_bytes_length = len(extension.encode())
        stem_bytes = Path(filename).stem.encode()
        fixed_stem_bytes = stem_bytes[:240 - extension_bytes_length]
        fixed_stem = fixed_stem_bytes.decode(errors="ignore")
        return fixed_stem + extension
    return filename


def fix_filename_ending_extension(filename: str) -> str:
    """Strips trailing dots and spaces from a file name's stem,
    e.g. 'photo .jpg' -> 'photo.jpg'."""
    if (fp := Path(filename)).stem[-1:] in ('.', ' '):
        return str(fp.parent.joinpath(f"{fp.stem.rstrip('. ')}{fp.suffix}"))
    return filename


if __name__ == "__main__":
    cmdline()