reddit-image-wall-getter/reddit_imgs/system/downloader/modules/gallerydl.py

#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
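"""Resolve gallery/album links through gallery-dl and download every image they expand to."""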
import subprocess
from pathlib import Path

import gallery_dl.extractor

from ... import simpleDownloader
from .. import cache
from ..downloadedData import DownloadedData
from ._cacheable import get_link_bytes
from .direct_link import DirectLink


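# The downloader front-end selects a module by a pseudo-domain tag rather than
# a real hostname; this module claims links filed under 'gallery_link'.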
def works_on(domain):
    return domain == 'gallery_link'


class GalleryDlWrapper(DirectLink):
    def recognizes(self, link):
        return '<!--' not in link and gallery_dl.extractor.find(cache.get_normalized_link(link)) is not None

    def download(self, goal_link):
        print(' '*50, end='', flush=True)
        print('\r', end='', flush=True)
        print(' `--> Looking album up', end='', flush=True)
        print('\r', end='', flush=True)
        dd = DownloadedData()
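        # Resume from a previous interrupted run if the on-disk checkpoint
        # refers to this same album link.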
if Path("gallery_dl_tmp.txt").exists():
if Path("gallery_dl_tmp.txt").read_text() == goal_link:
if Path("gallery_dl_tmp.d").exists():
dd.loadfrom("gallery_dl_tmp.d")
        links = list()
        lineno = 0
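        # Ask gallery-dl (invoked here as 'gallery-dl-cookie', presumably a
        # cookie-aware wrapper script) to only print the album's direct URLs
        # instead of downloading anything itself.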
        r = subprocess.Popen(
            [
                'gallery-dl-cookie', '--get-urls', cache.get_normalized_link(goal_link)
            ],
            bufsize=1,
            text=True,
            universal_newlines=True,
            stdout=subprocess.PIPE
        )
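        # Stream gallery-dl's output line by line, collecting URLs as they arrive:
        # lines prefixed with '| ' are unwrapped, 'ytdl:' entries are skipped, and
        # a one-line progress indicator is printed per URL.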
        for stdout_line_with_terminators in iter(r.stdout.readline, ''):
            stdout_line = stdout_line_with_terminators.strip()
            if stdout_line.startswith('| http'):
                stdout_line = stdout_line[2:]
            if stdout_line.startswith('ytdl:'):
                continue
            links.append(stdout_line)
            link = stdout_line
            if len(link) > 58:
                link = link[:27]+'[\u2026]'+link[-28:]
            print(' '*79, end='', flush=True)
            print('\r', end='', flush=True)
            print(' `--> Got image %03d: %s' %
                  (lineno+1, link), end='', flush=True)
            print('\r', end='', flush=True)
            lineno += 1
        del lineno
        r.communicate()
        r.wait()
        print(' '*79, end='', flush=True)
        print('\r', end='', flush=True)
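        # No usable URL came back (empty output or a 'text:' entry): report the
        # album as empty and return whatever was already stored.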
        if (len(links) == 1 and links[0] == '') or (len(links) >= 1 and links[0].startswith('text:')):
            print(' '*50, end='', flush=True)
            print('\r', end='', flush=True)
            print(' `--> Album is empty', end='', flush=True)
            print('\r', end='', flush=True)
            return dd
            # raise Exception(links)
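        # Download every resolved URL, skipping ones already stored and
        # checkpointing every 10 images so an interrupted run can be resumed.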
        for seq, link in enumerate(links):
            if link in dd.storedLinks():
                continue
            if seq % 10 == 0 and seq != 0:
                dd.into("gallery_dl_tmp.d")
                Path("gallery_dl_tmp.txt").write_text(goal_link)
            print(' '*50, end='', flush=True)
            print('\r', end='', flush=True)
            print(' `--> Album image #%03d of %03d' %
                  (seq+1, len(links)), end='', flush=True)
            print('\r', end='', flush=True)
            # print('\n'+link)
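            # Fetch the image bytes through the shared downloader, clearing
            # cookies before and after each request; the cache-path lookup
            # result is currently unused.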
            cached = cache.get_path_for_caching(link)
            simpleDownloader.cleanCookies()
            bts = get_link_bytes(
                link, self.needsPromiscuity(link))
            simpleDownloader.cleanCookies()
            if bts is not None:
                dd.put(link, bts)
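        # The whole album has been processed: drop the resume checkpoint.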
if Path("gallery_dl_tmp.txt").exists():
Path("gallery_dl_tmp.txt").unlink()
if Path("gallery_dl_tmp.d").exists():
dd.remove("gallery_dl_tmp.d")
return dd
def get_class():
    return GalleryDlWrapper