#!/usr/bin/env python3
|
||
|
# -*- encoding: utf-8 -*-
|
||
|
|
||
|
import json
|
||
|
from pathlib import Path
|
||
|
import reddit_imgs.fetch
|
||
|
|
||
|
|
||
|
def main():
    """Build or refresh the SHA-256 cache for every downloaded image file.

    Reads the post index from ``i_c.json`` (running the fetch step first
    when the index is missing), then copies the precomputed
    ``<file>.sha256`` digest of each downloaded file into ``i_c_h.json``,
    skipping files that are already cached.
    """
    index_file = Path('i_c.json')
    if not index_file.exists():
        # The post index is produced by the fetch step; run it on demand.
        print("Executing prerequisite...")
        reddit_imgs.fetch.main()
    posts = json.loads(index_file.read_text())

    cached_hash_file = Path('i_c_h.json')
    if not cached_hash_file.exists():
        # Seed an empty JSON object so the first run parses cleanly.
        cached_hash_file.write_text('{}')
    cached_hash = json.loads(cached_hash_file.read_text())

    # Flatten every downloaded file across all posts.
    # NOTE(review): assumes each post dict has a 'downloaded' list whose
    # entries carry a 'file' path — confirm against the fetch step's output.
    image_files = [
        download['file']
        for post in posts
        for download in post['downloaded']]
    total = len(image_files)
    for seq, image_file in enumerate(image_files, start=1):
        if image_file not in cached_hash:
            # The counter runs over flattened *files*, not posts, so the
            # message says "file" (the original said "post").
            print(f"Handling file {seq} of {total}... ")
            # The digest was precomputed alongside the download as
            # '<file>.sha256'; just read it back.
            cached_hash[image_file] = Path(image_file + '.sha256').read_text()

    # Persist the (possibly unchanged) cache back to disk.
    cached_hash_file.write_text(json.dumps(cached_hash, indent=1))
|
||
|
|
||
|
|
||
|
# Script entry point: run the hashing pass only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|