#21 - Changed the way files are automatically renamed to avoid collisions like this one

Former-commit-id: e1935b3f416cbd7e1f03539489a645469739784f [formerly 810afdc2513a8a86c14f056f6ece96c5d391cc60]
Former-commit-id: f164d92
Nick Dawson committed Jul 15, 2022
1 parent 3179c64 commit 6992c15
Showing 3 changed files with 45 additions and 195 deletions.
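
The renaming change hinges on the auto_filename() helper, which is called in the diff below but whose implementation is not part of this commit. A minimal sketch of what such a collision-avoiding helper plausibly does, assuming it appends an incrementing numeric suffix before the extension until the path is free (the suffix scheme is an assumption, not taken from gparch.py):

import os

def auto_filename(path):
    # Hypothetical sketch: return path unchanged if it is free, otherwise
    # append (1), (2), ... before the extension until no file exists there.
    if not os.path.isfile(path):
        return path
    base, ext = os.path.splitext(path)
    counter = 1
    while os.path.isfile(f"{base}({counter}){ext}"):
        counter += 1
    return f"{base}({counter}){ext}"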
69 changes: 44 additions & 25 deletions gparch.py
@@ -41,7 +41,7 @@
"""

-VERSION = "2.1.2"
+VERSION = "2.2.0"

# Define Scopes for Application
SCOPES = [
@@ -134,9 +134,9 @@ def __init__(self, credentials_path, directory, thread_count, debug):
self.timer = time()
self.downloads = 0
self.debug = debug

if self.debug:
-safe_mkdir('debug')
+safe_mkdir("debug")

# Define/Init Database
self.db_path = self.base_dir + "/" + DATABASE_NAME
@@ -201,10 +201,11 @@ def get_session_stats(self):

def download_media_item(self, entry):
try:
-url, path, description = entry
+uuid, album_uuid, url, path, description = entry
if not os.path.isfile(path):
r = requests.get(url)
if r.status_code == 200:
+path = auto_filename(path)
if description:
try:
img = Image.open(io.BytesIO(r.content))
@@ -217,19 +218,28 @@ def download_media_item(self, entry):

# This is a known bug with piexif (https://github.com/hMatoba/Piexif/issues/95)
if 41729 in exif_dict["Exif"]:
-exif_dict["Exif"][41729] = bytes(exif_dict["Exif"][41729])
+exif_dict["Exif"][41729] = bytes(
+    exif_dict["Exif"][41729]
+)

exif_bytes = piexif.dump(exif_dict)
img.save(path, exif=exif_bytes)
except ValueError:
# This value here is to catch a specific scenario with file extensions that have
# descriptions that are unsupported by Pillow so the program can't modify the EXIF data.
-print(' [INFO] media file unsupported, can\'t write description to EXIF data.')
+print(
+    " [INFO] media file unsupported, can't write description to EXIF data."
+)
open(path, "wb").write(r.content)
else:
open(path, "wb").write(r.content)

self.downloads += 1
-return True
+return (
+    uuid,
+    path,
+    album_uuid,
+)

else:
return False
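
Most of the EXIF handling in the hunk above is only reflowed; the substantive change is that a successful download now returns (uuid, path, album_uuid) instead of True, so the caller can record the final, possibly renamed path. For reference, the description-writing flow can be exercised standalone. A sketch assuming Pillow and piexif, with the function name and the empty-EXIF fallback being illustrative rather than copied from gparch.py:

import io

import piexif
from PIL import Image

def save_with_description(content: bytes, path: str, description: str) -> None:
    # Decode the downloaded bytes and pull any existing EXIF segment.
    img = Image.open(io.BytesIO(content))
    if "exif" in img.info:
        exif_dict = piexif.load(img.info["exif"])
    else:
        exif_dict = {"0th": {}, "Exif": {}, "GPS": {}, "Interop": {}, "1st": {}, "thumbnail": None}
    # Store the description in the ImageDescription tag of the 0th IFD.
    exif_dict["0th"][piexif.ImageIFD.ImageDescription] = description
    # Known piexif quirk (https://github.com/hMatoba/Piexif/issues/95): tag 41729
    # (SceneType) is loaded as an int, but piexif.dump() only accepts bytes there.
    if 41729 in exif_dict["Exif"]:
        exif_dict["Exif"][41729] = bytes(exif_dict["Exif"][41729])
    img.save(path, exif=piexif.dump(exif_dict))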
@@ -241,8 +251,16 @@ def download(self, entries, desc, thread_count):
result = ThreadPool(thread_count).imap_unordered(
self.download_media_item, entries
)
-for _ in tqdm(result, unit=" media items", total=len(entries), desc=desc):
-    pass
+for downloaded_entry in tqdm(
+    result, unit=" media items", total=len(entries), desc=desc
+):
+    if downloaded_entry:
+        uuid, path, album_uuid = downloaded_entry
+        self.insert_media_item(
+            uuid,
+            path,
+            album_uuid,
+        )

def select_media_item(self, uuid):
return self.cur.execute(
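
select_media_item and insert_media_item are the SQLite helpers this change leans on: the media-item row is now written only after a download succeeds, using the final path reported back by the worker. Neither helper's body appears in this diff, so the following is only a rough sketch, written as a free function, under an assumed table and column naming:

import sqlite3

def insert_media_item(cur: sqlite3.Cursor, uuid: str, path: str, album_uuid: str) -> None:
    # Assumed schema: media_items(uuid TEXT PRIMARY KEY, path TEXT, album_uuid TEXT);
    # the real table and column names in gparch.py may differ.
    cur.execute(
        "INSERT OR REPLACE INTO media_items (uuid, path, album_uuid) VALUES (?, ?, ?)",
        (uuid, path, album_uuid),
    )
    cur.connection.commit()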
@@ -273,17 +291,12 @@ def process_media_items(self, media_items, save_directory, album_uuid=None):
item_path = None

# Select the media item from the database
-# -> if it exists then insert a new media item and set the item_path to the newly set path
+# -> if it doesn't exist then generate the item_path
# -> if it already exists then just pull the item_path from the existing db entry
item_db_entry = self.select_media_item(item["id"])
if not item_db_entry:
item["filename"] = sanitize(item["filename"])
-item_path = auto_filename(f'{save_directory}/{item["filename"]}')
-self.insert_media_item(
-    item["id"],
-    item_path,
-    album_uuid,
-)
+item_path = f'{save_directory}/{item["filename"]}'
else:
item_path = item_db_entry[1]

@@ -297,6 +310,8 @@ def process_media_items(self, media_items, save_directory, album_uuid=None):
if "image" in item["mimeType"]:
media.append(
(
item["id"],
album_uuid,
item["baseUrl"] + "=d",
item_path,
item["description"],
@@ -306,6 +321,8 @@ def process_media_items(self, media_items, save_directory, album_uuid=None):
elif "video" in item["mimeType"]:
media.append(
(
item["id"],
album_uuid,
item["baseUrl"] + "=dv",
item_path,
item["description"],
@@ -340,7 +357,7 @@ def download_single_album(self, album, shared=False):
# Next check to see if the album has a title, if it doesn't give it default name
if "title" not in album:
album["title"] = "Unnamed Album"

# Sanitize album title
album["title"] = sanitize(album["title"])

@@ -366,9 +383,9 @@ def download_single_album(self, album, shared=False):
request = self.service.mediaItems().search(body=request_body).execute()
else:
break

if self.debug:
-save_json(album_items, 'debug/' + album["title"] + '.json')
+save_json(album_items, "debug/" + album["title"] + ".json")

# Directory where the album exists
album_path = None
@@ -387,15 +404,17 @@ def download_single_album(self, album, shared=False):
self.insert_album(album["id"], album_path, album["title"], shared)

processed_items = self.process_media_items(album_items, album_path, album["id"])

if processed_items:
self.download(
processed_items,
f"Downloading {'Shared ' if shared else ''}Album: \"{album['title']}\"",
self.thread_count,
)
else:
print(f"Downloading {'Shared ' if shared else ''}Album: \"{album['title']}\"")
print(
f"Downloading {'Shared ' if shared else ''}Album: \"{album['title']}\""
)
print("Everything already downloaded.")

def list_media_items(self):
@@ -406,7 +425,7 @@ def list_media_items(self):
return {}
while True:
if self.debug:
-save_json(request, 'debug/media' + str(num) + '.json')
+save_json(request, "debug/media" + str(num) + ".json")
if "mediaItems" in request:
media_items_list += request["mediaItems"]
if "nextPageToken" in request:
@@ -431,7 +450,7 @@ def list_albums(self):
return {}
while True:
if self.debug:
-save_json(request, 'debug/albums' + str(num) + '.json')
+save_json(request, "debug/albums" + str(num) + ".json")
if "albums" in request:
album_list += request["albums"]
if "nextPageToken" in request:
@@ -456,7 +475,7 @@ def list_shared_albums(self):
return {}
while True:
if self.debug:
-save_json(request, 'debug/shared_albums' + str(num) + '.json')
+save_json(request, "debug/shared_albums" + str(num) + ".json")
shared_album_list += request["sharedAlbums"]
if "nextPageToken" in request:
next_page = request["nextPageToken"]
@@ -487,7 +506,7 @@ def search_favorites(self):
return {}
while True:
if self.debug:
-save_json(request, 'debug/favorites' + str(num) + '.json')
+save_json(request, "debug/favorites" + str(num) + ".json")
if "mediaItems" in request:
favorites_list += request["mediaItems"]
if "nextPageToken" in request:
5 changes: 1 addition & 4 deletions gparch_cli.py
@@ -59,10 +59,7 @@
type=str,
)
parser.add_argument(
"-d",
"--debug",
help="enable debugging mode",
action="store_true"
"-d", "--debug", help="enable debugging mode", action="store_true"
)
parser.add_argument(
"-t",
166 changes: 0 additions & 166 deletions gui/main.ui

This file was deleted.
