diff --git a/README.md b/README.md
index 5f8c495..e1982e8 100644
--- a/README.md
+++ b/README.md
@@ -147,14 +147,17 @@ Plex Image Cleanup has multiple Global Options to change how it runs these are s
 ### Example .env File
 
 ```
-PLEX_URL=http://192.168.1.12:32400
-PLEX_TOKEN=123456789
 PLEX_PATH=C:\Plex Media Server
 MODE=report
 SCHEDULE=
+PLEX_URL=http://192.168.1.12:32400
+PLEX_TOKEN=123456789
 DISCORD=https://discord.com/api/webhooks/###################/####################################################################
 TIMEOUT=600
 SLEEP=60
+IGNORE_RUNNING=False
+LOCAL_DB=False
+USE_EXISTING=False
 PHOTO_TRANSCODER=False
 EMPTY_TRASH=False
 CLEAN_BUNDLES=False
@@ -165,59 +168,67 @@ LOG_REQUESTS=False
 
 ### Base Options
 
-#### Plex URL & Token
-
-The script will expect to connect to your Plex Server using your `Plex URL` and `Plex Token` Options ([Finding a Token](https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/)).
-
-* **Environment Variables:**
-
-  * `PLEX_URL=http://192.168.1.12:32400`
-
-  * `PLEX_TOKEN=123456789`
-
-* **Shell Commands:**
-
-  * `-u "http://192.168.1.12:32400"` or `--url "http://192.168.1.12:32400"`
-
-  * `-t "123456789"` or `--token "123456789"`
-
 #### Plex Path
 
-The only other required Option is the `Plex Path` Option which is the Plex Config Directory containing the servers Metadata including `Cache`, `Metadata`, and `Plug-in Support`.
+The only required Option is the `Plex Path` Option, which is the Plex Config Directory containing the server's Metadata including `Cache`, `Metadata`, and `Plug-in Support`.
 
 To set the `Plex Path` for the run:
-
 * **Environment Variable:** `PLEX_PATH=C:\Plex Media Server`
-
 * **Shell Command:** `-p "C:\Plex Media Server"` or `--plex "C:\Plex Media Server"`
-
-**Will also check `/plex` relative to the base directory of the script if neither of the above are specified.**
+* Will also check `/plex` relative to the base directory of the script if neither of the above is specified.
 
 #### Mode
 
 How Plex Image Cleanup runs depends on the `Mode` Option that's currently set for that run.
 
-Here, "unused images" refers to unused uploaded images as described above.
+* `report`: Metadata Directory File changes will be reported but not performed.
+* `move`: Metadata Directory Files will be moved to the PIC Restore Directory. (CAN BE RESTORED)
+* `restore`: Restores the Metadata Directory Files from the PIC Restore Directory.
+* `clear`: Clears out the PIC Restore Directory. (CANNOT BE RESTORED)
+* `remove`: Metadata Directory Files will be removed. (CANNOT BE RESTORED)
+* `nothing`: Metadata Directory Files will not even be looked at.
 
-* `report`: Reports statistics on unused images [count, size] but takes no action on them [like moving or deleting].
+To set the Global `Mode` for the run:
+* **Environment Variable:** `MODE=remove`
+* **Shell Command:** `-m remove` or `--mode remove`
 
-* `move`: Moves unused images to the PIC Restore Directory. From there they can be `restore`d or `clear`ed.
+### Database
 
-* `restore`: Restores the unused images from the PIC Restore Directory [as created by `move`] to the Metadata Directory. This restores Plex to its state prior to running PIC.
+The script needs to query the server's Plex database to make sure it doesn't remove actively selected images.
 
-* `clear`: Deletes the unused images from the PIC Restore Directory [as created by `move`]. (CANNOT BE UNDONE)
+#### Download From Plex API
 
-* `remove`: Deletes the unused images from the Metadata Directory immediately, without the stop in the PIC Restore Directory. (CANNOT BE UNDONE)
+By default, the script will expect to connect to your Plex Server to download the Database using your `Plex URL` and `Plex Token` Options ([Finding a Token](https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/)).
 
-* `nothing`: Does nothing with unused images; no report, no action.
+* **Environment Variables:**
+  * `PLEX_URL=http://192.168.1.12:32400`
+  * `PLEX_TOKEN=123456789`
+* **Shell Commands:**
+  * `-u "http://192.168.1.12:32400"` or `--url "http://192.168.1.12:32400"`
+  * `-t "123456789"` or `--token "123456789"`
 
-To set the Global `Mode` for the run:
+#### Copy From Local
 
-* **Environment Variable:** `MODE=remove`
+Alternatively, the database can be copied from the local config directory you supplied in the [`Plex Path`](#plex-path) Option by using the `Local DB` Option.
 
-* **Shell Command:** `-m remove` or `--mode remove`
+* **Environment Variable:** `LOCAL_DB=True`
+* **Shell Command:** `-l` or `--local`
+
+**IMPORTANT! When Copying the Local Database, it is recommended to restart Plex before running this script and to make sure Plex is idle.**
+
+Restarting allows all temp SQLite files to be written to the primary Plex DB, ensuring that all currently selected posters are properly known and preserved.
+
+The script will not run when the temp SQLite files are found. To ignore this error, use the `Ignore Running` Option.
+
+* **Environment Variable:** `IGNORE_RUNNING=True`
+* **Shell Command:** `-i` or `--ignore`
+
+#### Use Existing
+
+A previously downloaded or copied database can be used if it's less than 2 hours old by using the `Use Existing` Option. If the database is more than 2 hours old, a new one will be downloaded or copied.
 
-**NOTE: `report` is the default mode if you do not specify a mode.**
+* **Environment Variable:** `USE_EXISTING=True`
+* **Shell Command:** `-e` or `--existing`
 
 ### Other Operations
diff --git a/VERSION b/VERSION
index 7dea76e..159b807 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.0.1
+1.0.2!
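The new `Database` options described in the README boil down to obtaining a copy of `com.plexapp.plugins.library.db` (downloaded through the Plex API or copied from the `Plex Path` config directory) and reading the user-selected image fields out of it. The sketch below is a minimal, hypothetical illustration of that query only: the `DB_PATH` value and the `in_use_images` helper name are assumptions for the example and not part of the script, which additionally handles the API download, the 2-hour `USE_EXISTING` age check, and the SQLite temp-file warning.

```python
import os
import sqlite3
from contextlib import closing
from urllib.parse import urlparse

# Hypothetical location of a copied Plex database; Plex Image Cleanup keeps its
# own copy inside its config directory.
DB_PATH = "config/com.plexapp.plugins.library.db"


def in_use_images(db_path):
    """Return the set of uploaded/metadata image names currently selected in Plex."""
    urls = []
    with sqlite3.connect(db_path) as connection:
        connection.row_factory = sqlite3.Row
        with closing(connection.cursor()) as cursor:
            for field in ["user_thumb_url", "user_art_url", "user_banner_url"]:
                cursor.execute(
                    f"SELECT {field} AS url FROM metadata_items "
                    f"WHERE {field} LIKE 'upload://%' OR {field} LIKE 'metadata://%'"
                )
                # upload://posters/abc123 -> abc123 (the file name used in the Metadata dir)
                urls.extend(urlparse(row["url"]).path.split("/")[-1]
                            for row in cursor.fetchall() if row["url"])
    return set(urls)


if __name__ == "__main__":
    if not os.path.exists(DB_PATH):
        raise SystemExit(f"No database copy found at {DB_PATH}")
    print(f"{len(in_use_images(DB_PATH))} in-use images found")
```

Anything whose name comes back from this query is treated as actively selected and must never be moved or removed, which is why the script refuses to run against a database it cannot trust (hence the temp-file check and the `IGNORE_RUNNING` escape hatch).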
diff --git a/config/example.env b/config/example.env
index 7ea027b..7e3965c 100644
--- a/config/example.env
+++ b/config/example.env
@@ -1,11 +1,14 @@
-PLEX_URL=http://192.168.1.12:32400
-PLEX_TOKEN=123456789
 PLEX_PATH=C:\Plex Media Server
 MODE=report
 SCHEDULE=
+PLEX_URL=http://192.168.1.12:32400
+PLEX_TOKEN=123456789
 DISCORD=https://discord.com/api/webhooks/###################/####################################################################
 TIMEOUT=600
 SLEEP=60
+IGNORE_RUNNING=False
+LOCAL_DB=False
+USE_EXISTING=False
 PHOTO_TRANSCODER=False
 EMPTY_TRASH=False
 CLEAN_BUNDLES=False
diff --git a/plex_image_cleanup.py b/plex_image_cleanup.py
index 607055a..e39d8d7 100644
--- a/plex_image_cleanup.py
+++ b/plex_image_cleanup.py
@@ -1,8 +1,7 @@
-import glob, os, shutil, sys, time
+import glob, os, re, shutil, sqlite3, sys, time, zipfile
 from concurrent.futures import ProcessPoolExecutor
+from contextlib import closing
 from datetime import datetime
-from hashlib import sha1
-from pathlib import Path
 from urllib.parse import quote
 
 if sys.version_info[0] != 3 or sys.version_info[1] < 11:
@@ -27,18 +26,6 @@ def not_failed(exception):
     return not isinstance(exception, Failed)
 
-meta_dirs = {
-    "movie": "Movies",
-    "show": "TV Shows",
-    "season": "TV Shows",
-    "episode": "TV Shows",
-    "artist": "Artists",
-    "album": "Albums",
-    "track": "Albums",
-    "photo": "Photos",
-    "collection": "Collections",
-    "playlist": "Playlists"
-}
 modes = {
     "nothing": {
         "ed": "", "ing": "", "space": "",
@@ -68,14 +55,17 @@ def not_failed(exception):
 mode_descriptions = '\n\t'.join([f"{m}: {d}" for m, d in modes.items()])
 sc_options = ["mode", "photo-transcoder", "empty-trash", "clean-bundles", "optimize-db"]
 options = [
-    {"arg": "u", "key": "url", "env": "PLEX_URL", "type": "str", "default": None, "help": "Plex URL of the Server you want to connect to."},
-    {"arg": "t", "key": "token", "env": "PLEX_TOKEN", "type": "str", "default": None, "help": "Plex Token of the Server you want to connect to."},
     {"arg": "p", "key": "plex", "env": "PLEX_PATH", "type": "str", "default": None, "help": "Path to the Plex Config Directory (Contains Directories: Cache, Metadata, Plug-in Support)."},
     {"arg": "m", "key": "mode", "env": "MODE", "type": "str", "default": "report", "help": f"Global Mode to Run the Script in ({', '.join(modes)}). (Default: report)"},
     {"arg": "sc", "key": "schedule", "env": "SCHEDULE", "type": "str", "default": None, "help": "Schedule to run in continuous mode."},
+    {"arg": "u", "key": "url", "env": "PLEX_URL", "type": "str", "default": None, "help": "Plex URL of the Server you want to connect to."},
+    {"arg": "t", "key": "token", "env": "PLEX_TOKEN", "type": "str", "default": None, "help": "Plex Token of the Server you want to connect to."},
     {"arg": "di", "key": "discord", "env": "DISCORD", "type": "str", "default": None, "help": "Webhook URL to channel for Notifications."},
     {"arg": "ti", "key": "timeout", "env": "TIMEOUT", "type": "int", "default": 600, "help": "Connection Timeout in Seconds that's greater than 0. (Default: 600)"},
     {"arg": "s", "key": "sleep", "env": "SLEEP", "type": "int", "default": 60, "help": "Sleep Timer between Empty Trash, Clean Bundles, and Optimize DB. (Default: 60)"},
+    {"arg": "i", "key": "ignore", "env": "IGNORE_RUNNING", "type": "bool", "default": False, "help": "Ignore Warnings that Plex is currently Running."},
+    {"arg": "l", "key": "local", "env": "LOCAL_DB", "type": "bool", "default": False, "help": "The script will copy the database file rather than downloading it through the Plex API (Helps with Large DBs)."},
+    {"arg": "e", "key": "existing", "env": "USE_EXISTING", "type": "bool", "default": False, "help": "Use the existing database if less than 2 hours old."},
     {"arg": "pt", "key": "photo-transcoder", "env": "PHOTO_TRANSCODER", "type": "bool", "default": False, "help": "Global Toggle to Clean Plex's PhotoTranscoder Directory."},
     {"arg": "et", "key": "empty-trash", "env": "EMPTY_TRASH", "type": "bool", "default": False, "help": "Global Toggle to Run Plex's Empty Trash Operation."},
     {"arg": "cb", "key": "clean-bundles", "env": "CLEAN_BUNDLES", "type": "bool", "default": False, "help": "Global Toggle to Run Plex's Clean Bundles Operation."},
@@ -85,10 +75,10 @@ def not_failed(exception):
 ]
 script_name = "Plex Image Cleanup"
 plex_db_name = "com.plexapp.plugins.library.db"
-base_dir = Path(__file__).parent
-config_dir = base_dir / "config"
+base_dir = os.path.dirname(os.path.abspath(__file__))
+config_dir = os.path.join(base_dir, "config")
 pmmargs = PMMArgs("meisnate12/Plex-Image-Cleanup", base_dir, options, use_nightly=False)
-logger = logging.PMMLogger(script_name, "plex_image_cleanup", config_dir / "logs", discord_url=pmmargs["discord"], is_trace=pmmargs["trace"], log_requests=pmmargs["log-requests"])
+logger = logging.PMMLogger(script_name, "plex_image_cleanup", os.path.join(config_dir, "logs"), discord_url=pmmargs["discord"], is_trace=pmmargs["trace"], log_requests=pmmargs["log-requests"])
 logger.secret([pmmargs["url"], pmmargs["discord"], pmmargs["token"], quote(str(pmmargs["url"])), requests.utils.urlparse(pmmargs["url"]).netloc])
 requests.Session.send = util.update_send(requests.Session.send, pmmargs["timeout"])
 plexapi.BASE_HEADERS["X-Plex-Client-Identifier"] = pmmargs.uuid
@@ -98,40 +88,32 @@ def pic_thread(attrs):
         executor.submit(run_plex_image_cleanup, *[attrs])
 
 def run_plex_image_cleanup(attrs):
-    try:
-        logger.header(pmmargs, sub=True, discord_update=True)
-        logger.separator("Validating Options", space=False, border=False)
-        do_transcode = attrs["photo-transcoder"] if "photo-transcoder" in attrs else pmmargs["photo-transcoder"]
-        do_trash = attrs["empty-trash"] if "empty-trash" in attrs else pmmargs["empty-trash"]
-        do_bundles = attrs["clean-bundles"] if "clean-bundles" in attrs else pmmargs["clean-bundles"]
-        do_optimize = attrs["optimize-db"] if "optimize-db" in attrs else pmmargs["optimize-db"]
-        if "mode" in attrs and attrs["mode"]:
-            mode = str(attrs["mode"]).lower()
-        elif pmmargs["mode"]:
-            mode = str(pmmargs["mode"]).lower()
-        else:
-            mode = "report"
-        description = f"Running in {mode.capitalize()} Mode"
-        extras = []
-        if do_trash:
-            extras.append("Empty Trash")
-        if do_bundles:
-            extras.append("Clean Bundles")
-        if do_optimize:
-            extras.append("Optimize DB")
-        if do_transcode:
-            extras.append("PhotoTrancoder")
-        if extras:
-            description += f" with {', '.join(extras[:-1])}{', and ' if len(extras) > 1 else ''}{extras[-1]} set to True"
-        logger.info(description)
-    except Exception as e:
-        logger.stacktrace()
-        logger.critical(e, discord=True)
-        raise
-    except KeyboardInterrupt:
-        logger.separator(f"User Canceled Run {script_name}")
-        logger.remove_main_handler()
-        raise
+    logger.header(pmmargs, sub=True, discord_update=True)
+    logger.separator("Validating Options", space=False, border=False)
+    do_transcode = attrs["photo-transcoder"] if "photo-transcoder" in attrs else pmmargs["photo-transcoder"]
+    do_trash = attrs["empty-trash"] if "empty-trash" in attrs else pmmargs["empty-trash"]
+    do_bundles = attrs["clean-bundles"] if "clean-bundles" in attrs else pmmargs["clean-bundles"]
+    do_optimize = attrs["optimize-db"] if "optimize-db" in attrs else pmmargs["optimize-db"]
+    local_run = pmmargs["local"]
+    if "mode" in attrs and attrs["mode"]:
+        mode = str(attrs["mode"]).lower()
+    elif pmmargs["mode"]:
+        mode = str(pmmargs["mode"]).lower()
+    else:
+        mode = "report"
+    description = f"Running in {mode.capitalize()} Mode"
+    extras = []
+    if do_trash:
+        extras.append("Empty Trash")
+    if do_bundles:
+        extras.append("Clean Bundles")
+    if do_optimize:
+        extras.append("Optimize DB")
+    if do_transcode:
+        extras.append("PhotoTranscoder")
+    if extras:
+        description += f" with {', '.join(extras[:-1])}{', and ' if len(extras) > 1 else ''}{extras[-1]} set to True"
+    logger.info(description)
 
     try:
         logger.info("Script Started", log=False, discord=True, start="script")
@@ -145,49 +127,57 @@ def run_plex_image_cleanup(attrs):
             raise Failed(f"Mode Error: {mode} Invalid. Options: \n\t{mode_descriptions}")
         logger.info(f"{mode.capitalize()}: {modes[mode]['desc']}")
         do_metadata = mode in ["report", "move", "remove"]
+        if do_metadata and not local_run and not pmmargs["url"] and not pmmargs["token"]:
+            local_run = True
+            logger.warning("No Plex URL and Plex Token Given, assuming Local Run")
 
         # Check Plex Path
         if not pmmargs["plex"]:
-            pmmargs["plex"] = base_dir / "plex"
-            if not pmmargs["plex"].exists():
+            if not os.path.exists(os.path.join(base_dir, "plex")):
                 raise Failed("Args Error: No Plex Path Provided")
-            logger.warning(f"No Plex Path Provided. Using default: {pmmargs['plex']}")
-        pmmargs["plex"] = Path(pmmargs["plex"]).resolve()
-        transcoder_dir = pmmargs["plex"] / "Cache" / "PhotoTranscoder"
-        meta_dir = pmmargs["plex"] / "Metadata"
-        restore_dir = pmmargs["plex"] / "PIC Restore"
-
-        if not pmmargs["plex"].exists():
-            raise Failed(f"Directory Error: Plex Directory Not Found: {pmmargs['plex']}")
-        elif mode != "nothing" and not meta_dir.exists():
+            logger.warning(f"No Plex Path Provided. Using default: {os.path.join(base_dir, 'plex')}")
+            pmmargs["plex"] = os.path.join(base_dir, "plex")
+        pmmargs["plex"] = os.path.abspath(pmmargs["plex"])
+        transcoder_dir = os.path.join(pmmargs["plex"], "Cache", "PhotoTranscoder")
+        databases_dir = os.path.join(pmmargs["plex"], "Plug-in Support", "Databases")
+        meta_dir = os.path.join(pmmargs["plex"], "Metadata")
+        restore_dir = os.path.join(pmmargs["plex"], "PIC Restore")
+
+        if not os.path.exists(pmmargs["plex"]):
+            raise Failed(f"Directory Error: Plex Databases Directory Not Found: {os.path.abspath(pmmargs['plex'])}")
+        elif local_run and not os.path.exists(databases_dir):
+            raise Failed(f"Directory Error: Plug-in Support\\Databases Directory Not Found: {databases_dir}")
+        elif mode != "nothing" and not os.path.exists(meta_dir):
            raise Failed(f"Directory Error: Metadata Directory Not Found: {meta_dir}")
-        elif do_transcode and not transcoder_dir.exists():
+        elif do_transcode and not os.path.exists(transcoder_dir):
             logger.error(f"Directory Error: PhotoTranscoder Directory Not Found and will not be cleaned: {transcoder_dir}")
             do_transcode = False
 
         # Connection to Plex
-        logger.info("Connecting To Plex")
-        if not pmmargs["url"]:
-            raise Failed("Args Error: No Plex URL Provided")
-        if not pmmargs["token"]:
-            raise Failed("Args Error: No Plex Token Provided")
-        plexapi.server.TIMEOUT = pmmargs["timeout"]
-        os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(pmmargs["timeout"])
-
-        @retry(stop_max_attempt_number=5, wait_incrementing_start=60000, wait_incrementing_increment=60000, retry_on_exception=not_failed)
-        def plex_connect():
-            try:
-                return PlexServer(pmmargs["url"], pmmargs["token"], timeout=pmmargs["timeout"])
-            except Unauthorized:
-                raise Failed("Plex Error: Plex token is invalid")
-            except Exception as e1:
-                logger.error(e1)
-                raise
-        server = plex_connect()
-        logger.info("Successfully Connected to Plex")
+        server = None
+        if do_trash or do_bundles or do_optimize or (do_metadata and not local_run):
+            logger.info("Connecting To Plex")
+            if not pmmargs["url"]:
+                raise Failed("Args Error: No Plex URL Provided")
+            if not pmmargs["token"]:
+                raise Failed("Args Error: No Plex Token Provided")
+            plexapi.server.TIMEOUT = pmmargs["timeout"]
+            os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(pmmargs["timeout"])
+
+            @retry(stop_max_attempt_number=5, wait_incrementing_start=60000, wait_incrementing_increment=60000, retry_on_exception=not_failed)
+            def plex_connect():
+                try:
+                    return PlexServer(pmmargs["url"], pmmargs["token"], timeout=pmmargs["timeout"])
+                except Unauthorized:
+                    raise Failed("Plex Error: Plex token is invalid")
+                except Exception as e1:
+                    logger.error(e1)
+                    raise
+            server = plex_connect()
+            logger.info("Successfully Connected to Plex")
 
         try:
-            if do_metadata and restore_dir.exists():
+            if do_metadata and os.path.exists(restore_dir):
                 logger.error(f"{mode} mode invalid while the PIC Restore Directory exists.", discord=True, rows=[
                     [("PIC Path", restore_dir)],
                     [("Mode Options",
@@ -197,114 +187,175 @@ def plex_connect():
                              f"Mode Options:\n"
                              f"    Mode: restore (Restore the bloat images back into Plex)\n"
                              f"    Mode: remove (Remove the bloat images)")
-            elif do_metadata:
-
-                def get_items(sec):
-                    for parent in sec.all():
-                        yield parent
-
-                        if parent.type in ("show", "artist"):
-                            for child in parent:
-                                yield child
-
-                                if child.type in ("season", "album"):
-                                    for grandchild in child:
-                                        yield grandchild
+            if do_metadata:
+
+                # Check if Running
+                if local_run:
+                    if any([os.path.exists(os.path.join(databases_dir, f"{plex_db_name}-{t}")) for t in ["shm", "wal"]]):
+                        temp_db_warning = "At least one of the SQLite temp files is next to the Plex DB; this indicates Plex is still running\n" \
+                                          "and copying the DB carries a small risk of data loss as the temp files may not have updated the\n" \
+                                          "main DB yet.\n" \
+                                          "If you restarted Plex just before running Plex Image Cleanup, and are still getting this error, it\n" \
+                                          "can be ignored by using `--ignore` or setting `IGNORE_RUNNING=True` in the .env file."
+                        if not pmmargs["ignore"]:
+                            raise Failed(temp_db_warning)
+                        logger.info(temp_db_warning)
+                        logger.info("Warning Ignored")
+
+                # Download DB
+                logger.separator("Database")
+                dbpath = os.path.join(config_dir, plex_db_name)
+                temp_dir = os.path.join(config_dir, "temp")
+
+                is_usable = False
+                if pmmargs["existing"]:
+                    if os.path.exists(dbpath):
+                        is_usable, time_ago = util.in_the_last(dbpath, hours=2)
+                        if is_usable:
+                            logger.info(f"Using existing database (age: {time_ago})")
+                        else:
+                            logger.info(f"Existing database too old to use (age: {time_ago})")
+                    else:
+                        logger.warning(f"Existing Database not found {'making' if local_run else 'downloading'} a new copy")
 
-                    if sec.type != "photo":
-                        for col in sec.collections():
-                            yield col
+                report.append([("Database", "")])
+                fields = []
+                if is_usable:
+                    report.append([("", "Using Existing Database")])
+                else:
+                    report.append([("", f"{'Copied' if local_run else 'Downloaded'} New Database")])
+                    if os.path.exists(dbpath):
+                        os.remove(dbpath)
+                    if os.path.exists(temp_dir):
+                        shutil.rmtree(temp_dir)
+                    os.makedirs(temp_dir)
+                    if local_run:
+                        logger.info(f"Copying database from {os.path.join(databases_dir, plex_db_name)}", start="database")
+                        util.copy_with_progress(os.path.join(databases_dir, plex_db_name), dbpath, description=f"Copying database file to: {dbpath}")
+                    else:
+                        logger.info("Downloading Database via the Plex API. First Plex will make a backup of your database.\n"
+                                    "To see progress, log into Plex and go to Settings | Manage | Console and filter on Database.\n"
+                                    "You can also look at the Plex Dashboard to see the progress of the Database backup.", start="database")
+                        logger.info()
+
+                        # fetch the data to be saved
+                        headers = {'X-Plex-Token': server._token}
+                        response = server._session.get(server.url('/diagnostics/databases'), headers=headers, stream=True)
+                        if response.status_code not in (200, 201, 204):
+                            message = f"({response.status_code}) {codes.get(response.status_code)[0]}; {response.url} "
+                            raise Failed(f"Database Download Failed Try Using Local Copy: {message} " + response.text.replace('\n', ' '))
+                        os.makedirs(temp_dir, exist_ok=True)
+
+                        filename = None
+                        if response.headers.get('Content-Disposition'):
+                            filename = re.findall(r'filename=\"(.+)\"', response.headers.get('Content-Disposition'))
+                            filename = filename[0] if filename[0] else None
+                        if not filename:
+                            raise Failed("DB Filename not found")
+                        filename = os.path.basename(filename)
+                        fullpath = os.path.join(temp_dir, filename)
+                        extension = os.path.splitext(fullpath)[-1]
+                        if not extension:
+                            contenttype = response.headers.get('content-type')
+                            if contenttype and 'image' in contenttype:
+                                fullpath += contenttype.split('/')[1]
+
+                        with tqdm(unit='B', unit_scale=True, total=int(response.headers.get('content-length', 0)), desc=f"| {filename}") as bar:
+                            with open(fullpath, 'wb') as handle:
+                                for chunk in response.iter_content(chunk_size=4024):
+                                    handle.write(chunk)
+                                    bar.update(len(chunk))
+
+                        # check we want to unzip the contents
+                        if fullpath.endswith('zip'):
+                            with zipfile.ZipFile(fullpath, 'r') as handle:
+                                handle.extractall(temp_dir)
+
+                        if backup_file := next((o for o in os.listdir(temp_dir) if str(o).startswith("databaseBackup")), None):
+                            shutil.move(os.path.join(temp_dir, backup_file), dbpath)
+                        if os.path.exists(temp_dir):
+                            shutil.rmtree(temp_dir)
+                    if not os.path.exists(dbpath):
+                        raise Failed(f"File Error: Database File Could not be {'Copied' if local_run else 'Downloaded'}")
+                    logger.info(f"Plex Database {'Copy' if local_run else 'Download'} Complete")
+                    logger.info(f"Database {'Copied' if local_run else 'Downloaded'} to: {dbpath}")
+                    logger.info(f"Runtime: {logger.runtime()}")
+                    fields.append(("Copied" if local_run else "Downloaded", f"{logger.runtime('database')}"))
 
-                    for ply in sec.playlists():
-                        yield ply
+                # Query DB
+                urls = []
+                with sqlite3.connect(dbpath) as connection:
+                    logger.info()
+                    logger.info("Database Opened Querying For In-Use Images", start="query")
+                    connection.row_factory = sqlite3.Row
+                    with closing(connection.cursor()) as cursor:
+                        for field in ["user_thumb_url", "user_art_url", "user_banner_url"]:
+                            cursor.execute(f"SELECT {field} AS url FROM metadata_items WHERE {field} like 'upload://%' OR {field} like 'metadata://%'")
+                            urls.extend([requests.utils.urlparse(r["url"]).path.split("/")[-1] for r in cursor.fetchall() if r and r["url"]])
+                logger.info(f"{len(urls)} In-Use Images Found")
+                logger.info(f"Runtime: {logger.runtime()}")
+                fields.append(("Query", f"{logger.runtime('query')}"))
 
-                def get_resources(media):
-                    for method in ["posters", "arts", "themes"]:
-                        for res in getattr(media, method, lambda: [])():
-                            yield res
+                report.append(fields)
 
+                # Scan for Bloat Images
                 logger.separator(f"{modes[mode]['ing']} Bloat Images")
-                logger.info(f"Scanning Plex Items For Bloat Images: {meta_dir}", start="scanning")
-
-                total_images = 0
-                total_size = 0
-                paths_worked = []
-                for section in server.library.sections():
-                    section_images = 0
-                    logger.info(f"Scanning Library: {section.title}", start=section.title)
-                    for item in tqdm(get_items(section), unit=" plex items", desc="| Scanning Plex Items for Bloat Images"):
-                        for resource in get_resources(item):
-                            if resource.ratingKey.startswith("upload://") and not resource.selected:
-                                if item.type == "season":
-                                    guid = item.parentGuid
-                                elif item.type == "episode":
-                                    guid = item.grandparentGuid
-                                elif item.type == "track":
-                                    guid = item.parentGuid
-                                else:
-                                    guid = item.guid
-
-                                guid_hash = sha1(guid.encode("utf-8")).hexdigest()
-                                resource_path = resource.ratingKey.split("://")[-1]
-
-                                local_path = Path(meta_dirs[item.type]) / guid_hash[0] / f"{guid_hash[1:]}.bundle" / "Uploads" / resource_path
-                                source_path = meta_dir / local_path
-                                if source_path in paths_worked:
-                                    continue
-                                paths_worked.append(source_path)
-
-                                if source_path.exists():
-                                    section_images += 1
-                                    total_size += source_path.stat().st_size
-                                    if mode == "move":
-                                        destination_path = restore_dir / local_path.with_suffix(".jpg")
-                                        msg = f"MOVE: {source_path} --> {destination_path}"
-                                        destination_path.parent.mkdir(parents=True, exist_ok=True)
-                                        source_path.rename(destination_path)
-                                    elif mode == "remove":
-                                        msg = f"REMOVE: {source_path}"
-                                        source_path.unlink()
-                                    else:
-                                        msg = f"BLOAT FILE: {source_path}"
-                                else:
-                                    msg = f"BLOAT FILE NOT FOUND: {source_path}"
-
-                                if mode == "report":
-                                    logger.debug(msg)
-                                else:
-                                    logger.trace(msg)
-                    total_images += section_images
-                    logger.info(f"{section_images} Bloat Images Found in {section.title}")
+                logger.info(f"Scanning Metadata Directory For Bloat Images: {meta_dir}", start="scanning")
+                bloat_paths = [
+                    os.path.join(r, f) for r, d, fs in tqdm(os.walk(meta_dir), unit=" directories", desc="| Scanning Metadata for Bloat Images") for f in fs
+                    if 'Contents' not in r and "." not in f and f not in urls
+                ]
+                logger.info(f"{len(bloat_paths)} Bloat Images Found")
+                logger.info(f"Runtime: {logger.runtime()}")
+
+                # Work on Bloat Images
+                if bloat_paths:
+                    logger.info()
+                    logger.info(f"{modes[mode]['ing']} Bloat Images", start="work")
+                    logger["size"] = 0
+                    messages = []
+                    for path in tqdm(bloat_paths, unit=f" {modes[mode]['ed'].lower()}", desc=f"| {modes[mode]['ing']} Bloat Images"):
+                        logger["size"] += os.path.getsize(path)
+                        if mode == "move":
+                            messages.append(f"MOVE: {path} --> {os.path.join(restore_dir, path.removeprefix(meta_dir)[1:])}.jpg")
+                            util.move_path(path, meta_dir, restore_dir, suffix=".jpg")
+                        elif mode == "remove":
+                            messages.append(f"REMOVE: {path}")
+                            os.remove(path)
+                        else:
+                            messages.append(f"BLOAT FILE: {path}")
+                    for message in messages:
+                        if mode == "report":
+                            logger.debug(message)
+                        else:
+                            logger.trace(message)
+                    logger.info(f"{modes[mode]['ing']} Complete: {modes[mode]['ed']} {len(bloat_paths)} Bloat Images")
+                    space = util.format_bytes(logger["size"])
+                    logger.info(f"{modes[mode]['space']}: {space}")
                     logger.info(f"Runtime: {logger.runtime()}")
-
-                logger.info(f"{modes[mode]['ing']} Complete: {modes[mode]['ed']} {total_images} Bloat Images")
-                space = util.format_bytes(total_size)
-                logger.info(f"{modes[mode]['space']}: {space}")
-                logger.info(f"Runtime: {logger.runtime(name='scanning')}")
-                report.append([(f"{modes[mode]['ing']} Bloat Images", "")])
-                report.append([("", f"{space} of {modes[mode]['space']} {modes[mode]['ing']} {total_images} Files")])
-                report.append([(f"{mode.capitalize()} Time", f"{logger.runtime('scanning')}")])
+                report.append([(f"{modes[mode]['ing']} Bloat Images", "")])
+                report.append([("", f"{space} of {modes[mode]['space']} {modes[mode]['ing']} {len(bloat_paths)} Files")])
+                report.append([("Scan Time", f"{logger.runtime('scanning')}"), (f"{mode.capitalize()} Time", f"{logger.runtime('work')}")])
 
             elif mode in ["restore", "clear"]:
-                if not restore_dir.exists():
+                if not os.path.exists(restore_dir):
                     raise Failed(f"Restore Failed: PIC Restore Directory does not exist: {restore_dir}")
 
                 if mode == "restore":
                     logger.separator("Restore Renamed Bloat Images")
                     logger.info("Scanning for Renamed Bloat Images to Restore", start="scanning")
-                    restore_images = [Path(f) for f in tqdm(glob.iglob(os.path.join(restore_dir, "**", "*.jpg"), recursive=True), unit=" image", desc="| Scanning for Renamed Bloat Images to Restore")]
+                    restore_images = [f for f in tqdm(glob.iglob(os.path.join(restore_dir, "**", "*.jpg"), recursive=True), unit=" image", desc="| Scanning for Renamed Bloat Images to Restore")]
                     logger.info(f"Scanning Complete: Found {len(restore_images)} Renamed Bloat Images to Restore")
                     logger.info(f"Runtime: {logger.runtime()}")
                     logger.info()
                     logger.info("Restoring Renamed Bloat Images", start="work")
-                    for source_path in tqdm(restore_images, unit=" restored", desc="| Restoring Renamed Bloat Images"):
-                        destination_path = (meta_dir / str(source_path).removeprefix(str(restore_dir))[1:]).with_suffix(".jpg")
-                        messages.append(f"RENAME: {source_path}\n ----> {destination_path}\n")
-                        destination_path.parent.mkdir(exist_ok=True)
-                        source_path.rename(destination_path)
+                    for path in tqdm(restore_images, unit=" restored", desc="| Restoring Renamed Bloat Images"):
+                        messages.append(f"RENAME: {path}\n ----> {os.path.join(meta_dir, path.removeprefix(restore_dir)[1:]).removesuffix('.jpg')}\n")
+                        util.move_path(path, restore_dir, meta_dir, suffix='.jpg', append=False)
                     shutil.rmtree(restore_dir)
                     for message in messages:
                         logger.trace(message)
+                    messages = []
                     logger.info(f"Restore Complete: Restored {len(restore_images)} Renamed Bloat Images")
                     logger.info(f"Runtime: {logger.runtime()}")
                     report.append([("Restore Renamed Bloat Images", "")])
@@ -313,18 +364,18 @@ def get_resources(media):
 
                     logger.separator("Removing PIC Restore Directory")
                     logger.info("Scanning PIC Restore for Bloat Images to Remove", start="scanning")
-                    del_paths = [Path(r) / f for r, d, fs in tqdm(os.walk(restore_dir), unit=" directories", desc="| Scanning PIC Restore for Bloat Images to Remove") for f in fs]
+                    del_paths = [os.path.join(r, f) for r, d, fs in tqdm(os.walk(restore_dir), unit=" directories", desc="| Scanning PIC Restore for Bloat Images to Remove") for f in fs]
                     logger.info(f"Scanning Complete: Found {len(del_paths)} Bloat Images in the PIC Directory to Remove")
                     logger.info(f"Runtime: {logger.runtime()}")
                     logger.info()
+                    messages = []
                     logger.info("Removing PIC Restore Bloat Images", start="work")
                     logger["size"] = 0
                     for path in tqdm(del_paths, unit=" removed", desc="| Removing PIC Restore Bloat Images"):
                         messages.append(f"REMOVE: {path}")
-                        path = Path(path)
-                        logger["size"] += path.stat().st_size
-                        path.unlink()
+                        logger["size"] += os.path.getsize(path)
+                        os.remove(path)
                     shutil.rmtree(restore_dir)
                     for message in messages:
                         logger.trace(message)
 
@@ -352,10 +403,10 @@ def get_resources(media):
                 logger["size"] = 0
                 messages = []
                 for f in tqdm(transcode_images, unit=" removed", desc=f"| {head}"):
-                    file = transcoder_dir / f
+                    file = os.path.join(transcoder_dir, f)
                     messages.append(f"REMOVE: {file}")
-                    logger["size"] += file.stat().st_size
-                    file.unlink()
+                    logger["size"] += os.path.getsize(file)
+                    os.remove(file)
                 for message in messages:
                     logger.trace(message)
 
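The rewritten scan in `plex_image_cleanup.py` no longer walks Plex libraries through the API; it walks the `Metadata` directory on disk and treats any extension-less file that sits outside a `Contents` folder and is not referenced by the database as a bloat image. Below is a rough, standalone sketch of that filter under assumed inputs (`META_DIR` and the `find_bloat_paths` helper are illustrative names, and this version only reports sizes; the script itself also implements the move, remove, and restore modes):

```python
import os

# Hypothetical path for illustration; the script derives this from PLEX_PATH.
META_DIR = "/plex/Metadata"


def find_bloat_paths(meta_dir, in_use):
    """Collect files that look like uploaded images Plex no longer references.

    Uploaded posters and art are stored as extension-less files inside the
    bundle Uploads folders; anything under a Contents folder, or whose name
    appears in `in_use` (the names pulled from the database), is kept.
    """
    bloat = []
    for root, _dirs, files in os.walk(meta_dir):
        if "Contents" in root:  # skip Contents folders, matching the script's filter
            continue
        for name in files:
            if "." not in name and name not in in_use:
                bloat.append(os.path.join(root, name))
    return bloat


if __name__ == "__main__":
    paths = find_bloat_paths(META_DIR, in_use=set())
    total = sum(os.path.getsize(p) for p in paths)
    print(f"{len(paths)} bloat images found ({total:,} bytes)")
```

Pairing this walk with the database query for in-use names is what lets the new version run without touching the Plex API at all when `LOCAL_DB=True`, which is the main motivation for the change in this diff.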