From 275f54aa097a038e28cfb5466d5b72fe688eed4c Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 06:44:44 -0400
Subject: [PATCH 01/24] refactor: move builder to package

---
 modules/{builder.py => builder/__init__.py} | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
 rename modules/{builder.py => builder/__init__.py} (99%)

diff --git a/modules/builder.py b/modules/builder/__init__.py
similarity index 99%
rename from modules/builder.py
rename to modules/builder/__init__.py
index 90980273c..15bc3706d 100644
--- a/modules/builder.py
+++ b/modules/builder/__init__.py
@@ -17,7 +17,7 @@ advance_new_agent = ["item_metadata_language", "item_use_original_title"]
 advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"]
 all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + \
-               letterboxd.builders + mal.builders + mojo.builders + plex.builders + reciperr.builders + tautulli.builders + \
+               letterboxd.builders + mal.builders + mojo.builders + plex.builder + reciperr.builders + tautulli.builders + \
                tmdb.builders + trakt.builders + tvdb.builders + mdblist.builders + radarr.builders + sonarr.builders
 show_only_builders = [
     "tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "tmdb_airing_today",
     "tmdb_on_the_air", "builder_level", "item_tmdb_season_titles", "sonarr_all", "sonarr_taglist"
 ]
@@ -1046,7 +1046,7 @@ def apply_vars(input_str, var_set, var_key, var_limit):
                self._mal(method_name, method_data)
            elif method_name in mojo.builders:
                self._mojo(method_name, method_data)
-           elif method_name in plex.builders or method_final in plex.searches:
+           elif method_name in plex.builder or method_final in plex.searches:
                self._plex(method_name, method_data)
            elif method_name in reciperr.builders:
                self._reciperr(method_name, method_data)

From 1505ee8acb8116e24d766ac9c3ac7a961e8584c8 Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 07:01:03 -0400
Subject: [PATCH 02/24] fix: plex.builder -> plex.builders

---
 modules/builder/__init__.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index 15bc3706d..90980273c 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -17,7 +17,7 @@ advance_new_agent = ["item_metadata_language", "item_use_original_title"]
 advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"]
 all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + \
-               letterboxd.builders + mal.builders + mojo.builders + plex.builder + reciperr.builders + tautulli.builders + \
+               letterboxd.builders + mal.builders + mojo.builders + plex.builders + reciperr.builders + tautulli.builders + \
                tmdb.builders + trakt.builders + tvdb.builders + mdblist.builders + radarr.builders + sonarr.builders
 show_only_builders = [
     "tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "tmdb_airing_today",
     "tmdb_on_the_air", "builder_level", "item_tmdb_season_titles", "sonarr_all", "sonarr_taglist"
 ]
@@ -1046,7 +1046,7 @@ def apply_vars(input_str, var_set, var_key, var_limit):
                self._mal(method_name, method_data)
            elif method_name in mojo.builders:
                self._mojo(method_name, method_data)
-           elif method_name in plex.builder or method_final in plex.searches:
+           elif method_name in plex.builders or method_final in plex.searches:
                self._plex(method_name, method_data)
            elif method_name in reciperr.builders:
                self._reciperr(method_name, method_data)

From eb88474cbc3448f196d3f93ccdfe89d9f870ae5e Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 07:09:21 -0400
Subject: [PATCH 03/24] refactor: separate out _config.py

---
 modules/builder/__init__.py | 168 ------------------------------------
 modules/builder/_config.py  | 168 ++++++++++++++++++++++++++++++++++++
 2 files changed, 168 insertions(+), 168 deletions(-)
 create mode 100644 modules/builder/_config.py

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index 90980273c..8ee174f97 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -13,174 +13,6 @@ from urllib.parse import quote
 logger = util.logger
-
-advance_new_agent = ["item_metadata_language", "item_use_original_title"]
-advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"]
-all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + \
-               letterboxd.builders + mal.builders + mojo.builders + plex.builders + reciperr.builders + tautulli.builders + \
-               tmdb.builders + trakt.builders + tvdb.builders + mdblist.builders + radarr.builders + sonarr.builders
-show_only_builders = [
-    "tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "tmdb_airing_today",
-    "tmdb_on_the_air", "builder_level", "item_tmdb_season_titles", "sonarr_all", "sonarr_taglist"
-]
-movie_only_builders = [
-    "letterboxd_list", "letterboxd_list_details", "icheckmovies_list", "icheckmovies_list_details", "stevenlu_popular",
-    "tmdb_collection", "tmdb_collection_details", "tmdb_movie", "tmdb_movie_details", "tmdb_now_playing", "item_edition",
-    "tvdb_movie", "tvdb_movie_details", "tmdb_upcoming", "trakt_boxoffice", "reciperr_list", "radarr_all", "radarr_taglist",
-    "mojo_world", "mojo_domestic", "mojo_international", "mojo_record", "mojo_all_time", "mojo_never"
-]
-music_only_builders = ["item_album_sorting"]
-summary_details = [
-    "summary", "tmdb_summary", "tmdb_description", "tmdb_biography", "tvdb_summary",
-    "tvdb_description", "trakt_description", "letterboxd_description", "icheckmovies_description"
-]
-poster_details = ["url_poster", "tmdb_poster", "tmdb_profile", "tvdb_poster", "file_poster"]
-background_details = ["url_background", "tmdb_background", "tvdb_background", "file_background"]
-boolean_details = [
-    "show_filtered", "show_missing", "save_report", "missing_only_released", "only_filter_missing",
-    "delete_below_minimum", "asset_folders", "create_asset_folders"
-]
-scheduled_boolean = ["visible_library", "visible_home", "visible_shared"]
-string_details = ["sort_title", "content_rating", "name_mapping"]
-ignored_details = [
-    "smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "variables", "test", "suppress_overlays",
-    "delete_not_scheduled", "tmdb_person", "build_collection", "collection_order", "builder_level", "overlay", "kometa_poster",
-    "validate_builders", "libraries", "sync_to_users", "exclude_users", "collection_name", "playlist_name", "name", "limit",
-    "blank_collection", "allowed_library_types", "run_definition", "delete_playlist", "ignore_blank_results", "only_run_on_create",
-    "delete_collections_named", "tmdb_person_offset", "append_label", "key_name", "translation_key", "translation_prefix", "tmdb_birthday"
-]
-details = [
-    "ignore_ids", "ignore_imdb_ids", "server_preroll", "changes_webhooks", "collection_filtering", "collection_mode", "url_theme",
-    "file_theme", "minimum_items", "label", "album_sorting", "cache_builders", "tmdb_region", "default_percent"
-] + boolean_details +
scheduled_boolean + string_details -collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test", "item_label"] + \ - poster_details + background_details + summary_details + string_details + all_builders -item_false_details = ["item_lock_background", "item_lock_poster", "item_lock_title"] -item_bool_details = ["item_tmdb_season_titles", "revert_overlay", "item_assets", "item_refresh"] + item_false_details -item_details = ["non_item_remove_label", "item_label", "item_genre", "item_edition", "item_radarr_tag", "item_sonarr_tag", "item_refresh_delay"] + item_bool_details + list(plex.item_advance_keys.keys()) -none_details = ["label.sync", "item_label.sync", "item_genre.sync", "radarr_taglist", "sonarr_taglist", "item_edition"] -none_builders = ["radarr_tag_list", "sonarr_taglist"] -radarr_details = [ - "radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_folder", "radarr_monitor", - "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag", "item_radarr_tag", "radarr_ignore_cache", -] -sonarr_details = [ - "sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", - "sonarr_series", "sonarr_quality", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_tag", "item_sonarr_tag", "sonarr_ignore_cache" -] -album_details = ["non_item_remove_label", "item_label", "item_album_sorting"] -sub_filters = [ - "filepath", "audio_track_title", "subtitle_track_title", "resolution", "audio_language", "subtitle_language", "has_dolby_vision", - "channels", "height", "width", "aspect", "audio_codec", "audio_profile", "video_codec", "video_profile", "versions" -] -filters_by_type = { - "movie_show_season_episode_artist_album_track": ["title", "summary", "collection", "has_collection", "added", "last_played", "user_rating", "plays", "filepath", "label", "audio_track_title", "subtitle_track_title", "versions"], - "movie_show_season_episode_album_track": ["year"], - "movie_show_season_episode_artist_album": ["has_overlay"], - "movie_show_season_episode": ["resolution", "audio_language", "subtitle_language", "has_dolby_vision", "channels", "height", "width", "aspect", "audio_codec", "audio_profile", "video_codec", "video_profile"], - "movie_show_episode_album": ["release", "critic_rating", "history"], - "movie_show_episode_track": ["duration"], - "movie_show_artist_album": ["genre"], - "movie_show_episode": ["actor", "content_rating", "audience_rating"], - "movie_show": ["studio", "original_language", "tmdb_vote_count", "tmdb_vote_average", "tmdb_year", "tmdb_genre", "tmdb_title", "tmdb_keyword", "imdb_keyword"], - "movie_episode": ["director", "producer", "writer"], - "movie_artist": ["country"], - "show_artist": ["folder"], - "show_season": ["episodes"], - "artist_album": ["tracks"], - "movie": ["edition", "has_edition", "stinger_rating", "has_stinger"], - "show": ["seasons", "tmdb_status", "tmdb_type", "origin_country", "network", "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never", "tvdb_title", "tvdb_status", "tvdb_genre"], - "artist": ["albums"], - "album": ["record_label"] -} -filters = { - "movie": [item for check, sub in filters_by_type.items() for item in sub if "movie" in check], - "show": [item for check, sub in filters_by_type.items() for item in sub if "show" in check], - "season": [item for check, sub in filters_by_type.items() for item in sub if "season" in check], 
- "episode": [item for check, sub in filters_by_type.items() for item in sub if "episode" in check], - "artist": [item for check, sub in filters_by_type.items() for item in sub if "artist" in check], - "album": [item for check, sub in filters_by_type.items() for item in sub if "album" in check], - "track": [item for check, sub in filters_by_type.items() for item in sub if "track" in check] -} -tmdb_filters = [ - "original_language", "origin_country", "tmdb_vote_count", "tmdb_vote_average", "tmdb_year", "tmdb_keyword", "tmdb_genre", - "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never", "tmdb_status", "tmdb_type", "tmdb_title" -] -tvdb_filters = ["tvdb_title", "tvdb_status", "tvdb_genre"] -imdb_filters = ["imdb_keyword"] -string_filters = [ - "title", "summary", "studio", "edition", "record_label", "folder", "filepath", "audio_track_title", "subtitle_track_title", "tmdb_title", - "audio_codec", "audio_profile", "video_codec", "video_profile", "tvdb_title", "tvdb_status" -] -string_modifiers = ["", ".not", ".is", ".isnot", ".begins", ".ends", ".regex"] -tag_filters = [ - "actor", "collection", "content_rating", "country", "director", "network", "genre", "label", "producer", "year", - "origin_country", "writer", "resolution", "audio_language", "subtitle_language", "tmdb_keyword", "tmdb_genre", "imdb_keyword", "tvdb_genre" -] -tag_modifiers = ["", ".not", ".regex", ".count_gt", ".count_gte", ".count_lt", ".count_lte"] -boolean_filters = ["has_collection", "has_edition", "has_overlay", "has_dolby_vision", "has_stinger"] -date_filters = ["release", "added", "last_played", "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never"] -date_modifiers = ["", ".not", ".before", ".after", ".regex"] -number_filters = [ - "year", "tmdb_year", "critic_rating", "audience_rating", "user_rating", "tmdb_vote_count", "tmdb_vote_average", "plays", "duration", - "channels", "height", "width", "aspect", "versions", "stinger_rating"] -number_modifiers = ["", ".not", ".gt", ".gte", ".lt", ".lte"] -special_filters = [ - "history", "episodes", "seasons", "albums", "tracks", "original_language", "original_language.not", - "tmdb_status", "tmdb_status.not", "tmdb_type", "tmdb_type.not" -] -all_filters = boolean_filters + special_filters + \ - [f"{f}{m}" for f in string_filters for m in string_modifiers] + \ - [f"{f}{m}" for f in tag_filters for m in tag_modifiers] + \ - [f"{f}{m}" for f in date_filters for m in date_modifiers] + \ - [f"{f}{m}" for f in number_filters for m in number_modifiers] -date_attributes = plex.date_attributes + ["first_episode_aired", "last_episode_aired", "last_episode_aired_or_never"] -year_attributes = plex.year_attributes + ["tmdb_year"] -number_attributes = plex.number_attributes + ["channels", "height", "width", "tmdb_vote_count"] -tag_attributes = plex.tag_attributes -string_attributes = plex.string_attributes + string_filters -float_attributes = plex.float_attributes + ["aspect", "tmdb_vote_average"] -boolean_attributes = plex.boolean_attributes + boolean_filters -smart_invalid = ["collection_order", "builder_level"] -smart_only = ["collection_filtering"] -smart_url_invalid = ["filters", "run_again", "sync_mode", "show_filtered", "show_missing", "save_report", "smart_label"] + radarr_details + sonarr_details -custom_sort_builders = [ - "plex_search", "plex_watchlist", "plex_pilots", "tmdb_list", "tmdb_popular", "tmdb_now_playing", "tmdb_top_rated", - "tmdb_trending_daily", "tmdb_trending_weekly", "tmdb_discover", "reciperr_list", "trakt_chart", 
"trakt_userlist", - "tvdb_list", "imdb_chart", "imdb_list", "imdb_award", "imdb_search", "imdb_watchlist", "stevenlu_popular", "anidb_popular", - "tmdb_upcoming", "tmdb_airing_today", "tmdb_on_the_air", "trakt_list", "trakt_watchlist", "trakt_collection", - "trakt_trending", "trakt_popular", "trakt_boxoffice", "trakt_collected_daily", "trakt_collected_weekly", - "trakt_collected_monthly", "trakt_collected_yearly", "trakt_collected_all", "trakt_recommendations", - "trakt_recommended_personal", "trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", - "trakt_recommended_yearly", "trakt_recommended_all", "trakt_watched_daily", "trakt_watched_weekly", - "trakt_watched_monthly", "trakt_watched_yearly", "trakt_watched_all", - "tautulli_popular", "tautulli_watched", "mdblist_list", "letterboxd_list", "icheckmovies_list", - "anilist_top_rated", "anilist_popular", "anilist_trending", "anilist_search", "anilist_userlist", - "mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_movie", "mal_ova", "mal_special", "mal_search", - "mal_popular", "mal_favorite", "mal_suggested", "mal_userlist", "mal_season", "mal_genre", "mal_studio", - "mojo_world", "mojo_domestic", "mojo_international", "mojo_record", "mojo_all_time", "mojo_never" -] -episode_parts_only = ["plex_pilots"] -overlay_only = ["overlay", "suppress_overlays"] -overlay_attributes = [ - "filters", "limit", "show_missing", "save_report", "missing_only_released", "minimum_items", "cache_builders", "tmdb_region", "default_percent" -] + all_builders + overlay_only -parts_collection_valid = [ - "filters", "plex_all", "plex_search", "trakt_list", "trakt_list_details", "collection_filtering", "collection_mode", "label", "visible_library", "limit", - "visible_home", "visible_shared", "show_missing", "save_report", "missing_only_released", "server_preroll", "changes_webhooks", - "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh", "item_refresh_delay", "imdb_list", "imdb_search", - "cache_builders", "url_theme", "file_theme", "item_label", "default_percent", "non_item_remove_label" -] + episode_parts_only + summary_details + poster_details + background_details + string_details -playlist_attributes = [ - "filters", "name_mapping", "show_filtered", "show_missing", "save_report", "allowed_library_types", "run_definition", - "missing_only_released", "only_filter_missing", "delete_below_minimum", "ignore_ids", "ignore_imdb_ids", - "server_preroll", "changes_webhooks", "minimum_items", "cache_builders", "default_percent" -] + custom_sort_builders + summary_details + poster_details + radarr_details + sonarr_details -music_attributes = [ - "non_item_remove_label", "item_label", "collection_filtering", "item_lock_background", "item_lock_poster", "item_lock_title", - "item_assets", "item_refresh", "item_refresh_delay", "plex_search", "plex_all", "filters" -] + details + summary_details + poster_details + background_details - class CollectionBuilder: def __init__(self, config, metadata, name, data, library=None, overlay=None, extra=None): self.config = config diff --git a/modules/builder/_config.py b/modules/builder/_config.py new file mode 100644 index 000000000..23e49e2ed --- /dev/null +++ b/modules/builder/_config.py @@ -0,0 +1,168 @@ +from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, mojo, plex, radarr, reciperr, sonarr, tautulli, tmdb, trakt, tvdb, mdblist + +advance_new_agent = ["item_metadata_language", "item_use_original_title"] +advance_show = ["item_episode_sorting", 
"item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"] +all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + \ + letterboxd.builders + mal.builders + mojo.builders + plex.builders + reciperr.builders + tautulli.builders + \ + tmdb.builders + trakt.builders + tvdb.builders + mdblist.builders + radarr.builders + sonarr.builders +show_only_builders = [ + "tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "tmdb_airing_today", + "tmdb_on_the_air", "builder_level", "item_tmdb_season_titles", "sonarr_all", "sonarr_taglist" +] +movie_only_builders = [ + "letterboxd_list", "letterboxd_list_details", "icheckmovies_list", "icheckmovies_list_details", "stevenlu_popular", + "tmdb_collection", "tmdb_collection_details", "tmdb_movie", "tmdb_movie_details", "tmdb_now_playing", "item_edition", + "tvdb_movie", "tvdb_movie_details", "tmdb_upcoming", "trakt_boxoffice", "reciperr_list", "radarr_all", "radarr_taglist", + "mojo_world", "mojo_domestic", "mojo_international", "mojo_record", "mojo_all_time", "mojo_never" +] +music_only_builders = ["item_album_sorting"] +summary_details = [ + "summary", "tmdb_summary", "tmdb_description", "tmdb_biography", "tvdb_summary", + "tvdb_description", "trakt_description", "letterboxd_description", "icheckmovies_description" +] +poster_details = ["url_poster", "tmdb_poster", "tmdb_profile", "tvdb_poster", "file_poster"] +background_details = ["url_background", "tmdb_background", "tvdb_background", "file_background"] +boolean_details = [ + "show_filtered", "show_missing", "save_report", "missing_only_released", "only_filter_missing", + "delete_below_minimum", "asset_folders", "create_asset_folders" +] +scheduled_boolean = ["visible_library", "visible_home", "visible_shared"] +string_details = ["sort_title", "content_rating", "name_mapping"] +ignored_details = [ + "smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "variables", "test", "suppress_overlays", + "delete_not_scheduled", "tmdb_person", "build_collection", "collection_order", "builder_level", "overlay", "kometa_poster", + "validate_builders", "libraries", "sync_to_users", "exclude_users", "collection_name", "playlist_name", "name", "limit", + "blank_collection", "allowed_library_types", "run_definition", "delete_playlist", "ignore_blank_results", "only_run_on_create", + "delete_collections_named", "tmdb_person_offset", "append_label", "key_name", "translation_key", "translation_prefix", "tmdb_birthday" +] +details = [ + "ignore_ids", "ignore_imdb_ids", "server_preroll", "changes_webhooks", "collection_filtering", "collection_mode", "url_theme", + "file_theme", "minimum_items", "label", "album_sorting", "cache_builders", "tmdb_region", "default_percent" +] + boolean_details + scheduled_boolean + string_details +collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test", "item_label"] + \ + poster_details + background_details + summary_details + string_details + all_builders +item_false_details = ["item_lock_background", "item_lock_poster", "item_lock_title"] +item_bool_details = ["item_tmdb_season_titles", "revert_overlay", "item_assets", "item_refresh"] + item_false_details +item_details = ["non_item_remove_label", "item_label", "item_genre", "item_edition", "item_radarr_tag", "item_sonarr_tag", "item_refresh_delay"] + item_bool_details + list(plex.item_advance_keys.keys()) +none_details = ["label.sync", "item_label.sync", 
"item_genre.sync", "radarr_taglist", "sonarr_taglist", "item_edition"] +none_builders = ["radarr_tag_list", "sonarr_taglist"] +radarr_details = [ + "radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_folder", "radarr_monitor", + "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag", "item_radarr_tag", "radarr_ignore_cache", +] +sonarr_details = [ + "sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", + "sonarr_series", "sonarr_quality", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_tag", "item_sonarr_tag", "sonarr_ignore_cache" +] +album_details = ["non_item_remove_label", "item_label", "item_album_sorting"] +sub_filters = [ + "filepath", "audio_track_title", "subtitle_track_title", "resolution", "audio_language", "subtitle_language", "has_dolby_vision", + "channels", "height", "width", "aspect", "audio_codec", "audio_profile", "video_codec", "video_profile", "versions" +] +filters_by_type = { + "movie_show_season_episode_artist_album_track": ["title", "summary", "collection", "has_collection", "added", "last_played", "user_rating", "plays", "filepath", "label", "audio_track_title", "subtitle_track_title", "versions"], + "movie_show_season_episode_album_track": ["year"], + "movie_show_season_episode_artist_album": ["has_overlay"], + "movie_show_season_episode": ["resolution", "audio_language", "subtitle_language", "has_dolby_vision", "channels", "height", "width", "aspect", "audio_codec", "audio_profile", "video_codec", "video_profile"], + "movie_show_episode_album": ["release", "critic_rating", "history"], + "movie_show_episode_track": ["duration"], + "movie_show_artist_album": ["genre"], + "movie_show_episode": ["actor", "content_rating", "audience_rating"], + "movie_show": ["studio", "original_language", "tmdb_vote_count", "tmdb_vote_average", "tmdb_year", "tmdb_genre", "tmdb_title", "tmdb_keyword", "imdb_keyword"], + "movie_episode": ["director", "producer", "writer"], + "movie_artist": ["country"], + "show_artist": ["folder"], + "show_season": ["episodes"], + "artist_album": ["tracks"], + "movie": ["edition", "has_edition", "stinger_rating", "has_stinger"], + "show": ["seasons", "tmdb_status", "tmdb_type", "origin_country", "network", "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never", "tvdb_title", "tvdb_status", "tvdb_genre"], + "artist": ["albums"], + "album": ["record_label"] +} +filters = { + "movie": [item for check, sub in filters_by_type.items() for item in sub if "movie" in check], + "show": [item for check, sub in filters_by_type.items() for item in sub if "show" in check], + "season": [item for check, sub in filters_by_type.items() for item in sub if "season" in check], + "episode": [item for check, sub in filters_by_type.items() for item in sub if "episode" in check], + "artist": [item for check, sub in filters_by_type.items() for item in sub if "artist" in check], + "album": [item for check, sub in filters_by_type.items() for item in sub if "album" in check], + "track": [item for check, sub in filters_by_type.items() for item in sub if "track" in check] +} +tmdb_filters = [ + "original_language", "origin_country", "tmdb_vote_count", "tmdb_vote_average", "tmdb_year", "tmdb_keyword", "tmdb_genre", + "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never", "tmdb_status", "tmdb_type", "tmdb_title" +] +tvdb_filters = ["tvdb_title", "tvdb_status", 
"tvdb_genre"] +imdb_filters = ["imdb_keyword"] +string_filters = [ + "title", "summary", "studio", "edition", "record_label", "folder", "filepath", "audio_track_title", "subtitle_track_title", "tmdb_title", + "audio_codec", "audio_profile", "video_codec", "video_profile", "tvdb_title", "tvdb_status" +] +string_modifiers = ["", ".not", ".is", ".isnot", ".begins", ".ends", ".regex"] +tag_filters = [ + "actor", "collection", "content_rating", "country", "director", "network", "genre", "label", "producer", "year", + "origin_country", "writer", "resolution", "audio_language", "subtitle_language", "tmdb_keyword", "tmdb_genre", "imdb_keyword", "tvdb_genre" +] +tag_modifiers = ["", ".not", ".regex", ".count_gt", ".count_gte", ".count_lt", ".count_lte"] +boolean_filters = ["has_collection", "has_edition", "has_overlay", "has_dolby_vision", "has_stinger"] +date_filters = ["release", "added", "last_played", "first_episode_aired", "last_episode_aired", "last_episode_aired_or_never"] +date_modifiers = ["", ".not", ".before", ".after", ".regex"] +number_filters = [ + "year", "tmdb_year", "critic_rating", "audience_rating", "user_rating", "tmdb_vote_count", "tmdb_vote_average", "plays", "duration", + "channels", "height", "width", "aspect", "versions", "stinger_rating"] +number_modifiers = ["", ".not", ".gt", ".gte", ".lt", ".lte"] +special_filters = [ + "history", "episodes", "seasons", "albums", "tracks", "original_language", "original_language.not", + "tmdb_status", "tmdb_status.not", "tmdb_type", "tmdb_type.not" +] +all_filters = boolean_filters + special_filters + \ + [f"{f}{m}" for f in string_filters for m in string_modifiers] + \ + [f"{f}{m}" for f in tag_filters for m in tag_modifiers] + \ + [f"{f}{m}" for f in date_filters for m in date_modifiers] + \ + [f"{f}{m}" for f in number_filters for m in number_modifiers] +date_attributes = plex.date_attributes + ["first_episode_aired", "last_episode_aired", "last_episode_aired_or_never"] +year_attributes = plex.year_attributes + ["tmdb_year"] +number_attributes = plex.number_attributes + ["channels", "height", "width", "tmdb_vote_count"] +tag_attributes = plex.tag_attributes +string_attributes = plex.string_attributes + string_filters +float_attributes = plex.float_attributes + ["aspect", "tmdb_vote_average"] +boolean_attributes = plex.boolean_attributes + boolean_filters +smart_invalid = ["collection_order", "builder_level"] +smart_only = ["collection_filtering"] +smart_url_invalid = ["filters", "run_again", "sync_mode", "show_filtered", "show_missing", "save_report", "smart_label"] + radarr_details + sonarr_details +custom_sort_builders = [ + "plex_search", "plex_watchlist", "plex_pilots", "tmdb_list", "tmdb_popular", "tmdb_now_playing", "tmdb_top_rated", + "tmdb_trending_daily", "tmdb_trending_weekly", "tmdb_discover", "reciperr_list", "trakt_chart", "trakt_userlist", + "tvdb_list", "imdb_chart", "imdb_list", "imdb_award", "imdb_search", "imdb_watchlist", "stevenlu_popular", "anidb_popular", + "tmdb_upcoming", "tmdb_airing_today", "tmdb_on_the_air", "trakt_list", "trakt_watchlist", "trakt_collection", + "trakt_trending", "trakt_popular", "trakt_boxoffice", "trakt_collected_daily", "trakt_collected_weekly", + "trakt_collected_monthly", "trakt_collected_yearly", "trakt_collected_all", "trakt_recommendations", + "trakt_recommended_personal", "trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", + "trakt_recommended_yearly", "trakt_recommended_all", "trakt_watched_daily", "trakt_watched_weekly", + "trakt_watched_monthly", 
"trakt_watched_yearly", "trakt_watched_all", + "tautulli_popular", "tautulli_watched", "mdblist_list", "letterboxd_list", "icheckmovies_list", + "anilist_top_rated", "anilist_popular", "anilist_trending", "anilist_search", "anilist_userlist", + "mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_movie", "mal_ova", "mal_special", "mal_search", + "mal_popular", "mal_favorite", "mal_suggested", "mal_userlist", "mal_season", "mal_genre", "mal_studio", + "mojo_world", "mojo_domestic", "mojo_international", "mojo_record", "mojo_all_time", "mojo_never" +] +episode_parts_only = ["plex_pilots"] +overlay_only = ["overlay", "suppress_overlays"] +overlay_attributes = [ + "filters", "limit", "show_missing", "save_report", "missing_only_released", "minimum_items", "cache_builders", "tmdb_region", "default_percent" +] + all_builders + overlay_only +parts_collection_valid = [ + "filters", "plex_all", "plex_search", "trakt_list", "trakt_list_details", "collection_filtering", "collection_mode", "label", "visible_library", "limit", + "visible_home", "visible_shared", "show_missing", "save_report", "missing_only_released", "server_preroll", "changes_webhooks", + "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh", "item_refresh_delay", "imdb_list", "imdb_search", + "cache_builders", "url_theme", "file_theme", "item_label", "default_percent", "non_item_remove_label" +] + episode_parts_only + summary_details + poster_details + background_details + string_details +playlist_attributes = [ + "filters", "name_mapping", "show_filtered", "show_missing", "save_report", "allowed_library_types", "run_definition", + "missing_only_released", "only_filter_missing", "delete_below_minimum", "ignore_ids", "ignore_imdb_ids", + "server_preroll", "changes_webhooks", "minimum_items", "cache_builders", "default_percent" +] + custom_sort_builders + summary_details + poster_details + radarr_details + sonarr_details +music_attributes = [ + "non_item_remove_label", "item_label", "collection_filtering", "item_lock_background", "item_lock_poster", "item_lock_title", + "item_assets", "item_refresh", "item_refresh_delay", "plex_search", "plex_all", "filters" +] + details + summary_details + poster_details + background_details From 1f78e9283db2218fe16a8809e4130d89352ef51f Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 07:32:02 -0400 Subject: [PATCH 04/24] refactor: _add_methods --- modules/builder/__init__.py | 992 +------------------------------- modules/builder/_add_methods.py | 992 ++++++++++++++++++++++++++++++++ 2 files changed, 993 insertions(+), 991 deletions(-) create mode 100644 modules/builder/_add_methods.py diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 8ee174f97..a0d19a818 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -1,7 +1,6 @@ import os, re, time from arrapi import ArrException -from datetime import datetime, timedelta -from dateutil.relativedelta import relativedelta +from datetime import datetime from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, mojo, plex, radarr, reciperr, sonarr, tautulli, tmdb, trakt, tvdb, mdblist, util from modules.util import Failed, FilterFailed, NonExisting, NotScheduled, NotScheduledRange, Deleted from modules.overlay import Overlay @@ -9,7 +8,6 @@ from plexapi.audio import Artist, Album, Track from plexapi.exceptions import NotFound from plexapi.video import Movie, Show, Season, Episode -from requests.exceptions import ConnectionError from urllib.parse import 
quote

 logger = util.logger

@@ -985,994 +983,6 @@
         logger.info("")
         logger.info("Validation Successful")

-    def _summary(self, method_name, method_data):
-        if method_name == "summary":
-            self.summaries[method_name] = str(method_data).replace("<>", self.key_name) if self.key_name else method_data
-        elif method_name == "tmdb_summary":
-            self.summaries[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, "TMDb ID"), self.library.is_movie).overview
-        elif method_name == "tmdb_description":
-            self.summaries[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).description
-        elif method_name == "tmdb_biography":
-            self.summaries[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, "TMDb Person ID")).biography
-        elif method_name == "tvdb_summary":
-            self.summaries[method_name] = self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).summary
-        elif method_name == "tvdb_description":
-            summary, _ = self.config.TVDb.get_list_description(method_data)
-            if summary:
-                self.summaries[method_name] = summary
-        elif method_name == "trakt_description":
-            try:
-                self.summaries[method_name] = self.config.Trakt.list_description(self.config.Trakt.validate_list(method_data)[0])
-            except Failed as e:
-                logger.error(f"Trakt Error: List description not found: {e}")
-        elif method_name == "letterboxd_description":
-            self.summaries[method_name] = self.config.Letterboxd.get_list_description(method_data, self.language)
-        elif method_name == "icheckmovies_description":
-            self.summaries[method_name] = self.config.ICheckMovies.get_list_description(method_data, self.language)
-
-    def _poster(self, method_name, method_data):
-        if method_name == "url_poster":
-            try:
-                if not method_data.startswith("https://theposterdb.com/api/assets/"):
-                    image_response = self.config.get(method_data, headers=util.header())
-                    if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types:
-                        raise ConnectionError
-                self.posters[method_name] = method_data
-            except ConnectionError:
-                logger.warning(f"{self.Type} Warning: No Poster Found at {method_data}")
-        elif method_name == "tmdb_list_poster":
-            self.posters[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).poster_url
-        elif method_name == "tvdb_list_poster":
-            _, poster = self.config.TVDb.get_list_description(method_data)
-            if poster:
-                self.posters[method_name] = poster
-        elif method_name == "tmdb_poster":
-            self.posters[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_url
-        elif method_name == "tmdb_profile":
-            self.posters[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_url
-        elif method_name == "tvdb_poster":
-            self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).poster_url}"
-        elif method_name == "file_poster":
-            if os.path.exists(os.path.abspath(method_data)):
-                self.posters[method_name] = os.path.abspath(method_data)
-            else:
-                logger.error(f"{self.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}")
-
-    def _background(self, method_name, method_data):
-        if method_name == "url_background":
-            try:
-                image_response = self.config.get(method_data, headers=util.header())
-                if image_response.status_code >= 400 or
image_response.headers["Content-Type"] not in util.image_content_types: - raise ConnectionError - self.backgrounds[method_name] = method_data - except ConnectionError: - logger.warning(f"{self.Type} Warning: No Background Found at {method_data}") - elif method_name == "tmdb_background": - self.backgrounds[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).backdrop_url - elif method_name == "tvdb_background": - self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).background_url}" - elif method_name == "file_background": - if os.path.exists(os.path.abspath(method_data)): - self.backgrounds[method_name] = os.path.abspath(method_data) - else: - logger.error(f"{self.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") - - def _details(self, method_name, method_data, method_final, methods): - if method_name == "url_theme": - self.url_theme = method_data - elif method_name == "file_theme": - if os.path.exists(os.path.abspath(method_data)): - self.file_theme = os.path.abspath(method_data) - else: - logger.error(f"{self.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") - elif method_name == "tmdb_region": - self.tmdb_region = util.parse(self.Type, method_name, method_data, options=self.config.TMDb.iso_3166_1) - elif method_name == "collection_mode": - try: - self.details[method_name] = util.check_collection_mode(method_data) - except Failed as e: - logger.error(e) - elif method_name == "collection_filtering": - if method_data and str(method_data).lower() in plex.collection_filtering_options: - self.details[method_name] = str(method_data).lower() - else: - logger.error(f"Config Error: {method_data} collection_filtering invalid\n\tadmin (Always the server admin user)\n\tuser (User currently viewing the content)") - elif method_name == "minimum_items": - self.minimum = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1) - elif method_name == "cache_builders": - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", minimum=0) - elif method_name == "default_percent": - self.default_percent = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) - elif method_name == "server_preroll": - self.server_preroll = util.parse(self.Type, method_name, method_data) - elif method_name == "ignore_ids": - self.ignore_ids.extend(util.parse(self.Type, method_name, method_data, datatype="intlist")) - elif method_name == "ignore_imdb_ids": - self.ignore_imdb_ids.extend(util.parse(self.Type, method_name, method_data, datatype="list")) - elif method_name == "label": - if "label" in methods and "label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use label and label.sync together") - if "label.remove" in methods and "label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use label.remove and label.sync together") - if method_final == "label" and "label_sync_mode" in methods and self.data[methods["label_sync_mode"]] == "sync": - self.details["label.sync"] = util.get_list(method_data) if method_data else [] - else: - self.details[method_final] = util.get_list(method_data) if method_data else [] - elif method_name == "changes_webhooks": - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="list") if method_data else None - elif method_name in scheduled_boolean: - if isinstance(method_data, bool): 
- self.details[method_name] = method_data - elif isinstance(method_data, (int, float)): - self.details[method_name] = method_data > 0 - elif str(method_data).lower() in ["t", "true"]: - self.details[method_name] = True - elif str(method_data).lower() in ["f", "false"]: - self.details[method_name] = False - else: - try: - util.schedule_check(method_name, util.parse(self.Type, method_name, method_data), self.current_time, self.config.run_hour) - self.details[method_name] = True - except NotScheduled: - self.details[method_name] = False - elif method_name in boolean_details: - default = self.details[method_name] if method_name in self.details else None - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="bool", default=default) - elif method_name in string_details: - self.details[method_name] = str(method_data) - - def _item_details(self, method_name, method_data, method_mod, method_final, methods): - if method_name == "item_label": - if "item_label" in methods and "item_label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_label and item_label.sync together") - if "item_label.remove" in methods and "item_label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_label.remove and item_label.sync together") - self.item_details[method_final] = util.get_list(method_data) if method_data else [] - if method_name == "item_genre": - if "item_genre" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_genre and item_genre.sync together") - if "item_genre.remove" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together") - self.item_details[method_final] = util.get_list(method_data) if method_data else [] - elif method_name == "item_edition": - self.item_details[method_final] = str(method_data) if method_data else "" # noqa - elif method_name == "non_item_remove_label": - if not method_data: - raise Failed(f"{self.Type} Error: non_item_remove_label is blank") - self.item_details[method_final] = util.get_list(method_data) - elif method_name in ["item_radarr_tag", "item_sonarr_tag"]: - if method_name in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.sync together") - if f"{method_name}.remove" in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") - if method_name in methods and f"{method_name}.remove" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.remove together") - self.item_details[method_name] = util.get_list(method_data, lower=True) - self.item_details["apply_tags"] = method_mod[1:] if method_mod else "" - elif method_name == "item_refresh_delay": - self.item_details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", default=0, minimum=0) - elif method_name in item_bool_details: - if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): - self.item_details[method_name] = True - elif method_name in item_false_details: - self.item_details[method_name] = False - elif method_name in plex.item_advance_keys: - key, options = plex.item_advance_keys[method_name] - if method_name in advance_new_agent and self.library.agent not in plex.new_plex_agents: - logger.error(f"Metadata Error: {method_name} attribute only works for with the New Plex 
Movie Agent and New Plex TV Agent") - elif method_name in advance_show and not self.library.is_show: - logger.error(f"Metadata Error: {method_name} attribute only works for show libraries") - elif str(method_data).lower() not in options: - logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid") - else: - self.item_details[method_name] = str(method_data).lower() # noqa - - def _radarr(self, method_name, method_data): - if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: - self.radarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") - elif method_name == "radarr_folder": - self.radarr_details["folder"] = method_data - elif method_name == "radarr_availability": - if str(method_data).lower() in radarr.availability_translation: - self.radarr_details["availability"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either announced, cinemas, released or db") - elif method_name == "radarr_quality": - self.radarr_details["quality"] = method_data - elif method_name == "radarr_tag": - self.radarr_details["tag"] = util.get_list(method_data, lower=True) - elif method_name == "radarr_taglist": - self.builders.append((method_name, util.get_list(method_data, lower=True))) - elif method_name == "radarr_all": - self.builders.append((method_name, True)) - - def _sonarr(self, method_name, method_data): - if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: - self.sonarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") - elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: - self.sonarr_details[method_name[7:]] = method_data - elif method_name == "sonarr_monitor": - if str(method_data).lower() in sonarr.monitor_translation: - self.sonarr_details["monitor"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") - elif method_name == "sonarr_series": - if str(method_data).lower() in sonarr.series_types: - self.sonarr_details["series"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either standard, daily, or anime") - elif method_name == "sonarr_tag": - self.sonarr_details["tag"] = util.get_list(method_data, lower=True) - elif method_name == "sonarr_taglist": - self.builders.append((method_name, util.get_list(method_data, lower=True))) - elif method_name == "sonarr_all": - self.builders.append((method_name, True)) - - def _anidb(self, method_name, method_data): - if method_name == "anidb_popular": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=30, maximum=30))) - elif method_name in ["anidb_id", "anidb_relation"]: - for anidb_id in self.config.AniDB.validate_anidb_ids(method_data): - self.builders.append((method_name, anidb_id)) - elif method_name == "anidb_tag": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {} - if "tag" not in dict_methods: - raise Failed(f"{self.Type} Error: anidb_tag tag attribute is required") - 
elif not dict_data[dict_methods["tag"]]: - raise Failed(f"{self.Type} Error: anidb_tag tag attribute is blank") - else: - new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID") - new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) - self.builders.append((method_name, new_dictionary)) - - def _anilist(self, method_name, method_data): - if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]: - for anilist_id in self.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): - self.builders.append((method_name, anilist_id)) - elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) - elif method_name == "anilist_userlist": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = { - "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "list_name": util.parse(self.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), - } - score_dict = {} - for search_method, search_data in dict_data.items(): - search_attr, modifier = os.path.splitext(str(search_method).lower()) - if search_attr == "score" and modifier in [".gt", ".gte", ".lt", ".lte"]: - score = util.parse(self.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) - if score > -1: - score_dict[modifier] = score - elif search_attr not in ["username", "list_name", "sort_by"]: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - new_dictionary["score"] = score_dict - self.builders.append((method_name, self.config.AniList.validate_userlist(new_dictionary))) - elif method_name == "anilist_search": - if self.current_time.month in [12, 1, 2]: current_season = "winter" - elif self.current_time.month in [3, 4, 5]: current_season = "spring" - elif self.current_time.month in [6, 7, 8]: current_season = "summer" - else: current_season = "fall" - default_year = self.current_year + 1 if self.current_time.month == 12 else self.current_year - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {} - for search_method, search_data in dict_data.items(): - lower_method = str(search_method).lower() - search_attr, modifier = os.path.splitext(lower_method) - if lower_method not in anilist.searches: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - elif search_attr == "season": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) - if new_dictionary[search_attr] == "current": - new_dictionary[search_attr] = current_season - if "year" not in dict_methods: - logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default") - new_dictionary["year"] = default_year - elif search_attr == "year": - new_dictionary[search_attr] = 
util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) - elif search_data is None: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") - elif search_attr == "adult": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="bool", parent=method_name) - elif search_attr == "country": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) - elif search_attr == "source": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) - elif search_attr in ["episodes", "duration", "score", "popularity"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name) - elif search_attr in ["format", "status", "genre", "tag", "tag_category"]: - new_dictionary[lower_method] = self.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.Type, search_method, search_data)) - elif search_attr in ["start", "end"]: - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") - elif search_attr == "min_tag_percent": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) - elif search_attr == "search": - new_dictionary[search_attr] = str(search_data) - elif lower_method not in ["sort_by", "limit"]: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - if len(new_dictionary) == 0: - raise Failed(f"{self.Type} Error: {method_name} must have at least one valid search option") - new_dictionary["sort_by"] = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) - new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) - self.builders.append((method_name, new_dictionary)) - - def _icheckmovies(self, method_name, method_data): - if method_name.startswith("icheckmovies_list"): - icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language) - for icheckmovies_list in icheckmovies_lists: - self.builders.append(("icheckmovies_list", icheckmovies_list)) - if method_name.endswith("_details"): - self.summaries[method_name] = self.config.ICheckMovies.get_list_description(icheckmovies_lists[0], self.language) - - def _imdb(self, method_name, method_data): - if method_name == "imdb_id": - for value in util.get_list(method_data): - if str(value).startswith("tt"): - self.builders.append((method_name, value)) - else: - raise Failed(f"{self.Type} Error: imdb_id {value} must begin with tt") - elif method_name == "imdb_list": - try: - for imdb_dict in self.config.IMDb.validate_imdb_lists(self.Type, method_data, self.language): - self.builders.append((method_name, imdb_dict)) - except Failed as e: - logger.error(e) - elif method_name == "imdb_chart": - for value in util.get_list(method_data): - if value in imdb.movie_charts and not self.library.is_movie: - raise Failed(f"{self.Type} Error: chart: {value} does not work with show libraries") - elif value in imdb.show_charts and self.library.is_movie: - raise Failed(f"{self.Type} Error: chart: {value} does not 
work with movie libraries") - elif value in imdb.movie_charts or value in imdb.show_charts: - self.builders.append((method_name, value)) - else: - raise Failed(f"{self.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") - elif method_name == "imdb_watchlist": - for imdb_user in self.config.IMDb.validate_imdb_watchlists(self.Type, method_data, self.language): - self.builders.append((method_name, imdb_user)) - elif method_name == "imdb_award": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - event_id = util.parse(self.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) - git_event, year_options = self.config.IMDb.get_event_years(event_id) - if not year_options: - raise Failed(f"{self.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") - if "event_year" not in dict_methods: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute not found") - og_year = dict_data[dict_methods["event_year"]] - if not og_year: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute is blank") - if og_year in ["all", "latest"]: - event_year = og_year - elif not isinstance(og_year, list) and "-" in str(og_year) and len(str(og_year)) > 7: - try: - min_year, max_year = og_year.split("-") - min_year = int(min_year) - max_year = int(max_year) if max_year != "current" else None - event_year = [] - for option in year_options: - check = int(option.split("-")[0] if "-" in option else option) - if check >= min_year and (max_year is None or check <= max_year): - event_year.append(option) - except ValueError: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute invalid: {og_year}") - else: - event_year = util.parse(self.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) - if (event_year == "all" or len(event_year) > 1) and not git_event: - raise Failed(f"{self.Type} Error: Only specific events work when using multiple years. 
Event Options: [{', '.join([k for k in self.config.IMDb.events_validation])}]") - award_filters = [] - if "award_filter" in dict_methods: - if not dict_data[dict_methods["award_filter"]]: - raise Failed(f"{self.Type} Error: imdb_award award_filter attribute is blank") - award_filters = util.parse(self.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") - category_filters = [] - if "category_filter" in dict_methods: - if not dict_data[dict_methods["category_filter"]]: - raise Failed(f"{self.Type} Error: imdb_award category_filter attribute is blank") - category_filters = util.parse(self.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") - final_category = [] - final_awards = [] - if award_filters or category_filters: - award_names, category_names = self.config.IMDb.get_award_names(event_id, year_options[0] if event_year == "latest" else event_year) - lower_award = {a.lower(): a for a in award_names if a} - for award_filter in award_filters: - if award_filter in lower_award: - final_awards.append(lower_award[award_filter]) - else: - raise Failed(f"{self.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in in [{', '.join([v for _, v in lower_award.items()])}]") - lower_category = {c.lower(): c for c in category_names if c} - for category_filter in category_filters: - if category_filter in lower_category: - final_category.append(lower_category[category_filter]) - else: - raise Failed(f"{self.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in in [{', '.join([v for _, v in lower_category.items()])}]") - self.builders.append((method_name, { - "event_id": event_id, "event_year": event_year, "award_filter": final_awards if final_awards else None, "category_filter": final_category if final_category else None, - "winning": util.parse(self.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) - })) - elif method_name == "imdb_search": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} - for search_method, search_data in dict_data.items(): - lower_method = str(search_method).lower() - search_attr, modifier = os.path.splitext(lower_method) - if search_data is None: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") - elif lower_method not in imdb.imdb_search_attributes: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - elif search_attr == "sort_by": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) - elif search_attr == "title": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name) - elif search_attr == "type": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) - elif search_attr == "topic": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) - elif search_attr == "release": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, 
datatype="date", parent=method_name, date_return="%Y-%m-%d") - elif search_attr == "rating": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) - elif search_attr in ["votes", "imdb_top", "imdb_bottom", "popularity", "runtime"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) - elif search_attr == "genre": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) - elif search_attr == "event": - events = [] - for event in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - if event in imdb.event_options: - events.append(event) - else: - res = re.search(r'(ev\d+)', event) - if res: - events.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ev\\d+ e.g. ev0000292 or be one of {', '.join([e for e in imdb.event_options])}") - if events: - new_dictionary[lower_method] = events - elif search_attr == "company": - companies = [] - for company in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - if company in imdb.company_options: - companies.append(company) - else: - res = re.search(r'(co\d+)', company) - if res: - companies.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern co\\d+ e.g. co0098836 or be one of {', '.join([e for e in imdb.company_options])}") - if companies: - new_dictionary[lower_method] = companies - elif search_attr == "content_rating": - final_list = [] - for content in util.get_list(search_data): - if content: - final_dict = {"region": "US", "rating": None} - if not isinstance(content, dict): - final_dict["rating"] = str(content) - else: - if "rating" not in content or not content["rating"]: - raise Failed(f"{method_name} {search_method} attribute: rating attribute is required") - final_dict["rating"] = str(content["rating"]) - if "region" not in content or not content["region"]: - logger.warning(f"{method_name} {search_method} attribute: region attribute not found defaulting to 'US'") - elif len(str(content["region"])) != 2: - logger.warning(f"{method_name} {search_method} attribute: region attribute: {str(content['region'])} must be only 2 characters defaulting to 'US'") - else: - final_dict["region"] = str(content["region"]).upper() - final_list.append(final_dict) - if final_list: - new_dictionary[lower_method] = final_list - elif search_attr == "country": - countries = [] - for country in util.parse(self.Type, search_method, search_data, datatype="upperlist", parent=method_name): - if country: - if len(str(country)) != 2: - raise Failed(f"{method_name} {search_method} attribute: {country} must be only 2 characters i.e. 
'US'") - countries.append(str(country)) - if countries: - new_dictionary[lower_method] = countries - elif search_attr in ["keyword", "language", "alternate_version", "crazy_credit", "location", "goof", "plot", "quote", "soundtrack", "trivia"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name) - elif search_attr == "cast": - casts = [] - for cast in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(nm\d+)', cast) - if res: - casts.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern nm\\d+ e.g. nm00988366") - if casts: - new_dictionary[lower_method] = casts - elif search_attr == "series": - series = [] - for show in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(tt\d+)', show) - if res: - series.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern tt\\d+ e.g. tt00988366") - if series: - new_dictionary[lower_method] = series - elif search_attr == "list": - lists = [] - for new_list in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(ls\d+)', new_list) - if res: - lists.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ls\\d+ e.g. ls000024621") - if lists: - new_dictionary[lower_method] = lists - elif search_attr == "adult": - if util.parse(self.Type, search_method, search_data, datatype="bool", parent=method_name): - new_dictionary[lower_method] = True - elif search_attr != "limit": - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - if len(new_dictionary) > 1: - self.builders.append((method_name, new_dictionary)) - else: - raise Failed(f"{self.Type} Error: {method_name} had no valid fields") - - def _letterboxd(self, method_name, method_data): - if method_name.startswith("letterboxd_list"): - letterboxd_lists = self.config.Letterboxd.validate_letterboxd_lists(self.Type, method_data, self.language) - for letterboxd_list in letterboxd_lists: - self.builders.append(("letterboxd_list", letterboxd_list)) - if method_name.endswith("_details"): - self.summaries[method_name] = self.config.Letterboxd.get_list_description(letterboxd_lists[0]["url"], self.language) - - def _mal(self, method_name, method_data): - if method_name == "mal_id": - for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): - self.builders.append((method_name, mal_id)) - elif method_name in ["mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special", "mal_popular", "mal_favorite", "mal_suggested"]: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) - elif method_name in ["mal_season", "mal_userlist", "mal_search"]: - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - if method_name == "mal_season": - if self.current_time.month in [1, 2, 3]: default_season = "winter" - elif self.current_time.month in [4, 5, 6]: default_season = "spring" - elif self.current_time.month in [7, 8, 9]: default_season = "summer" - else: default_season = "fall" - season = util.parse(self.Type, 
"season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) - if season == "current": - season = default_season - self.builders.append((method_name, { - "season": season, - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), - "year": util.parse(self.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.current_year, parent=method_name, minimum=1917, maximum=self.current_year + 1), - "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), - "starting_only": util.parse(self.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) - })) - elif method_name == "mal_userlist": - self.builders.append((method_name, { - "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "status": util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), - "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=1000) - })) - elif method_name == "mal_search": - final_attributes = {} - final_text = "MyAnimeList Search" - if "sort_by" in dict_methods: - sort = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) - sort_type, sort_direction = sort.split(".") - final_text += f"\nSorted By: {sort}" - final_attributes["order_by"] = sort_type - final_attributes["sort"] = sort_direction - limit = 0 - if "limit" in dict_methods: - limit = util.parse(self.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) - final_text += f"\nLimit: {limit if limit else 'None'}" - if "query" in dict_methods: - final_attributes["q"] = util.parse(self.Type, "query", dict_data, methods=dict_methods, parent=method_name) - final_text += f"\nQuery: {final_attributes['q']}" - if "prefix" in dict_methods: - final_attributes["letter"] = util.parse(self.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) - final_text += f"\nPrefix: {final_attributes['letter']}" - if "type" in dict_methods: - type_list = util.parse(self.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) - final_attributes["type"] = ",".join(type_list) - final_text += f"\nType: {' or '.join(type_list)}" - if "status" in dict_methods: - final_attributes["status"] = util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) - final_text += f"\nStatus: {final_attributes['status']}" - if "genre" in dict_methods: - genre_str = str(util.parse(self.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) - final_text += f"\nGenre: {out_text}" - final_attributes["genres"] = out_ints - if "genre.not" in dict_methods: - genre_str = str(util.parse(self.Type, "genre.not", dict_data, methods=dict_methods, parent=method_name)) - 
out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) - final_text += f"\nNot Genre: {out_text}" - final_attributes["genres_exclude"] = out_ints - if "studio" in dict_methods: - studio_str = str(util.parse(self.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.Type, 'Studio', studio_str, self.config.MyAnimeList.studios) - final_text += f"\nStudio: {out_text}" - final_attributes["producers"] = out_ints - if "content_rating" in dict_methods: - final_attributes["rating"] = util.parse(self.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) - final_text += f"\nContent Rating: {final_attributes['rating']}" - if "score.gte" in dict_methods: - final_attributes["min_score"] = util.parse(self.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_text += f"\nScore Greater Than or Equal: {final_attributes['min_score']}" - elif "score.gt" in dict_methods: - original_score = util.parse(self.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_attributes["min_score"] = original_score + 0.01 - final_text += f"\nScore Greater Than: {original_score}" - if "score.lte" in dict_methods: - final_attributes["max_score"] = util.parse(self.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_text += f"\nScore Less Than or Equal: {final_attributes['max_score']}" - elif "score.lt" in dict_methods: - original_score = util.parse(self.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_attributes["max_score"] = original_score - 0.01 - final_text += f"\nScore Less Than: {original_score}" - if "min_score" in final_attributes and "max_score" in final_attributes and final_attributes["max_score"] <= final_attributes["min_score"]: - raise Failed(f"{self.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") - if "sfw" in dict_methods: - sfw = util.parse(self.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) - if sfw: - final_attributes["sfw"] = 1 - final_text += f"\nSafe for Work: {final_attributes['sfw']}" - if not final_attributes: - raise Failed(f"{self.Type} Error: no mal_search attributes found") - self.builders.append((method_name, (final_attributes, final_text, limit))) - elif method_name in ["mal_genre", "mal_studio"]: - logger.warning(f"Config Warning: {method_name} will run as a mal_search") - item_list = util.parse(self.Type, method_name[4:], method_data, datatype="commalist") - all_items = self.config.MyAnimeList.genres if method_name == "mal_genre" else self.config.MyAnimeList.studios - final_items = [str(all_items[i]) for i in item_list if i in all_items] - final_text = f"MyAnimeList Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" - self.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) - - def _mojo(self, method_name, method_data): - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - final = {} - if method_name == "mojo_record": - final["chart"] = util.parse(self.Type, "chart", dict_data, 
methods=dict_methods, parent=method_name, options=mojo.top_options) - elif method_name == "mojo_world": - if "year" not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} year attribute not found") - og_year = dict_data[dict_methods["year"]] - if not og_year: - raise Failed(f"{self.Type} Error: {method_name} year attribute is blank") - if og_year == "current": - final["year"] = str(self.current_year) # noqa - elif str(og_year).startswith("current-"): - try: - final["year"] = str(self.current_year - int(og_year.split("-")[1])) # noqa - if final["year"] not in mojo.year_options: - raise Failed(f"{self.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} year attribute invalid: {og_year}") - else: - final["year"] = util.parse(self.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) - elif method_name == "mojo_all_time": - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) - final["content_rating_filter"] = util.parse(self.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None - elif method_name == "mojo_never": - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", options=self.config.BoxOfficeMojo.never_options) - final["never"] = str(util.parse(self.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" - elif method_name in ["mojo_domestic", "mojo_international"]: - dome = method_name == "mojo_domestic" - final["range"] = util.parse(self.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) - if not dome: - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="international", options=self.config.BoxOfficeMojo.intl_options) - chart_date = self.current_time - if final["range"] != "daily": - _m = "range_data" if final["range"] == "yearly" and "year" not in dict_methods and "range_data" in dict_methods else "year" - if _m not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute not found") - og_year = dict_data[dict_methods[_m]] - if not og_year: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute is blank") - if str(og_year).startswith("current-"): - try: - chart_date = self.current_time - relativedelta(years=int(og_year.split("-")[1])) - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute invalid: {og_year}") - else: - _y = util.parse(self.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) - if _y != "current": - chart_date = self.current_time - relativedelta(years=self.current_time.year - _y) - if final["range"] != "yearly": - if "range_data" not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute not found") - og_data = dict_data[dict_methods["range_data"]] - if not og_data: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute is blank") - - if final["range"] == "holiday": - final["range_data"] = util.parse(self.Type, "range_data", dict_data, 
methods=dict_methods, parent=method_name, options=mojo.holiday_options) - elif final["range"] == "daily": - if og_data == "current": - final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa - elif str(og_data).startswith("current-"): - try: - final["range_data"] = datetime.strftime(self.current_time - timedelta(days=int(og_data.split("-")[1])), "%Y-%m-%d") # noqa - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") - if final["range_data"] == "current": - final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa - elif final["range"] in ["weekend", "weekly"]: - if str(og_data).startswith("current-"): - try: - final_date = chart_date - timedelta(weeks=int(og_data.split("-")[1])) - final_iso = final_date.isocalendar() - final["range_data"] = final_iso.week - final["year"] = final_iso.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) - current_iso = chart_date.isocalendar() - final["range_data"] = current_iso.week if _v == "current" else _v - final["year"] = current_iso.year - elif final["range"] == "monthly": - if str(og_data).startswith("current-"): - try: - final_date = chart_date - relativedelta(months=int(og_data.split("-")[1])) - final["range_data"] = final_date.month - final["year"] = final_date.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) - final["range_data"] = chart_date.month if _v == "current" else util.lower_months[_v] - elif final["range"] == "quarterly": - if str(og_data).startswith("current-"): - try: - final_date = chart_date - relativedelta(months=int(og_data.split("-")[1]) * 3) - final["range_data"] = mojo.quarters[final_date.month] - final["year"] = final_date.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) - final["range_data"] = mojo.quarters[chart_date.month] if _v == "current" else _v - elif final["range"] == "season": - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) - final["range_data"] = mojo.seasons[chart_date.month] if _v == "current" else _v - else: - final["range_data"] = chart_date.year - if "year" not in final: - final["year"] = chart_date.year - if final["year"] < 1977: - raise Failed(f"{self.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") - - final["limit"] = util.parse(self.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 - self.builders.append((method_name, final)) - - def _plex(self, method_name, method_data): - if method_name in ["plex_all", 
"plex_pilots"]: - self.builders.append((method_name, self.builder_level)) - elif method_name == "plex_watchlist": - if method_data not in plex.watchlist_sorts: - logger.warning(f"{self.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") - self.builders.append((method_name, method_data if method_data in plex.watchlist_sorts else "added.asc")) - elif method_name in ["plex_search", "plex_collectionless"]: - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - if method_name == "plex_search": - try: - self.builders.append((method_name, self.build_filter("plex_search", dict_data))) - except FilterFailed as e: - if self.ignore_blank_results: - raise - else: - raise Failed(str(e)) - elif method_name == "plex_collectionless": - prefix_list = util.parse(self.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] - exact_list = util.parse(self.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] - if len(prefix_list) == 0 and len(exact_list) == 0: - raise Failed(f"{self.Type} Error: you must have at least one exclusion") - exact_list.append(self.name) - self.builders.append((method_name, {"exclude_prefix": prefix_list, "exclude": exact_list})) - else: - try: - self.builders.append(("plex_search", self.build_filter("plex_search", {"any": {method_name: method_data}}))) - except FilterFailed as e: - if self.ignore_blank_results: - raise - else: - raise Failed(str(e)) - - def _reciperr(self, method_name, method_data): - if method_name == "reciperr_list": - for reciperr_list in self.config.Reciperr.validate_list(method_data): - self.builders.append((method_name, reciperr_list)) - elif method_name == "stevenlu_popular": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, "bool"))) - - def _mdblist(self, method_name, method_data): - for mdb_dict in self.config.MDBList.validate_mdblist_lists(self.Type, method_data): - self.builders.append((method_name, mdb_dict)) - - def _tautulli(self, method_name, method_data): - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - final_dict = { - "list_type": "popular" if method_name == "tautulli_popular" else "watched", - "list_days": util.parse(self.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), - "list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), - "list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) - } - buff = final_dict["list_size"] * 3 - if self.library.Tautulli.has_section: - buff = 0 - elif "list_buffer" in dict_methods: - buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) - final_dict["list_buffer"] = buff - self.builders.append((method_name, final_dict)) - - def _tmdb(self, method_name, method_data): - if method_name == "tmdb_discover": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} - for 
discover_method, discover_data in dict_data.items(): - lower_method = str(discover_method).lower() - discover_attr, modifier = os.path.splitext(lower_method) - if discover_data is None: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute is blank") - elif discover_method.lower() not in tmdb.discover_all: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") - elif self.library.is_movie and discover_attr in tmdb.discover_tv_only: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for show libraries") - elif self.library.is_show and discover_attr in tmdb.discover_movie_only: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") - elif discover_attr == "region": - new_dictionary[discover_attr] = util.parse(self.Type, discover_method, discover_data.upper(), parent=method_name, regex=("^[A-Z]{2}$", "US")) - elif discover_attr == "sort_by": - options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=options) - elif discover_attr == "certification_country": - if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: - new_dictionary[lower_method] = discover_data - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") - elif discover_attr == "certification": - if "certification_country" in dict_data: - new_dictionary[lower_method] = discover_data - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") - elif discover_attr == "watch_region": - if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: - new_dictionary[lower_method] = discover_data.upper() - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") - elif discover_attr == "with_watch_monetization_types": - if "watch_region" in dict_data: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") - elif discover_attr in tmdb.discover_booleans: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="bool", parent=method_name) - elif discover_attr == "vote_average": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="float", parent=method_name) - elif discover_attr == "with_status": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) - elif discover_attr == "with_type": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) - elif discover_attr in tmdb.discover_dates: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") - elif discover_attr in 
tmdb.discover_years: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1) - elif discover_attr in tmdb.discover_ints: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name) - elif discover_attr in tmdb.discover_strings: - new_dictionary[lower_method] = discover_data - elif discover_attr != "limit": - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") - if len(new_dictionary) > 1: - self.builders.append((method_name, new_dictionary)) - else: - raise Failed(f"{self.Type} Error: {method_name} had no valid fields") - elif method_name in tmdb.int_builders: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) - else: - values = self.config.TMDb.validate_tmdb_ids(method_data, method_name) - if method_name in tmdb.details_builders: - if method_name.startswith(("tmdb_collection", "tmdb_movie", "tmdb_show")): - item = self.config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie) - if item.overview: - self.summaries[method_name] = item.overview - if item.backdrop_url: - self.backgrounds[method_name] = item.backdrop_url - if item.poster_url: - self.posters[method_name] = item.poster_url - elif method_name.startswith(("tmdb_actor", "tmdb_crew", "tmdb_director", "tmdb_producer", "tmdb_writer")): - item = self.config.TMDb.get_person(values[0]) - if item.biography: - self.summaries[method_name] = item.biography - if item.profile_path: - self.posters[method_name] = item.profile_url - elif method_name.startswith("tmdb_list"): - item = self.config.TMDb.get_list(values[0]) - if item.description: - self.summaries[method_name] = item.description - if item.poster_url: - self.posters[method_name] = item.poster_url - for value in values: - self.builders.append((method_name[:-8] if method_name in tmdb.details_builders else method_name, value)) - - def _trakt(self, method_name, method_data): - if method_name.startswith("trakt_list"): - trakt_lists = self.config.Trakt.validate_list(method_data) - for trakt_list in trakt_lists: - self.builders.append(("trakt_list", trakt_list)) - if method_name.endswith("_details"): - try: - self.summaries[method_name] = self.config.Trakt.list_description(trakt_lists[0]) - except Failed as e: - logger.error(f"Trakt Error: List description not found: {e}") - elif method_name == "trakt_boxoffice": - if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): - self.builders.append((method_name, 10)) - else: - raise Failed(f"{self.Type} Error: {method_name} must be set to true") - elif method_name == "trakt_recommendations": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100))) - elif method_name == "sync_to_trakt_list": - if method_data not in self.config.Trakt.slugs: - raise Failed(f"{self.Type} Error: {method_data} invalid. 
Options {', '.join(self.config.Trakt.slugs)}")
-            self.sync_to_trakt_list = method_data
-        elif method_name == "sync_missing_to_trakt_list":
-            self.sync_missing_to_trakt_list = util.parse(self.Type, method_name, method_data, datatype="bool", default=False)
-        elif method_name in trakt.builders:
-            if method_name in ["trakt_chart", "trakt_userlist"]:
-                trakt_dicts = method_data
-                final_method = method_name
-            elif method_name in ["trakt_watchlist", "trakt_collection"]:
-                trakt_dicts = []
-                for trakt_user in util.get_list(method_data, split=False):
-                    trakt_dicts.append({"userlist": method_name[6:], "user": trakt_user})
-                final_method = "trakt_userlist"
-            else:
-                terms = method_name.split("_")
-                trakt_dicts = {
-                    "chart": terms[1],
-                    "limit": util.parse(self.Type, method_name, method_data, datatype="int", default=10),
-                    "time_period": terms[2] if len(terms) > 2 else None
-                }
-                final_method = "trakt_chart"
-            if method_name != final_method:
-                logger.warning(f"{self.Type} Warning: {method_name} will run as {final_method}")
-            for trakt_dict in self.config.Trakt.validate_chart(self.Type, final_method, trakt_dicts, self.library.is_movie):
-                self.builders.append((final_method, trakt_dict))
-
-    def _tvdb(self, method_name, method_data):
-        values = util.get_list(method_data)
-        if method_name.endswith("_details"):
-            if method_name.startswith(("tvdb_movie", "tvdb_show")):
-                item = self.config.TVDb.get_tvdb_obj(values[0], is_movie=method_name.startswith("tvdb_movie"))
-                if item.summary:
-                    self.summaries[method_name] = item.summary
-                if item.background_url:
-                    self.backgrounds[method_name] = item.background_url
-                if item.poster_url:
-                    self.posters[method_name] = item.poster_url
-            elif method_name.startswith("tvdb_list"):
-                description, poster = self.config.TVDb.get_list_description(values[0])
-                if description:
-                    self.summaries[method_name] = description
-                if poster:
-                    self.posters[method_name] = poster
-        for value in values:
-            self.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value))
-
     def _filters(self, method_name, method_data):
         for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"):
             dict_methods = {dm.lower(): dm for dm in dict_data}
diff --git a/modules/builder/_add_methods.py b/modules/builder/_add_methods.py
new file mode 100644
index 000000000..49e039561
--- /dev/null
+++ b/modules/builder/_add_methods.py
@@ -0,0 +1,992 @@
+import os
+import re
+from datetime import datetime, timedelta
+from dateutil.relativedelta import relativedelta
+from modules import anilist, imdb, mal, mojo, plex, radarr, sonarr, tmdb, trakt, util
+from modules.util import Failed, NotScheduled
+# NOTE: assumes the shared builder constants were split out into modules/builder/_config.py earlier in this series
+from modules.builder._config import advance_new_agent, advance_show, boolean_details, item_bool_details, item_false_details, scheduled_boolean, string_details
+from requests.exceptions import ConnectionError
+
+logger = util.logger
+
+def summary(self, method_name, method_data):
+    if method_name == "summary":
+        self.summaries[method_name] = str(method_data).replace("<<key_name>>", self.key_name) if self.key_name else method_data
+    elif method_name == "tmdb_summary":
+        self.summaries[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, "TMDb ID"), self.library.is_movie).overview
+    elif method_name == "tmdb_description":
+        self.summaries[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).description
+    elif method_name == "tmdb_biography":
+        self.summaries[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, "TMDb Person ID")).biography
+    elif method_name == "tvdb_summary":
+        self.summaries[method_name] = self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).summary
+    elif method_name ==
"tvdb_description": + summary, _ = self.config.TVDb.get_list_description(method_data) + if summary: + self.summaries[method_name] = summary + elif method_name == "trakt_description": + try: + self.summaries[method_name] = self.config.Trakt.list_description(self.config.Trakt.validate_list(method_data)[0]) + except Failed as e: + logger.error(f"Trakt Error: List description not found: {e}") + elif method_name == "letterboxd_description": + self.summaries[method_name] = self.config.Letterboxd.get_list_description(method_data, self.language) + elif method_name == "icheckmovies_description": + self.summaries[method_name] = self.config.ICheckMovies.get_list_description(method_data, self.language) + +def poster(self, method_name, method_data): + if method_name == "url_poster": + try: + if not method_data.startswith("https://theposterdb.com/api/assets/"): + image_response = self.config.get(method_data, headers=util.header()) + if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types: + raise ConnectionError + self.posters[method_name] = method_data + except ConnectionError: + logger.warning(f"{self.Type} Warning: No Poster Found at {method_data}") + elif method_name == "tmdb_list_poster": + self.posters[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).poster_url + elif method_name == "tvdb_list_poster": + _, poster = self.config.TVDb.get_list_description(method_data) + if poster: + self.posters[method_name] = poster + elif method_name == "tmdb_poster": + self.posters[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_url + elif method_name == "tmdb_profile": + self.posters[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_url + elif method_name == "tvdb_poster": + self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).poster_url}" + elif method_name == "file_poster": + if os.path.exists(os.path.abspath(method_data)): + self.posters[method_name] = os.path.abspath(method_data) + else: + logger.error(f"{self.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") + +def background(self, method_name, method_data): + if method_name == "url_background": + try: + image_response = self.config.get(method_data, headers=util.header()) + if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types: + raise ConnectionError + self.backgrounds[method_name] = method_data + except ConnectionError: + logger.warning(f"{self.Type} Warning: No Background Found at {method_data}") + elif method_name == "tmdb_background": + self.backgrounds[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).backdrop_url + elif method_name == "tvdb_background": + self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).background_url}" + elif method_name == "file_background": + if os.path.exists(os.path.abspath(method_data)): + self.backgrounds[method_name] = os.path.abspath(method_data) + else: + logger.error(f"{self.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") + +def details(self, method_name, method_data, method_final, methods): + if method_name == "url_theme": + self.url_theme = method_data + elif method_name == "file_theme": + 
if os.path.exists(os.path.abspath(method_data)): + self.file_theme = os.path.abspath(method_data) + else: + logger.error(f"{self.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") + elif method_name == "tmdb_region": + self.tmdb_region = util.parse(self.Type, method_name, method_data, options=self.config.TMDb.iso_3166_1) + elif method_name == "collection_mode": + try: + self.details[method_name] = util.check_collection_mode(method_data) + except Failed as e: + logger.error(e) + elif method_name == "collection_filtering": + if method_data and str(method_data).lower() in plex.collection_filtering_options: + self.details[method_name] = str(method_data).lower() + else: + logger.error(f"Config Error: {method_data} collection_filtering invalid\n\tadmin (Always the server admin user)\n\tuser (User currently viewing the content)") + elif method_name == "minimum_items": + self.minimum = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1) + elif method_name == "cache_builders": + self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", minimum=0) + elif method_name == "default_percent": + self.default_percent = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) + elif method_name == "server_preroll": + self.server_preroll = util.parse(self.Type, method_name, method_data) + elif method_name == "ignore_ids": + self.ignore_ids.extend(util.parse(self.Type, method_name, method_data, datatype="intlist")) + elif method_name == "ignore_imdb_ids": + self.ignore_imdb_ids.extend(util.parse(self.Type, method_name, method_data, datatype="list")) + elif method_name == "label": + if "label" in methods and "label.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use label and label.sync together") + if "label.remove" in methods and "label.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use label.remove and label.sync together") + if method_final == "label" and "label_sync_mode" in methods and self.data[methods["label_sync_mode"]] == "sync": + self.details["label.sync"] = util.get_list(method_data) if method_data else [] + else: + self.details[method_final] = util.get_list(method_data) if method_data else [] + elif method_name == "changes_webhooks": + self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="list") if method_data else None + elif method_name in scheduled_boolean: + if isinstance(method_data, bool): + self.details[method_name] = method_data + elif isinstance(method_data, (int, float)): + self.details[method_name] = method_data > 0 + elif str(method_data).lower() in ["t", "true"]: + self.details[method_name] = True + elif str(method_data).lower() in ["f", "false"]: + self.details[method_name] = False + else: + try: + util.schedule_check(method_name, util.parse(self.Type, method_name, method_data), self.current_time, self.config.run_hour) + self.details[method_name] = True + except NotScheduled: + self.details[method_name] = False + elif method_name in boolean_details: + default = self.details[method_name] if method_name in self.details else None + self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="bool", default=default) + elif method_name in string_details: + self.details[method_name] = str(method_data) + +def item_details(self, method_name, method_data, method_mod, method_final, methods): + if method_name == "item_label": + if "item_label" in methods and "item_label.sync" in methods: + raise 
Failed(f"{self.Type} Error: Cannot use item_label and item_label.sync together") + if "item_label.remove" in methods and "item_label.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use item_label.remove and item_label.sync together") + self.item_details[method_final] = util.get_list(method_data) if method_data else [] + if method_name == "item_genre": + if "item_genre" in methods and "item_genre.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use item_genre and item_genre.sync together") + if "item_genre.remove" in methods and "item_genre.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together") + self.item_details[method_final] = util.get_list(method_data) if method_data else [] + elif method_name == "item_edition": + self.item_details[method_final] = str(method_data) if method_data else "" # noqa + elif method_name == "non_item_remove_label": + if not method_data: + raise Failed(f"{self.Type} Error: non_item_remove_label is blank") + self.item_details[method_final] = util.get_list(method_data) + elif method_name in ["item_radarr_tag", "item_sonarr_tag"]: + if method_name in methods and f"{method_name}.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.sync together") + if f"{method_name}.remove" in methods and f"{method_name}.sync" in methods: + raise Failed(f"{self.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") + if method_name in methods and f"{method_name}.remove" in methods: + raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.remove together") + self.item_details[method_name] = util.get_list(method_data, lower=True) + self.item_details["apply_tags"] = method_mod[1:] if method_mod else "" + elif method_name == "item_refresh_delay": + self.item_details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", default=0, minimum=0) + elif method_name in item_bool_details: + if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): + self.item_details[method_name] = True + elif method_name in item_false_details: + self.item_details[method_name] = False + elif method_name in plex.item_advance_keys: + key, options = plex.item_advance_keys[method_name] + if method_name in advance_new_agent and self.library.agent not in plex.new_plex_agents: + logger.error(f"Metadata Error: {method_name} attribute only works for with the New Plex Movie Agent and New Plex TV Agent") + elif method_name in advance_show and not self.library.is_show: + logger.error(f"Metadata Error: {method_name} attribute only works for show libraries") + elif str(method_data).lower() not in options: + logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid") + else: + self.item_details[method_name] = str(method_data).lower() # noqa + +def radarr(self, method_name, method_data): + if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: + self.radarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") + elif method_name == "radarr_folder": + self.radarr_details["folder"] = method_data + elif method_name == "radarr_availability": + if str(method_data).lower() in radarr.availability_translation: + self.radarr_details["availability"] = str(method_data).lower() + else: + raise Failed(f"{self.Type} Error: {method_name} 
attribute must be either announced, cinemas, released or db") + elif method_name == "radarr_quality": + self.radarr_details["quality"] = method_data + elif method_name == "radarr_tag": + self.radarr_details["tag"] = util.get_list(method_data, lower=True) + elif method_name == "radarr_taglist": + self.builders.append((method_name, util.get_list(method_data, lower=True))) + elif method_name == "radarr_all": + self.builders.append((method_name, True)) + +def sonarr(self, method_name, method_data): + if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: + self.sonarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") + elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: + self.sonarr_details[method_name[7:]] = method_data + elif method_name == "sonarr_monitor": + if str(method_data).lower() in sonarr.monitor_translation: + self.sonarr_details["monitor"] = str(method_data).lower() + else: + raise Failed(f"{self.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") + elif method_name == "sonarr_series": + if str(method_data).lower() in sonarr.series_types: + self.sonarr_details["series"] = str(method_data).lower() + else: + raise Failed(f"{self.Type} Error: {method_name} attribute must be either standard, daily, or anime") + elif method_name == "sonarr_tag": + self.sonarr_details["tag"] = util.get_list(method_data, lower=True) + elif method_name == "sonarr_taglist": + self.builders.append((method_name, util.get_list(method_data, lower=True))) + elif method_name == "sonarr_all": + self.builders.append((method_name, True)) + +def anidb(self, method_name, method_data): + if method_name == "anidb_popular": + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=30, maximum=30))) + elif method_name in ["anidb_id", "anidb_relation"]: + for anidb_id in self.config.AniDB.validate_anidb_ids(method_data): + self.builders.append((method_name, anidb_id)) + elif method_name == "anidb_tag": + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {} + if "tag" not in dict_methods: + raise Failed(f"{self.Type} Error: anidb_tag tag attribute is required") + elif not dict_data[dict_methods["tag"]]: + raise Failed(f"{self.Type} Error: anidb_tag tag attribute is blank") + else: + new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID") + new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) + self.builders.append((method_name, new_dictionary)) + +def anilist(self, method_name, method_data): + if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]: + for anilist_id in self.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): + self.builders.append((method_name, anilist_id)) + elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]: + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) + elif method_name == "anilist_userlist": + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods 
= {dm.lower(): dm for dm in dict_data} + new_dictionary = { + "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "list_name": util.parse(self.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), + } + score_dict = {} + for search_method, search_data in dict_data.items(): + search_attr, modifier = os.path.splitext(str(search_method).lower()) + if search_attr == "score" and modifier in [".gt", ".gte", ".lt", ".lte"]: + score = util.parse(self.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) + if score > -1: + score_dict[modifier] = score + elif search_attr not in ["username", "list_name", "sort_by"]: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") + new_dictionary["score"] = score_dict + self.builders.append((method_name, self.config.AniList.validate_userlist(new_dictionary))) + elif method_name == "anilist_search": + if self.current_time.month in [12, 1, 2]: current_season = "winter" + elif self.current_time.month in [3, 4, 5]: current_season = "spring" + elif self.current_time.month in [6, 7, 8]: current_season = "summer" + else: current_season = "fall" + default_year = self.current_year + 1 if self.current_time.month == 12 else self.current_year + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {} + for search_method, search_data in dict_data.items(): + lower_method = str(search_method).lower() + search_attr, modifier = os.path.splitext(lower_method) + if lower_method not in anilist.searches: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") + elif search_attr == "season": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) + if new_dictionary[search_attr] == "current": + new_dictionary[search_attr] = current_season + if "year" not in dict_methods: + logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default") + new_dictionary["year"] = default_year + elif search_attr == "year": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) + elif search_data is None: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") + elif search_attr == "adult": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="bool", parent=method_name) + elif search_attr == "country": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) + elif search_attr == "source": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) + elif search_attr in ["episodes", "duration", "score", "popularity"]: + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name) + elif search_attr in ["format", "status", "genre", "tag", "tag_category"]: + new_dictionary[lower_method] = 
self.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.Type, search_method, search_data)) + elif search_attr in ["start", "end"]: + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + elif search_attr == "min_tag_percent": + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) + elif search_attr == "search": + new_dictionary[search_attr] = str(search_data) + elif lower_method not in ["sort_by", "limit"]: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") + if len(new_dictionary) == 0: + raise Failed(f"{self.Type} Error: {method_name} must have at least one valid search option") + new_dictionary["sort_by"] = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) + new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + self.builders.append((method_name, new_dictionary)) + +def icheckmovies(self, method_name, method_data): + if method_name.startswith("icheckmovies_list"): + icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language) + for icheckmovies_list in icheckmovies_lists: + self.builders.append(("icheckmovies_list", icheckmovies_list)) + if method_name.endswith("_details"): + self.summaries[method_name] = self.config.ICheckMovies.get_list_description(icheckmovies_lists[0], self.language) + +def imdb(self, method_name, method_data): + if method_name == "imdb_id": + for value in util.get_list(method_data): + if str(value).startswith("tt"): + self.builders.append((method_name, value)) + else: + raise Failed(f"{self.Type} Error: imdb_id {value} must begin with tt") + elif method_name == "imdb_list": + try: + for imdb_dict in self.config.IMDb.validate_imdb_lists(self.Type, method_data, self.language): + self.builders.append((method_name, imdb_dict)) + except Failed as e: + logger.error(e) + elif method_name == "imdb_chart": + for value in util.get_list(method_data): + if value in imdb.movie_charts and not self.library.is_movie: + raise Failed(f"{self.Type} Error: chart: {value} does not work with show libraries") + elif value in imdb.show_charts and self.library.is_movie: + raise Failed(f"{self.Type} Error: chart: {value} does not work with movie libraries") + elif value in imdb.movie_charts or value in imdb.show_charts: + self.builders.append((method_name, value)) + else: + raise Failed(f"{self.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") + elif method_name == "imdb_watchlist": + for imdb_user in self.config.IMDb.validate_imdb_watchlists(self.Type, method_data, self.language): + self.builders.append((method_name, imdb_user)) + elif method_name == "imdb_award": + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + event_id = util.parse(self.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) + git_event, year_options = self.config.IMDb.get_event_years(event_id) + if not year_options: + raise Failed(f"{self.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") + if "event_year" not in dict_methods: + raise Failed(f"{self.Type} Error: 
imdb_award event_year attribute not found") + og_year = dict_data[dict_methods["event_year"]] + if not og_year: + raise Failed(f"{self.Type} Error: imdb_award event_year attribute is blank") + if og_year in ["all", "latest"]: + event_year = og_year + elif not isinstance(og_year, list) and "-" in str(og_year) and len(str(og_year)) > 7: + try: + min_year, max_year = og_year.split("-") + min_year = int(min_year) + max_year = int(max_year) if max_year != "current" else None + event_year = [] + for option in year_options: + check = int(option.split("-")[0] if "-" in option else option) + if check >= min_year and (max_year is None or check <= max_year): + event_year.append(option) + except ValueError: + raise Failed(f"{self.Type} Error: imdb_award event_year attribute invalid: {og_year}") + else: + event_year = util.parse(self.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) + if (event_year == "all" or len(event_year) > 1) and not git_event: + raise Failed(f"{self.Type} Error: Only specific events work when using multiple years. Event Options: [{', '.join([k for k in self.config.IMDb.events_validation])}]") + award_filters = [] + if "award_filter" in dict_methods: + if not dict_data[dict_methods["award_filter"]]: + raise Failed(f"{self.Type} Error: imdb_award award_filter attribute is blank") + award_filters = util.parse(self.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") + category_filters = [] + if "category_filter" in dict_methods: + if not dict_data[dict_methods["category_filter"]]: + raise Failed(f"{self.Type} Error: imdb_award category_filter attribute is blank") + category_filters = util.parse(self.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") + final_category = [] + final_awards = [] + if award_filters or category_filters: + award_names, category_names = self.config.IMDb.get_award_names(event_id, year_options[0] if event_year == "latest" else event_year) + lower_award = {a.lower(): a for a in award_names if a} + for award_filter in award_filters: + if award_filter in lower_award: + final_awards.append(lower_award[award_filter]) + else: + raise Failed(f"{self.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in in [{', '.join([v for _, v in lower_award.items()])}]") + lower_category = {c.lower(): c for c in category_names if c} + for category_filter in category_filters: + if category_filter in lower_category: + final_category.append(lower_category[category_filter]) + else: + raise Failed(f"{self.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in in [{', '.join([v for _, v in lower_category.items()])}]") + self.builders.append((method_name, { + "event_id": event_id, "event_year": event_year, "award_filter": final_awards if final_awards else None, "category_filter": final_category if final_category else None, + "winning": util.parse(self.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) + })) + elif method_name == "imdb_search": + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} + for search_method, search_data in dict_data.items(): + lower_method = str(search_method).lower() + search_attr, modifier = 
os.path.splitext(lower_method) + if search_data is None: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") + elif lower_method not in imdb.imdb_search_attributes: + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") + elif search_attr == "sort_by": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) + elif search_attr == "title": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name) + elif search_attr == "type": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) + elif search_attr == "topic": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) + elif search_attr == "release": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="date", parent=method_name, date_return="%Y-%m-%d") + elif search_attr == "rating": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) + elif search_attr in ["votes", "imdb_top", "imdb_bottom", "popularity", "runtime"]: + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) + elif search_attr == "genre": + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) + elif search_attr == "event": + events = [] + for event in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + if event in imdb.event_options: + events.append(event) + else: + res = re.search(r'(ev\d+)', event) + if res: + events.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ev\\d+ e.g. ev0000292 or be one of {', '.join([e for e in imdb.event_options])}") + if events: + new_dictionary[lower_method] = events + elif search_attr == "company": + companies = [] + for company in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + if company in imdb.company_options: + companies.append(company) + else: + res = re.search(r'(co\d+)', company) + if res: + companies.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern co\\d+ e.g. 
co0098836 or be one of {', '.join([e for e in imdb.company_options])}") + if companies: + new_dictionary[lower_method] = companies + elif search_attr == "content_rating": + final_list = [] + for content in util.get_list(search_data): + if content: + final_dict = {"region": "US", "rating": None} + if not isinstance(content, dict): + final_dict["rating"] = str(content) + else: + if "rating" not in content or not content["rating"]: + raise Failed(f"{method_name} {search_method} attribute: rating attribute is required") + final_dict["rating"] = str(content["rating"]) + if "region" not in content or not content["region"]: + logger.warning(f"{method_name} {search_method} attribute: region attribute not found defaulting to 'US'") + elif len(str(content["region"])) != 2: + logger.warning(f"{method_name} {search_method} attribute: region attribute: {str(content['region'])} must be only 2 characters defaulting to 'US'") + else: + final_dict["region"] = str(content["region"]).upper() + final_list.append(final_dict) + if final_list: + new_dictionary[lower_method] = final_list + elif search_attr == "country": + countries = [] + for country in util.parse(self.Type, search_method, search_data, datatype="upperlist", parent=method_name): + if country: + if len(str(country)) != 2: + raise Failed(f"{method_name} {search_method} attribute: {country} must be only 2 characters i.e. 'US'") + countries.append(str(country)) + if countries: + new_dictionary[lower_method] = countries + elif search_attr in ["keyword", "language", "alternate_version", "crazy_credit", "location", "goof", "plot", "quote", "soundtrack", "trivia"]: + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name) + elif search_attr == "cast": + casts = [] + for cast in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(nm\d+)', cast) + if res: + casts.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern nm\\d+ e.g. nm00988366") + if casts: + new_dictionary[lower_method] = casts + elif search_attr == "series": + series = [] + for show in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(tt\d+)', show) + if res: + series.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern tt\\d+ e.g. tt00988366") + if series: + new_dictionary[lower_method] = series + elif search_attr == "list": + lists = [] + for new_list in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(ls\d+)', new_list) + if res: + lists.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ls\\d+ e.g. 
ls000024621") + if lists: + new_dictionary[lower_method] = lists + elif search_attr == "adult": + if util.parse(self.Type, search_method, search_data, datatype="bool", parent=method_name): + new_dictionary[lower_method] = True + elif search_attr != "limit": + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") + if len(new_dictionary) > 1: + self.builders.append((method_name, new_dictionary)) + else: + raise Failed(f"{self.Type} Error: {method_name} had no valid fields") + +def letterboxd(self, method_name, method_data): + if method_name.startswith("letterboxd_list"): + letterboxd_lists = self.config.Letterboxd.validate_letterboxd_lists(self.Type, method_data, self.language) + for letterboxd_list in letterboxd_lists: + self.builders.append(("letterboxd_list", letterboxd_list)) + if method_name.endswith("_details"): + self.summaries[method_name] = self.config.Letterboxd.get_list_description(letterboxd_lists[0]["url"], self.language) + +def mal(self, method_name, method_data): + if method_name == "mal_id": + for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): + self.builders.append((method_name, mal_id)) + elif method_name in ["mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special", "mal_popular", "mal_favorite", "mal_suggested"]: + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) + elif method_name in ["mal_season", "mal_userlist", "mal_search"]: + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + if method_name == "mal_season": + if self.current_time.month in [1, 2, 3]: default_season = "winter" + elif self.current_time.month in [4, 5, 6]: default_season = "spring" + elif self.current_time.month in [7, 8, 9]: default_season = "summer" + else: default_season = "fall" + season = util.parse(self.Type, "season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) + if season == "current": + season = default_season + self.builders.append((method_name, { + "season": season, + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), + "year": util.parse(self.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.current_year, parent=method_name, minimum=1917, maximum=self.current_year + 1), + "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), + "starting_only": util.parse(self.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) + })) + elif method_name == "mal_userlist": + self.builders.append((method_name, { + "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "status": util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), + "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, 
maximum=1000) + })) + elif method_name == "mal_search": + final_attributes = {} + final_text = "MyAnimeList Search" + if "sort_by" in dict_methods: + sort = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) + sort_type, sort_direction = sort.split(".") + final_text += f"\nSorted By: {sort}" + final_attributes["order_by"] = sort_type + final_attributes["sort"] = sort_direction + limit = 0 + if "limit" in dict_methods: + limit = util.parse(self.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) + final_text += f"\nLimit: {limit if limit else 'None'}" + if "query" in dict_methods: + final_attributes["q"] = util.parse(self.Type, "query", dict_data, methods=dict_methods, parent=method_name) + final_text += f"\nQuery: {final_attributes['q']}" + if "prefix" in dict_methods: + final_attributes["letter"] = util.parse(self.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) + final_text += f"\nPrefix: {final_attributes['letter']}" + if "type" in dict_methods: + type_list = util.parse(self.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) + final_attributes["type"] = ",".join(type_list) + final_text += f"\nType: {' or '.join(type_list)}" + if "status" in dict_methods: + final_attributes["status"] = util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) + final_text += f"\nStatus: {final_attributes['status']}" + if "genre" in dict_methods: + genre_str = str(util.parse(self.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) + final_text += f"\nGenre: {out_text}" + final_attributes["genres"] = out_ints + if "genre.not" in dict_methods: + genre_str = str(util.parse(self.Type, "genre.not", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) + final_text += f"\nNot Genre: {out_text}" + final_attributes["genres_exclude"] = out_ints + if "studio" in dict_methods: + studio_str = str(util.parse(self.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Studio', studio_str, self.config.MyAnimeList.studios) + final_text += f"\nStudio: {out_text}" + final_attributes["producers"] = out_ints + if "content_rating" in dict_methods: + final_attributes["rating"] = util.parse(self.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) + final_text += f"\nContent Rating: {final_attributes['rating']}" + if "score.gte" in dict_methods: + final_attributes["min_score"] = util.parse(self.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_text += f"\nScore Greater Than or Equal: {final_attributes['min_score']}" + elif "score.gt" in dict_methods: + original_score = util.parse(self.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["min_score"] = original_score + 0.01 + final_text += f"\nScore Greater Than: {original_score}" + if "score.lte" in dict_methods: + final_attributes["max_score"] = util.parse(self.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, 
minimum=0, maximum=10) + final_text += f"\nScore Less Than or Equal: {final_attributes['max_score']}" + elif "score.lt" in dict_methods: + original_score = util.parse(self.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["max_score"] = original_score - 0.01 + final_text += f"\nScore Less Than: {original_score}" + if "min_score" in final_attributes and "max_score" in final_attributes and final_attributes["max_score"] <= final_attributes["min_score"]: + raise Failed(f"{self.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") + if "sfw" in dict_methods: + sfw = util.parse(self.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) + if sfw: + final_attributes["sfw"] = 1 + final_text += f"\nSafe for Work: {final_attributes['sfw']}" + if not final_attributes: + raise Failed(f"{self.Type} Error: no mal_search attributes found") + self.builders.append((method_name, (final_attributes, final_text, limit))) + elif method_name in ["mal_genre", "mal_studio"]: + logger.warning(f"Config Warning: {method_name} will run as a mal_search") + item_list = util.parse(self.Type, method_name[4:], method_data, datatype="commalist") + all_items = self.config.MyAnimeList.genres if method_name == "mal_genre" else self.config.MyAnimeList.studios + final_items = [str(all_items[i]) for i in item_list if i in all_items] + final_text = f"MyAnimeList Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" + self.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) + +def mojo(self, method_name, method_data): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + final = {} + if method_name == "mojo_record": + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.top_options) + elif method_name == "mojo_world": + if "year" not in dict_methods: + raise Failed(f"{self.Type} Error: {method_name} year attribute not found") + og_year = dict_data[dict_methods["year"]] + if not og_year: + raise Failed(f"{self.Type} Error: {method_name} year attribute is blank") + if og_year == "current": + final["year"] = str(self.current_year) # noqa + elif str(og_year).startswith("current-"): + try: + final["year"] = str(self.current_year - int(og_year.split("-")[1])) # noqa + if final["year"] not in mojo.year_options: + raise Failed(f"{self.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} year attribute invalid: {og_year}") + else: + final["year"] = util.parse(self.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) + elif method_name == "mojo_all_time": + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) + final["content_rating_filter"] = util.parse(self.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None + elif method_name == "mojo_never": + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", 
options=self.config.BoxOfficeMojo.never_options) + final["never"] = str(util.parse(self.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" + elif method_name in ["mojo_domestic", "mojo_international"]: + dome = method_name == "mojo_domestic" + final["range"] = util.parse(self.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) + if not dome: + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="international", options=self.config.BoxOfficeMojo.intl_options) + chart_date = self.current_time + if final["range"] != "daily": + _m = "range_data" if final["range"] == "yearly" and "year" not in dict_methods and "range_data" in dict_methods else "year" + if _m not in dict_methods: + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute not found") + og_year = dict_data[dict_methods[_m]] + if not og_year: + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute is blank") + if str(og_year).startswith("current-"): + try: + chart_date = self.current_time - relativedelta(years=int(og_year.split("-")[1])) + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute invalid: {og_year}") + else: + _y = util.parse(self.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) + if _y != "current": + chart_date = self.current_time - relativedelta(years=self.current_time.year - _y) + if final["range"] != "yearly": + if "range_data" not in dict_methods: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute not found") + og_data = dict_data[dict_methods["range_data"]] + if not og_data: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute is blank") + + if final["range"] == "holiday": + final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, options=mojo.holiday_options) + elif final["range"] == "daily": + if og_data == "current": + final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa + elif str(og_data).startswith("current-"): + try: + final["range_data"] = datetime.strftime(self.current_time - timedelta(days=int(og_data.split("-")[1])), "%Y-%m-%d") # noqa + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") + if final["range_data"] == "current": + final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa + elif final["range"] in ["weekend", "weekly"]: + if str(og_data).startswith("current-"): + try: + final_date = chart_date - timedelta(weeks=int(og_data.split("-")[1])) + final_iso = final_date.isocalendar() + final["range_data"] = final_iso.week + final["year"] = final_iso.year + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) + current_iso = chart_date.isocalendar() + final["range_data"] = current_iso.week if _v == "current" else _v + final["year"] = current_iso.year + elif 
final["range"] == "monthly": + if str(og_data).startswith("current-"): + try: + final_date = chart_date - relativedelta(months=int(og_data.split("-")[1])) + final["range_data"] = final_date.month + final["year"] = final_date.year + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) + final["range_data"] = chart_date.month if _v == "current" else util.lower_months[_v] + elif final["range"] == "quarterly": + if str(og_data).startswith("current-"): + try: + final_date = chart_date - relativedelta(months=int(og_data.split("-")[1]) * 3) + final["range_data"] = mojo.quarters[final_date.month] + final["year"] = final_date.year + except ValueError: + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) + final["range_data"] = mojo.quarters[chart_date.month] if _v == "current" else _v + elif final["range"] == "season": + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) + final["range_data"] = mojo.seasons[chart_date.month] if _v == "current" else _v + else: + final["range_data"] = chart_date.year + if "year" not in final: + final["year"] = chart_date.year + if final["year"] < 1977: + raise Failed(f"{self.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") + + final["limit"] = util.parse(self.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 + self.builders.append((method_name, final)) + +def plex(self, method_name, method_data): + if method_name in ["plex_all", "plex_pilots"]: + self.builders.append((method_name, self.builder_level)) + elif method_name == "plex_watchlist": + if method_data not in plex.watchlist_sorts: + logger.warning(f"{self.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") + self.builders.append((method_name, method_data if method_data in plex.watchlist_sorts else "added.asc")) + elif method_name in ["plex_search", "plex_collectionless"]: + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + if method_name == "plex_search": + try: + self.builders.append((method_name, self.build_filter("plex_search", dict_data))) + except FilterFailed as e: + if self.ignore_blank_results: + raise + else: + raise Failed(str(e)) + elif method_name == "plex_collectionless": + prefix_list = util.parse(self.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] + exact_list = util.parse(self.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] + if len(prefix_list) == 0 and len(exact_list) == 0: + raise Failed(f"{self.Type} Error: you must have at least one exclusion") + exact_list.append(self.name) + self.builders.append((method_name, {"exclude_prefix": prefix_list, "exclude": exact_list})) + else: + try: + self.builders.append(("plex_search", self.build_filter("plex_search", {"any": {method_name: method_data}}))) + except 
FilterFailed as e: + if self.ignore_blank_results: + raise + else: + raise Failed(str(e)) + +def reciperr(self, method_name, method_data): + if method_name == "reciperr_list": + for reciperr_list in self.config.Reciperr.validate_list(method_data): + self.builders.append((method_name, reciperr_list)) + elif method_name == "stevenlu_popular": + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, "bool"))) + +def mdblist(self, method_name, method_data): + for mdb_dict in self.config.MDBList.validate_mdblist_lists(self.Type, method_data): + self.builders.append((method_name, mdb_dict)) + +def tautulli(self, method_name, method_data): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + final_dict = { + "list_type": "popular" if method_name == "tautulli_popular" else "watched", + "list_days": util.parse(self.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), + "list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), + "list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + } + buff = final_dict["list_size"] * 3 + if self.library.Tautulli.has_section: + buff = 0 + elif "list_buffer" in dict_methods: + buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) + final_dict["list_buffer"] = buff + self.builders.append((method_name, final_dict)) + +def tmdb(self, method_name, method_data): + if method_name == "tmdb_discover": + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} + for discover_method, discover_data in dict_data.items(): + lower_method = str(discover_method).lower() + discover_attr, modifier = os.path.splitext(lower_method) + if discover_data is None: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute is blank") + elif discover_method.lower() not in tmdb.discover_all: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") + elif self.library.is_movie and discover_attr in tmdb.discover_tv_only: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for show libraries") + elif self.library.is_show and discover_attr in tmdb.discover_movie_only: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") + elif discover_attr == "region": + new_dictionary[discover_attr] = util.parse(self.Type, discover_method, discover_data.upper(), parent=method_name, regex=("^[A-Z]{2}$", "US")) + elif discover_attr == "sort_by": + options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=options) + elif discover_attr == "certification_country": + if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: + new_dictionary[lower_method] = discover_data + else: + raise Failed(f"{self.Type} Error: {method_name} {discover_attr} attribute: must be used with 
either certification, certification.lte, or certification.gte") + elif discover_attr == "certification": + if "certification_country" in dict_data: + new_dictionary[lower_method] = discover_data + else: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") + elif discover_attr == "watch_region": + if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: + new_dictionary[lower_method] = discover_data.upper() + else: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") + elif discover_attr == "with_watch_monetization_types": + if "watch_region" in dict_data: + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) + else: + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") + elif discover_attr in tmdb.discover_booleans: + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="bool", parent=method_name) + elif discover_attr == "vote_average": + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="float", parent=method_name) + elif discover_attr == "with_status": + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) + elif discover_attr == "with_type": + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) + elif discover_attr in tmdb.discover_dates: + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + elif discover_attr in tmdb.discover_years: + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1) + elif discover_attr in tmdb.discover_ints: + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name) + elif discover_attr in tmdb.discover_strings: + new_dictionary[lower_method] = discover_data + elif discover_attr != "limit": + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") + if len(new_dictionary) > 1: + self.builders.append((method_name, new_dictionary)) + else: + raise Failed(f"{self.Type} Error: {method_name} had no valid fields") + elif method_name in tmdb.int_builders: + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) + else: + values = self.config.TMDb.validate_tmdb_ids(method_data, method_name) + if method_name in tmdb.details_builders: + if method_name.startswith(("tmdb_collection", "tmdb_movie", "tmdb_show")): + item = self.config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie) + if item.overview: + self.summaries[method_name] = item.overview + if item.backdrop_url: + self.backgrounds[method_name] = item.backdrop_url + if item.poster_url: + self.posters[method_name] = item.poster_url + elif method_name.startswith(("tmdb_actor", "tmdb_crew", "tmdb_director", "tmdb_producer", "tmdb_writer")): + item = 
self.config.TMDb.get_person(values[0]) + if item.biography: + self.summaries[method_name] = item.biography + if item.profile_path: + self.posters[method_name] = item.profile_url + elif method_name.startswith("tmdb_list"): + item = self.config.TMDb.get_list(values[0]) + if item.description: + self.summaries[method_name] = item.description + if item.poster_url: + self.posters[method_name] = item.poster_url + for value in values: + self.builders.append((method_name[:-8] if method_name in tmdb.details_builders else method_name, value)) + +def trakt(self, method_name, method_data): + if method_name.startswith("trakt_list"): + trakt_lists = self.config.Trakt.validate_list(method_data) + for trakt_list in trakt_lists: + self.builders.append(("trakt_list", trakt_list)) + if method_name.endswith("_details"): + try: + self.summaries[method_name] = self.config.Trakt.list_description(trakt_lists[0]) + except Failed as e: + logger.error(f"Trakt Error: List description not found: {e}") + elif method_name == "trakt_boxoffice": + if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): + self.builders.append((method_name, 10)) + else: + raise Failed(f"{self.Type} Error: {method_name} must be set to true") + elif method_name == "trakt_recommendations": + self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100))) + elif method_name == "sync_to_trakt_list": + if method_data not in self.config.Trakt.slugs: + raise Failed(f"{self.Type} Error: {method_data} invalid. Options {', '.join(self.config.Trakt.slugs)}") + self.sync_to_trakt_list = method_data + elif method_name == "sync_missing_to_trakt_list": + self.sync_missing_to_trakt_list = util.parse(self.Type, method_name, method_data, datatype="bool", default=False) + elif method_name in trakt.builders: + if method_name in ["trakt_chart", "trakt_userlist"]: + trakt_dicts = method_data + final_method = method_name + elif method_name in ["trakt_watchlist", "trakt_collection"]: + trakt_dicts = [] + for trakt_user in util.get_list(method_data, split=False): + trakt_dicts.append({"userlist": method_name[6:], "user": trakt_user}) + final_method = "trakt_userlist" + else: + terms = method_name.split("_") + trakt_dicts = { + "chart": terms[1], + "limit": util.parse(self.Type, method_name, method_data, datatype="int", default=10), + "time_period": terms[2] if len(terms) > 2 else None + } + final_method = "trakt_chart" + if method_name != final_method: + logger.warning(f"{self.Type} Warning: {method_name} will run as {final_method}") + for trakt_dict in self.config.Trakt.validate_chart(self.Type, final_method, trakt_dicts, self.library.is_movie): + self.builders.append((final_method, trakt_dict)) + +def tvdb(self, method_name, method_data): + values = util.get_list(method_data) + if method_name.endswith("_details"): + if method_name.startswith(("tvdb_movie", "tvdb_show")): + item = self.config.TVDb.get_tvdb_obj(values[0], is_movie=method_name.startswith("tvdb_movie")) + if item.summary: + self.summaries[method_name] = item.summary + if item.background_url: + self.backgrounds[method_name] = item.background_url + if item.poster_url: + self.posters[method_name] = item.poster_url + elif method_name.startswith("tvdb_list"): + description, poster = self.config.TVDb.get_list_description(values[0]) + if description: + self.summaries[method_name] = description + if poster: + self.posters[method_name] = poster + for value in values: + self.builders.append((method_name[:-8] if 
method_name.endswith("_details") else method_name, value)) From 57b84f95d0a5484ec13b56bdfd26f8f4915c1451 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 07:36:11 -0400 Subject: [PATCH 05/24] fix: imports in _add_methods --- modules/builder/_add_methods.py | 45 ++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/modules/builder/_add_methods.py b/modules/builder/_add_methods.py index 49e039561..6c4347569 100644 --- a/modules/builder/_add_methods.py +++ b/modules/builder/_add_methods.py @@ -1,9 +1,12 @@ +import os, re from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta from modules import anilist, imdb, mal, mojo, plex, radarr, sonarr, tmdb, trakt,util +from modules.util import Failed, FilterFailed, NotScheduled from requests.exceptions import ConnectionError +from _config import * -def summary(self, method_name, method_data): +def summary(self, logger, method_name, method_data): if method_name == "summary": self.summaries[method_name] = str(method_data).replace("<>", self.key_name) if self.key_name else method_data elif method_name == "tmdb_summary": @@ -28,7 +31,7 @@ def summary(self, method_name, method_data): elif method_name == "icheckmovies_description": self.summaries[method_name] = self.config.ICheckMovies.get_list_description(method_data, self.language) -def poster(self, method_name, method_data): +def poster(self, logger, method_name, method_data): if method_name == "url_poster": try: if not method_data.startswith("https://theposterdb.com/api/assets/"): @@ -56,7 +59,7 @@ def poster(self, method_name, method_data): else: logger.error(f"{self.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") -def background(self, method_name, method_data): +def background(self, logger, method_name, method_data): if method_name == "url_background": try: image_response = self.config.get(method_data, headers=util.header()) @@ -75,7 +78,7 @@ def background(self, method_name, method_data): else: logger.error(f"{self.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") -def details(self, method_name, method_data, method_final, methods): +def details(self, logger, method_name, method_data, method_final, methods): if method_name == "url_theme": self.url_theme = method_data elif method_name == "file_theme": @@ -139,7 +142,7 @@ def details(self, method_name, method_data, method_final, methods): elif method_name in string_details: self.details[method_name] = str(method_data) -def item_details(self, method_name, method_data, method_mod, method_final, methods): +def item_details(self, logger, method_name, method_data, method_mod, method_final, methods): if method_name == "item_label": if "item_label" in methods and "item_label.sync" in methods: raise Failed(f"{self.Type} Error: Cannot use item_label and item_label.sync together") @@ -185,7 +188,7 @@ def item_details(self, method_name, method_data, method_mod, method_final, metho else: self.item_details[method_name] = str(method_data).lower() # noqa -def radarr(self, method_name, method_data): +def radarr(self, logger, method_name, method_data): if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: self.radarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") elif method_name == "radarr_folder": @@ -204,7 +207,7 @@ def radarr(self, method_name, method_data): 
elif method_name == "radarr_all": self.builders.append((method_name, True)) -def sonarr(self, method_name, method_data): +def sonarr(self, logger, method_name, method_data): if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: self.sonarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: @@ -226,7 +229,7 @@ def sonarr(self, method_name, method_data): elif method_name == "sonarr_all": self.builders.append((method_name, True)) -def anidb(self, method_name, method_data): +def anidb(self, logger, method_name, method_data): if method_name == "anidb_popular": self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=30, maximum=30))) elif method_name in ["anidb_id", "anidb_relation"]: @@ -245,7 +248,7 @@ def anidb(self, method_name, method_data): new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) self.builders.append((method_name, new_dictionary)) -def anilist(self, method_name, method_data): +def anilist(self, logger, method_name, method_data): if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]: for anilist_id in self.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): self.builders.append((method_name, anilist_id)) @@ -319,7 +322,7 @@ def anilist(self, method_name, method_data): new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) self.builders.append((method_name, new_dictionary)) -def icheckmovies(self, method_name, method_data): +def icheckmovies(self, logger, method_name, method_data): if method_name.startswith("icheckmovies_list"): icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language) for icheckmovies_list in icheckmovies_lists: @@ -327,7 +330,7 @@ def icheckmovies(self, method_name, method_data): if method_name.endswith("_details"): self.summaries[method_name] = self.config.ICheckMovies.get_list_description(icheckmovies_lists[0], self.language) -def imdb(self, method_name, method_data): +def imdb(self, logger, method_name, method_data): if method_name == "imdb_id": for value in util.get_list(method_data): if str(value).startswith("tt"): @@ -537,7 +540,7 @@ def imdb(self, method_name, method_data): else: raise Failed(f"{self.Type} Error: {method_name} had no valid fields") -def letterboxd(self, method_name, method_data): +def letterboxd(self, logger, method_name, method_data): if method_name.startswith("letterboxd_list"): letterboxd_lists = self.config.Letterboxd.validate_letterboxd_lists(self.Type, method_data, self.language) for letterboxd_list in letterboxd_lists: @@ -545,7 +548,7 @@ def letterboxd(self, method_name, method_data): if method_name.endswith("_details"): self.summaries[method_name] = self.config.Letterboxd.get_list_description(letterboxd_lists[0]["url"], self.language) -def mal(self, method_name, method_data): +def mal(self, logger, method_name, method_data): if method_name == "mal_id": for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): self.builders.append((method_name, mal_id)) @@ -652,7 +655,7 @@ def mal(self, method_name, method_data): final_text = f"MyAnimeList 
Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" self.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) -def mojo(self, method_name, method_data): +def mojo(self, logger, method_name, method_data): for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} final = {} @@ -773,7 +776,7 @@ def mojo(self, method_name, method_data): final["limit"] = util.parse(self.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 self.builders.append((method_name, final)) -def plex(self, method_name, method_data): +def plex(self, logger, method_name, method_data): if method_name in ["plex_all", "plex_pilots"]: self.builders.append((method_name, self.builder_level)) elif method_name == "plex_watchlist": @@ -807,18 +810,18 @@ def plex(self, method_name, method_data): else: raise Failed(str(e)) -def reciperr(self, method_name, method_data): +def reciperr(self, logger, method_name, method_data): if method_name == "reciperr_list": for reciperr_list in self.config.Reciperr.validate_list(method_data): self.builders.append((method_name, reciperr_list)) elif method_name == "stevenlu_popular": self.builders.append((method_name, util.parse(self.Type, method_name, method_data, "bool"))) -def mdblist(self, method_name, method_data): +def mdblist(self, logger, method_name, method_data): for mdb_dict in self.config.MDBList.validate_mdblist_lists(self.Type, method_data): self.builders.append((method_name, mdb_dict)) -def tautulli(self, method_name, method_data): +def tautulli(self, logger, method_name, method_data): for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} final_dict = { @@ -835,7 +838,7 @@ def tautulli(self, method_name, method_data): final_dict["list_buffer"] = buff self.builders.append((method_name, final_dict)) -def tmdb(self, method_name, method_data): +def tmdb(self, logger, method_name, method_data): if method_name == "tmdb_discover": for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} @@ -926,7 +929,7 @@ def tmdb(self, method_name, method_data): for value in values: self.builders.append((method_name[:-8] if method_name in tmdb.details_builders else method_name, value)) -def trakt(self, method_name, method_data): +def trakt(self, logger, method_name, method_data): if method_name.startswith("trakt_list"): trakt_lists = self.config.Trakt.validate_list(method_data) for trakt_list in trakt_lists: @@ -971,7 +974,7 @@ def trakt(self, method_name, method_data): for trakt_dict in self.config.Trakt.validate_chart(self.Type, final_method, trakt_dicts, self.library.is_movie): self.builders.append((final_method, trakt_dict)) -def tvdb(self, method_name, method_data): +def tvdb(self, logger, method_name, method_data): values = util.get_list(method_data) if method_name.endswith("_details"): if method_name.startswith(("tvdb_movie", "tvdb_show")): From 3e680c268f40ab1ea18a02d263cf5891d55e2f46 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 08:21:56 -0400 Subject: [PATCH 06/24] fix: _builder_attribute_setter --- modules/builder/__init__.py | 90 +- modules/builder/_add_methods.py | 995 ---------------- 
modules/builder/_builder_attribute_setter.py | 1085 ++++++++++++++++++ 3 files changed, 1090 insertions(+), 1080 deletions(-) delete mode 100644 modules/builder/_add_methods.py create mode 100644 modules/builder/_builder_attribute_setter.py diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index a0d19a818..ab27df352 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -1,7 +1,7 @@ import os, re, time from arrapi import ArrException from datetime import datetime -from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, mojo, plex, radarr, reciperr, sonarr, tautulli, tmdb, trakt, tvdb, mdblist, util +from modules import plex, tmdb, util from modules.util import Failed, FilterFailed, NonExisting, NotScheduled, NotScheduledRange, Deleted from modules.overlay import Overlay from modules.poster import KometaImage @@ -9,6 +9,8 @@ from plexapi.exceptions import NotFound from plexapi.video import Movie, Show, Season, Episode from urllib.parse import quote +from ._builder_attribute_setter import BuilderAttributeSetter +from ._config import * logger = util.logger class CollectionBuilder: @@ -30,6 +32,7 @@ def __init__(self, config, metadata, name, data, library=None, overlay=None, ext else: self.type = "collection" self.Type = self.type.capitalize() + self.attributeSetter = BuilderAttributeSetter(self, logger) logger.separator(f"{self.mapping_name} {self.Type}{f' in {self.library.name}' if self.library else ''}") logger.info("") @@ -848,51 +851,7 @@ def apply_vars(input_str, var_set, var_key, var_limit): raise Failed(f"{self.Type} Error: {method_final} attribute only allowed in an overlay file") elif self.overlay and method_name not in overlay_attributes: raise Failed(f"{self.Type} Error: {method_final} attribute not allowed in an overlay file") - elif method_name in summary_details: - self._summary(method_name, method_data) - elif method_name in poster_details: - self._poster(method_name, method_data) - elif method_name in background_details: - self._background(method_name, method_data) - elif method_name in details: - self._details(method_name, method_data, method_final, methods) - elif method_name in item_details: - self._item_details(method_name, method_data, method_mod, method_final, methods) - elif method_name in radarr_details or method_name in radarr.builders: - self._radarr(method_name, method_data) - elif method_name in sonarr_details or method_name in sonarr.builders: - self._sonarr(method_name, method_data) - elif method_name in anidb.builders: - self._anidb(method_name, method_data) - elif method_name in anilist.builders: - self._anilist(method_name, method_data) - elif method_name in icheckmovies.builders: - self._icheckmovies(method_name, method_data) - elif method_name in letterboxd.builders: - self._letterboxd(method_name, method_data) - elif method_name in imdb.builders: - self._imdb(method_name, method_data) - elif method_name in mal.builders: - self._mal(method_name, method_data) - elif method_name in mojo.builders: - self._mojo(method_name, method_data) - elif method_name in plex.builders or method_final in plex.searches: - self._plex(method_name, method_data) - elif method_name in reciperr.builders: - self._reciperr(method_name, method_data) - elif method_name in tautulli.builders: - self._tautulli(method_name, method_data) - elif method_name in tmdb.builders: - self._tmdb(method_name, method_data) - elif method_name in trakt.builders or method_name in ["sync_to_trakt_list", "sync_missing_to_trakt_list"]: - 
self._trakt(method_name, method_data) - elif method_name in tvdb.builders: - self._tvdb(method_name, method_data) - elif method_name in mdblist.builders: - self._mdblist(method_name, method_data) - elif method_name == "filters": - self._filters(method_name, method_data) - else: + elif not self.attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod): raise Failed(f"{self.Type} Error: {method_final} attribute not supported") except Failed as e: if self.validate_builders: @@ -983,45 +942,6 @@ def apply_vars(input_str, var_set, var_key, var_limit): logger.info("") logger.info("Validation Successful") - def _filters(self, method_name, method_data): - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - current_filters = [] - validate = True - if "validate" in dict_methods: - if dict_data[dict_methods["validate"]] is None: - raise Failed(f"{self.Type} Error: validate filter attribute is blank") - if not isinstance(dict_data[dict_methods["validate"]], bool): - raise Failed(f"{self.Type} Error: validate filter attribute must be either true or false") - validate = dict_data.pop(dict_methods["validate"]) - for filter_method, filter_data in dict_data.items(): - filter_attr, modifier, filter_final = self.library.split(filter_method) - message = None - if filter_final not in all_filters: - message = f"{self.Type} Error: {filter_final} is not a valid filter attribute" - elif self.builder_level in filters and filter_attr not in filters[self.builder_level]: - message = f"{self.Type} Error: {filter_final} is not a valid {self.builder_level} filter attribute" - elif filter_final is None: - message = f"{self.Type} Error: {filter_final} filter attribute is blank" - else: - try: - final_data = self.validate_attribute(filter_attr, modifier, f"{filter_final} filter", filter_data, validate) - except FilterFailed as e: - raise Failed(e) - if self.builder_level in ["show", "season", "artist", "album"] and filter_attr in sub_filters: - current_filters.append(("episodes" if self.builder_level in ["show", "season"] else "tracks", {filter_final: final_data, "percentage": self.default_percent})) - else: - current_filters.append((filter_final, final_data)) - if message: - if validate: - raise Failed(message) - else: - logger.error(message) - if current_filters: - self.filters.append(current_filters) - self.has_tmdb_filters = any([str(k).split(".")[0] in tmdb_filters for f in self.filters for k, v in f]) - self.has_imdb_filters = any([str(k).split(".")[0] in imdb_filters for f in self.filters for k, v in f]) - def gather_ids(self, method, value): expired = None list_key = None diff --git a/modules/builder/_add_methods.py b/modules/builder/_add_methods.py deleted file mode 100644 index 6c4347569..000000000 --- a/modules/builder/_add_methods.py +++ /dev/null @@ -1,995 +0,0 @@ -import os, re -from datetime import datetime, timedelta -from dateutil.relativedelta import relativedelta -from modules import anilist, imdb, mal, mojo, plex, radarr, sonarr, tmdb, trakt,util -from modules.util import Failed, FilterFailed, NotScheduled -from requests.exceptions import ConnectionError -from _config import * - -def summary(self, logger, method_name, method_data): - if method_name == "summary": - self.summaries[method_name] = str(method_data).replace("<>", self.key_name) if self.key_name else method_data - elif method_name == "tmdb_summary": - self.summaries[method_name] = 
self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, "TMDb ID"), self.library.is_movie).overview - elif method_name == "tmdb_description": - self.summaries[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).description - elif method_name == "tmdb_biography": - self.summaries[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, "TMDb Person ID")).biography - elif method_name == "tvdb_summary": - self.summaries[method_name] = self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).summary - elif method_name == "tvdb_description": - summary, _ = self.config.TVDb.get_list_description(method_data) - if summary: - self.summaries[method_name] = summary - elif method_name == "trakt_description": - try: - self.summaries[method_name] = self.config.Trakt.list_description(self.config.Trakt.validate_list(method_data)[0]) - except Failed as e: - logger.error(f"Trakt Error: List description not found: {e}") - elif method_name == "letterboxd_description": - self.summaries[method_name] = self.config.Letterboxd.get_list_description(method_data, self.language) - elif method_name == "icheckmovies_description": - self.summaries[method_name] = self.config.ICheckMovies.get_list_description(method_data, self.language) - -def poster(self, logger, method_name, method_data): - if method_name == "url_poster": - try: - if not method_data.startswith("https://theposterdb.com/api/assets/"): - image_response = self.config.get(method_data, headers=util.header()) - if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types: - raise ConnectionError - self.posters[method_name] = method_data - except ConnectionError: - logger.warning(f"{self.Type} Warning: No Poster Found at {method_data}") - elif method_name == "tmdb_list_poster": - self.posters[method_name] = self.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).poster_url - elif method_name == "tvdb_list_poster": - _, poster = self.config.TVDb.get_list_description(method_data) - if poster: - self.posters[method_name] = poster - elif method_name == "tmdb_poster": - self.posters[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_url - elif method_name == "tmdb_profile": - self.posters[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_url - elif method_name == "tvdb_poster": - self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).poster_url}" - elif method_name == "file_poster": - if os.path.exists(os.path.abspath(method_data)): - self.posters[method_name] = os.path.abspath(method_data) - else: - logger.error(f"{self.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") - -def background(self, logger, method_name, method_data): - if method_name == "url_background": - try: - image_response = self.config.get(method_data, headers=util.header()) - if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types: - raise ConnectionError - self.backgrounds[method_name] = method_data - except ConnectionError: - logger.warning(f"{self.Type} Warning: No Background Found at {method_data}") - elif method_name == "tmdb_background": - self.backgrounds[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb 
ID'), self.library.is_movie).backdrop_url - elif method_name == "tvdb_background": - self.posters[method_name] = f"{self.config.TVDb.get_tvdb_obj(method_data, is_movie=self.library.is_movie).background_url}" - elif method_name == "file_background": - if os.path.exists(os.path.abspath(method_data)): - self.backgrounds[method_name] = os.path.abspath(method_data) - else: - logger.error(f"{self.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") - -def details(self, logger, method_name, method_data, method_final, methods): - if method_name == "url_theme": - self.url_theme = method_data - elif method_name == "file_theme": - if os.path.exists(os.path.abspath(method_data)): - self.file_theme = os.path.abspath(method_data) - else: - logger.error(f"{self.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") - elif method_name == "tmdb_region": - self.tmdb_region = util.parse(self.Type, method_name, method_data, options=self.config.TMDb.iso_3166_1) - elif method_name == "collection_mode": - try: - self.details[method_name] = util.check_collection_mode(method_data) - except Failed as e: - logger.error(e) - elif method_name == "collection_filtering": - if method_data and str(method_data).lower() in plex.collection_filtering_options: - self.details[method_name] = str(method_data).lower() - else: - logger.error(f"Config Error: {method_data} collection_filtering invalid\n\tadmin (Always the server admin user)\n\tuser (User currently viewing the content)") - elif method_name == "minimum_items": - self.minimum = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1) - elif method_name == "cache_builders": - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", minimum=0) - elif method_name == "default_percent": - self.default_percent = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) - elif method_name == "server_preroll": - self.server_preroll = util.parse(self.Type, method_name, method_data) - elif method_name == "ignore_ids": - self.ignore_ids.extend(util.parse(self.Type, method_name, method_data, datatype="intlist")) - elif method_name == "ignore_imdb_ids": - self.ignore_imdb_ids.extend(util.parse(self.Type, method_name, method_data, datatype="list")) - elif method_name == "label": - if "label" in methods and "label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use label and label.sync together") - if "label.remove" in methods and "label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use label.remove and label.sync together") - if method_final == "label" and "label_sync_mode" in methods and self.data[methods["label_sync_mode"]] == "sync": - self.details["label.sync"] = util.get_list(method_data) if method_data else [] - else: - self.details[method_final] = util.get_list(method_data) if method_data else [] - elif method_name == "changes_webhooks": - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="list") if method_data else None - elif method_name in scheduled_boolean: - if isinstance(method_data, bool): - self.details[method_name] = method_data - elif isinstance(method_data, (int, float)): - self.details[method_name] = method_data > 0 - elif str(method_data).lower() in ["t", "true"]: - self.details[method_name] = True - elif str(method_data).lower() in ["f", "false"]: - self.details[method_name] = False - else: - try: - util.schedule_check(method_name, util.parse(self.Type, method_name, 
method_data), self.current_time, self.config.run_hour) - self.details[method_name] = True - except NotScheduled: - self.details[method_name] = False - elif method_name in boolean_details: - default = self.details[method_name] if method_name in self.details else None - self.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="bool", default=default) - elif method_name in string_details: - self.details[method_name] = str(method_data) - -def item_details(self, logger, method_name, method_data, method_mod, method_final, methods): - if method_name == "item_label": - if "item_label" in methods and "item_label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_label and item_label.sync together") - if "item_label.remove" in methods and "item_label.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_label.remove and item_label.sync together") - self.item_details[method_final] = util.get_list(method_data) if method_data else [] - if method_name == "item_genre": - if "item_genre" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_genre and item_genre.sync together") - if "item_genre.remove" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together") - self.item_details[method_final] = util.get_list(method_data) if method_data else [] - elif method_name == "item_edition": - self.item_details[method_final] = str(method_data) if method_data else "" # noqa - elif method_name == "non_item_remove_label": - if not method_data: - raise Failed(f"{self.Type} Error: non_item_remove_label is blank") - self.item_details[method_final] = util.get_list(method_data) - elif method_name in ["item_radarr_tag", "item_sonarr_tag"]: - if method_name in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.sync together") - if f"{method_name}.remove" in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") - if method_name in methods and f"{method_name}.remove" in methods: - raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.remove together") - self.item_details[method_name] = util.get_list(method_data, lower=True) - self.item_details["apply_tags"] = method_mod[1:] if method_mod else "" - elif method_name == "item_refresh_delay": - self.item_details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", default=0, minimum=0) - elif method_name in item_bool_details: - if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): - self.item_details[method_name] = True - elif method_name in item_false_details: - self.item_details[method_name] = False - elif method_name in plex.item_advance_keys: - key, options = plex.item_advance_keys[method_name] - if method_name in advance_new_agent and self.library.agent not in plex.new_plex_agents: - logger.error(f"Metadata Error: {method_name} attribute only works for with the New Plex Movie Agent and New Plex TV Agent") - elif method_name in advance_show and not self.library.is_show: - logger.error(f"Metadata Error: {method_name} attribute only works for show libraries") - elif str(method_data).lower() not in options: - logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid") - else: - self.item_details[method_name] = str(method_data).lower() 
# noqa - -def radarr(self, logger, method_name, method_data): - if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: - self.radarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") - elif method_name == "radarr_folder": - self.radarr_details["folder"] = method_data - elif method_name == "radarr_availability": - if str(method_data).lower() in radarr.availability_translation: - self.radarr_details["availability"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either announced, cinemas, released or db") - elif method_name == "radarr_quality": - self.radarr_details["quality"] = method_data - elif method_name == "radarr_tag": - self.radarr_details["tag"] = util.get_list(method_data, lower=True) - elif method_name == "radarr_taglist": - self.builders.append((method_name, util.get_list(method_data, lower=True))) - elif method_name == "radarr_all": - self.builders.append((method_name, True)) - -def sonarr(self, logger, method_name, method_data): - if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: - self.sonarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") - elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: - self.sonarr_details[method_name[7:]] = method_data - elif method_name == "sonarr_monitor": - if str(method_data).lower() in sonarr.monitor_translation: - self.sonarr_details["monitor"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") - elif method_name == "sonarr_series": - if str(method_data).lower() in sonarr.series_types: - self.sonarr_details["series"] = str(method_data).lower() - else: - raise Failed(f"{self.Type} Error: {method_name} attribute must be either standard, daily, or anime") - elif method_name == "sonarr_tag": - self.sonarr_details["tag"] = util.get_list(method_data, lower=True) - elif method_name == "sonarr_taglist": - self.builders.append((method_name, util.get_list(method_data, lower=True))) - elif method_name == "sonarr_all": - self.builders.append((method_name, True)) - -def anidb(self, logger, method_name, method_data): - if method_name == "anidb_popular": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=30, maximum=30))) - elif method_name in ["anidb_id", "anidb_relation"]: - for anidb_id in self.config.AniDB.validate_anidb_ids(method_data): - self.builders.append((method_name, anidb_id)) - elif method_name == "anidb_tag": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {} - if "tag" not in dict_methods: - raise Failed(f"{self.Type} Error: anidb_tag tag attribute is required") - elif not dict_data[dict_methods["tag"]]: - raise Failed(f"{self.Type} Error: anidb_tag tag attribute is blank") - else: - new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID") - new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) - 
self.builders.append((method_name, new_dictionary)) - -def anilist(self, logger, method_name, method_data): - if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]: - for anilist_id in self.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): - self.builders.append((method_name, anilist_id)) - elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) - elif method_name == "anilist_userlist": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = { - "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "list_name": util.parse(self.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), - } - score_dict = {} - for search_method, search_data in dict_data.items(): - search_attr, modifier = os.path.splitext(str(search_method).lower()) - if search_attr == "score" and modifier in [".gt", ".gte", ".lt", ".lte"]: - score = util.parse(self.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) - if score > -1: - score_dict[modifier] = score - elif search_attr not in ["username", "list_name", "sort_by"]: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - new_dictionary["score"] = score_dict - self.builders.append((method_name, self.config.AniList.validate_userlist(new_dictionary))) - elif method_name == "anilist_search": - if self.current_time.month in [12, 1, 2]: current_season = "winter" - elif self.current_time.month in [3, 4, 5]: current_season = "spring" - elif self.current_time.month in [6, 7, 8]: current_season = "summer" - else: current_season = "fall" - default_year = self.current_year + 1 if self.current_time.month == 12 else self.current_year - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {} - for search_method, search_data in dict_data.items(): - lower_method = str(search_method).lower() - search_attr, modifier = os.path.splitext(lower_method) - if lower_method not in anilist.searches: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - elif search_attr == "season": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) - if new_dictionary[search_attr] == "current": - new_dictionary[search_attr] = current_season - if "year" not in dict_methods: - logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default") - new_dictionary["year"] = default_year - elif search_attr == "year": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) - elif search_data is None: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") - elif search_attr == "adult": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, 
search_data, datatype="bool", parent=method_name) - elif search_attr == "country": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) - elif search_attr == "source": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) - elif search_attr in ["episodes", "duration", "score", "popularity"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name) - elif search_attr in ["format", "status", "genre", "tag", "tag_category"]: - new_dictionary[lower_method] = self.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.Type, search_method, search_data)) - elif search_attr in ["start", "end"]: - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") - elif search_attr == "min_tag_percent": - new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) - elif search_attr == "search": - new_dictionary[search_attr] = str(search_data) - elif lower_method not in ["sort_by", "limit"]: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - if len(new_dictionary) == 0: - raise Failed(f"{self.Type} Error: {method_name} must have at least one valid search option") - new_dictionary["sort_by"] = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) - new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) - self.builders.append((method_name, new_dictionary)) - -def icheckmovies(self, logger, method_name, method_data): - if method_name.startswith("icheckmovies_list"): - icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language) - for icheckmovies_list in icheckmovies_lists: - self.builders.append(("icheckmovies_list", icheckmovies_list)) - if method_name.endswith("_details"): - self.summaries[method_name] = self.config.ICheckMovies.get_list_description(icheckmovies_lists[0], self.language) - -def imdb(self, logger, method_name, method_data): - if method_name == "imdb_id": - for value in util.get_list(method_data): - if str(value).startswith("tt"): - self.builders.append((method_name, value)) - else: - raise Failed(f"{self.Type} Error: imdb_id {value} must begin with tt") - elif method_name == "imdb_list": - try: - for imdb_dict in self.config.IMDb.validate_imdb_lists(self.Type, method_data, self.language): - self.builders.append((method_name, imdb_dict)) - except Failed as e: - logger.error(e) - elif method_name == "imdb_chart": - for value in util.get_list(method_data): - if value in imdb.movie_charts and not self.library.is_movie: - raise Failed(f"{self.Type} Error: chart: {value} does not work with show libraries") - elif value in imdb.show_charts and self.library.is_movie: - raise Failed(f"{self.Type} Error: chart: {value} does not work with movie libraries") - elif value in imdb.movie_charts or value in imdb.show_charts: - self.builders.append((method_name, value)) - else: - raise Failed(f"{self.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") - elif method_name == "imdb_watchlist": - for imdb_user in 
self.config.IMDb.validate_imdb_watchlists(self.Type, method_data, self.language): - self.builders.append((method_name, imdb_user)) - elif method_name == "imdb_award": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - event_id = util.parse(self.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) - git_event, year_options = self.config.IMDb.get_event_years(event_id) - if not year_options: - raise Failed(f"{self.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") - if "event_year" not in dict_methods: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute not found") - og_year = dict_data[dict_methods["event_year"]] - if not og_year: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute is blank") - if og_year in ["all", "latest"]: - event_year = og_year - elif not isinstance(og_year, list) and "-" in str(og_year) and len(str(og_year)) > 7: - try: - min_year, max_year = og_year.split("-") - min_year = int(min_year) - max_year = int(max_year) if max_year != "current" else None - event_year = [] - for option in year_options: - check = int(option.split("-")[0] if "-" in option else option) - if check >= min_year and (max_year is None or check <= max_year): - event_year.append(option) - except ValueError: - raise Failed(f"{self.Type} Error: imdb_award event_year attribute invalid: {og_year}") - else: - event_year = util.parse(self.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) - if (event_year == "all" or len(event_year) > 1) and not git_event: - raise Failed(f"{self.Type} Error: Only specific events work when using multiple years. 
Event Options: [{', '.join([k for k in self.config.IMDb.events_validation])}]") - award_filters = [] - if "award_filter" in dict_methods: - if not dict_data[dict_methods["award_filter"]]: - raise Failed(f"{self.Type} Error: imdb_award award_filter attribute is blank") - award_filters = util.parse(self.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") - category_filters = [] - if "category_filter" in dict_methods: - if not dict_data[dict_methods["category_filter"]]: - raise Failed(f"{self.Type} Error: imdb_award category_filter attribute is blank") - category_filters = util.parse(self.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") - final_category = [] - final_awards = [] - if award_filters or category_filters: - award_names, category_names = self.config.IMDb.get_award_names(event_id, year_options[0] if event_year == "latest" else event_year) - lower_award = {a.lower(): a for a in award_names if a} - for award_filter in award_filters: - if award_filter in lower_award: - final_awards.append(lower_award[award_filter]) - else: - raise Failed(f"{self.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in in [{', '.join([v for _, v in lower_award.items()])}]") - lower_category = {c.lower(): c for c in category_names if c} - for category_filter in category_filters: - if category_filter in lower_category: - final_category.append(lower_category[category_filter]) - else: - raise Failed(f"{self.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in in [{', '.join([v for _, v in lower_category.items()])}]") - self.builders.append((method_name, { - "event_id": event_id, "event_year": event_year, "award_filter": final_awards if final_awards else None, "category_filter": final_category if final_category else None, - "winning": util.parse(self.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) - })) - elif method_name == "imdb_search": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} - for search_method, search_data in dict_data.items(): - lower_method = str(search_method).lower() - search_attr, modifier = os.path.splitext(lower_method) - if search_data is None: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") - elif lower_method not in imdb.imdb_search_attributes: - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - elif search_attr == "sort_by": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) - elif search_attr == "title": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name) - elif search_attr == "type": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) - elif search_attr == "topic": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) - elif search_attr == "release": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, 
datatype="date", parent=method_name, date_return="%Y-%m-%d") - elif search_attr == "rating": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) - elif search_attr in ["votes", "imdb_top", "imdb_bottom", "popularity", "runtime"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) - elif search_attr == "genre": - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) - elif search_attr == "event": - events = [] - for event in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - if event in imdb.event_options: - events.append(event) - else: - res = re.search(r'(ev\d+)', event) - if res: - events.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ev\\d+ e.g. ev0000292 or be one of {', '.join([e for e in imdb.event_options])}") - if events: - new_dictionary[lower_method] = events - elif search_attr == "company": - companies = [] - for company in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - if company in imdb.company_options: - companies.append(company) - else: - res = re.search(r'(co\d+)', company) - if res: - companies.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern co\\d+ e.g. co0098836 or be one of {', '.join([e for e in imdb.company_options])}") - if companies: - new_dictionary[lower_method] = companies - elif search_attr == "content_rating": - final_list = [] - for content in util.get_list(search_data): - if content: - final_dict = {"region": "US", "rating": None} - if not isinstance(content, dict): - final_dict["rating"] = str(content) - else: - if "rating" not in content or not content["rating"]: - raise Failed(f"{method_name} {search_method} attribute: rating attribute is required") - final_dict["rating"] = str(content["rating"]) - if "region" not in content or not content["region"]: - logger.warning(f"{method_name} {search_method} attribute: region attribute not found defaulting to 'US'") - elif len(str(content["region"])) != 2: - logger.warning(f"{method_name} {search_method} attribute: region attribute: {str(content['region'])} must be only 2 characters defaulting to 'US'") - else: - final_dict["region"] = str(content["region"]).upper() - final_list.append(final_dict) - if final_list: - new_dictionary[lower_method] = final_list - elif search_attr == "country": - countries = [] - for country in util.parse(self.Type, search_method, search_data, datatype="upperlist", parent=method_name): - if country: - if len(str(country)) != 2: - raise Failed(f"{method_name} {search_method} attribute: {country} must be only 2 characters i.e. 
'US'") - countries.append(str(country)) - if countries: - new_dictionary[lower_method] = countries - elif search_attr in ["keyword", "language", "alternate_version", "crazy_credit", "location", "goof", "plot", "quote", "soundtrack", "trivia"]: - new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name) - elif search_attr == "cast": - casts = [] - for cast in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(nm\d+)', cast) - if res: - casts.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern nm\\d+ e.g. nm00988366") - if casts: - new_dictionary[lower_method] = casts - elif search_attr == "series": - series = [] - for show in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(tt\d+)', show) - if res: - series.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern tt\\d+ e.g. tt00988366") - if series: - new_dictionary[lower_method] = series - elif search_attr == "list": - lists = [] - for new_list in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): - res = re.search(r'(ls\d+)', new_list) - if res: - lists.append(res.group(1)) - else: - raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ls\\d+ e.g. ls000024621") - if lists: - new_dictionary[lower_method] = lists - elif search_attr == "adult": - if util.parse(self.Type, search_method, search_data, datatype="bool", parent=method_name): - new_dictionary[lower_method] = True - elif search_attr != "limit": - raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") - if len(new_dictionary) > 1: - self.builders.append((method_name, new_dictionary)) - else: - raise Failed(f"{self.Type} Error: {method_name} had no valid fields") - -def letterboxd(self, logger, method_name, method_data): - if method_name.startswith("letterboxd_list"): - letterboxd_lists = self.config.Letterboxd.validate_letterboxd_lists(self.Type, method_data, self.language) - for letterboxd_list in letterboxd_lists: - self.builders.append(("letterboxd_list", letterboxd_list)) - if method_name.endswith("_details"): - self.summaries[method_name] = self.config.Letterboxd.get_list_description(letterboxd_lists[0]["url"], self.language) - -def mal(self, logger, method_name, method_data): - if method_name == "mal_id": - for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): - self.builders.append((method_name, mal_id)) - elif method_name in ["mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special", "mal_popular", "mal_favorite", "mal_suggested"]: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) - elif method_name in ["mal_season", "mal_userlist", "mal_search"]: - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - if method_name == "mal_season": - if self.current_time.month in [1, 2, 3]: default_season = "winter" - elif self.current_time.month in [4, 5, 6]: default_season = "spring" - elif self.current_time.month in [7, 8, 9]: default_season = "summer" - else: default_season = "fall" - season = 
util.parse(self.Type, "season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) - if season == "current": - season = default_season - self.builders.append((method_name, { - "season": season, - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), - "year": util.parse(self.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.current_year, parent=method_name, minimum=1917, maximum=self.current_year + 1), - "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), - "starting_only": util.parse(self.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) - })) - elif method_name == "mal_userlist": - self.builders.append((method_name, { - "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "status": util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), - "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), - "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=1000) - })) - elif method_name == "mal_search": - final_attributes = {} - final_text = "MyAnimeList Search" - if "sort_by" in dict_methods: - sort = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) - sort_type, sort_direction = sort.split(".") - final_text += f"\nSorted By: {sort}" - final_attributes["order_by"] = sort_type - final_attributes["sort"] = sort_direction - limit = 0 - if "limit" in dict_methods: - limit = util.parse(self.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) - final_text += f"\nLimit: {limit if limit else 'None'}" - if "query" in dict_methods: - final_attributes["q"] = util.parse(self.Type, "query", dict_data, methods=dict_methods, parent=method_name) - final_text += f"\nQuery: {final_attributes['q']}" - if "prefix" in dict_methods: - final_attributes["letter"] = util.parse(self.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) - final_text += f"\nPrefix: {final_attributes['letter']}" - if "type" in dict_methods: - type_list = util.parse(self.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) - final_attributes["type"] = ",".join(type_list) - final_text += f"\nType: {' or '.join(type_list)}" - if "status" in dict_methods: - final_attributes["status"] = util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) - final_text += f"\nStatus: {final_attributes['status']}" - if "genre" in dict_methods: - genre_str = str(util.parse(self.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) - final_text += f"\nGenre: {out_text}" - final_attributes["genres"] = out_ints - if "genre.not" in dict_methods: - genre_str = str(util.parse(self.Type, "genre.not", dict_data, methods=dict_methods, 
parent=method_name)) - out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.config.MyAnimeList.genres) - final_text += f"\nNot Genre: {out_text}" - final_attributes["genres_exclude"] = out_ints - if "studio" in dict_methods: - studio_str = str(util.parse(self.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.Type, 'Studio', studio_str, self.config.MyAnimeList.studios) - final_text += f"\nStudio: {out_text}" - final_attributes["producers"] = out_ints - if "content_rating" in dict_methods: - final_attributes["rating"] = util.parse(self.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) - final_text += f"\nContent Rating: {final_attributes['rating']}" - if "score.gte" in dict_methods: - final_attributes["min_score"] = util.parse(self.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_text += f"\nScore Greater Than or Equal: {final_attributes['min_score']}" - elif "score.gt" in dict_methods: - original_score = util.parse(self.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_attributes["min_score"] = original_score + 0.01 - final_text += f"\nScore Greater Than: {original_score}" - if "score.lte" in dict_methods: - final_attributes["max_score"] = util.parse(self.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_text += f"\nScore Less Than or Equal: {final_attributes['max_score']}" - elif "score.lt" in dict_methods: - original_score = util.parse(self.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) - final_attributes["max_score"] = original_score - 0.01 - final_text += f"\nScore Less Than: {original_score}" - if "min_score" in final_attributes and "max_score" in final_attributes and final_attributes["max_score"] <= final_attributes["min_score"]: - raise Failed(f"{self.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") - if "sfw" in dict_methods: - sfw = util.parse(self.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) - if sfw: - final_attributes["sfw"] = 1 - final_text += f"\nSafe for Work: {final_attributes['sfw']}" - if not final_attributes: - raise Failed(f"{self.Type} Error: no mal_search attributes found") - self.builders.append((method_name, (final_attributes, final_text, limit))) - elif method_name in ["mal_genre", "mal_studio"]: - logger.warning(f"Config Warning: {method_name} will run as a mal_search") - item_list = util.parse(self.Type, method_name[4:], method_data, datatype="commalist") - all_items = self.config.MyAnimeList.genres if method_name == "mal_genre" else self.config.MyAnimeList.studios - final_items = [str(all_items[i]) for i in item_list if i in all_items] - final_text = f"MyAnimeList Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" - self.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) - -def mojo(self, logger, method_name, method_data): - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - final = {} - if method_name == "mojo_record": - final["chart"] = 
util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.top_options) - elif method_name == "mojo_world": - if "year" not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} year attribute not found") - og_year = dict_data[dict_methods["year"]] - if not og_year: - raise Failed(f"{self.Type} Error: {method_name} year attribute is blank") - if og_year == "current": - final["year"] = str(self.current_year) # noqa - elif str(og_year).startswith("current-"): - try: - final["year"] = str(self.current_year - int(og_year.split("-")[1])) # noqa - if final["year"] not in mojo.year_options: - raise Failed(f"{self.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} year attribute invalid: {og_year}") - else: - final["year"] = util.parse(self.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) - elif method_name == "mojo_all_time": - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) - final["content_rating_filter"] = util.parse(self.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None - elif method_name == "mojo_never": - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", options=self.config.BoxOfficeMojo.never_options) - final["never"] = str(util.parse(self.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" - elif method_name in ["mojo_domestic", "mojo_international"]: - dome = method_name == "mojo_domestic" - final["range"] = util.parse(self.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) - if not dome: - final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="international", options=self.config.BoxOfficeMojo.intl_options) - chart_date = self.current_time - if final["range"] != "daily": - _m = "range_data" if final["range"] == "yearly" and "year" not in dict_methods and "range_data" in dict_methods else "year" - if _m not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute not found") - og_year = dict_data[dict_methods[_m]] - if not og_year: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute is blank") - if str(og_year).startswith("current-"): - try: - chart_date = self.current_time - relativedelta(years=int(og_year.split("-")[1])) - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} {_m} attribute invalid: {og_year}") - else: - _y = util.parse(self.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) - if _y != "current": - chart_date = self.current_time - relativedelta(years=self.current_time.year - _y) - if final["range"] != "yearly": - if "range_data" not in dict_methods: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute not found") - og_data = dict_data[dict_methods["range_data"]] - if not og_data: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute is blank") - - if final["range"] == "holiday": - final["range_data"] = 
util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, options=mojo.holiday_options) - elif final["range"] == "daily": - if og_data == "current": - final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa - elif str(og_data).startswith("current-"): - try: - final["range_data"] = datetime.strftime(self.current_time - timedelta(days=int(og_data.split("-")[1])), "%Y-%m-%d") # noqa - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") - if final["range_data"] == "current": - final["range_data"] = datetime.strftime(self.current_time, "%Y-%m-%d") # noqa - elif final["range"] in ["weekend", "weekly"]: - if str(og_data).startswith("current-"): - try: - final_date = chart_date - timedelta(weeks=int(og_data.split("-")[1])) - final_iso = final_date.isocalendar() - final["range_data"] = final_iso.week - final["year"] = final_iso.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) - current_iso = chart_date.isocalendar() - final["range_data"] = current_iso.week if _v == "current" else _v - final["year"] = current_iso.year - elif final["range"] == "monthly": - if str(og_data).startswith("current-"): - try: - final_date = chart_date - relativedelta(months=int(og_data.split("-")[1])) - final["range_data"] = final_date.month - final["year"] = final_date.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) - final["range_data"] = chart_date.month if _v == "current" else util.lower_months[_v] - elif final["range"] == "quarterly": - if str(og_data).startswith("current-"): - try: - final_date = chart_date - relativedelta(months=int(og_data.split("-")[1]) * 3) - final["range_data"] = mojo.quarters[final_date.month] - final["year"] = final_date.year - except ValueError: - raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") - else: - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) - final["range_data"] = mojo.quarters[chart_date.month] if _v == "current" else _v - elif final["range"] == "season": - _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) - final["range_data"] = mojo.seasons[chart_date.month] if _v == "current" else _v - else: - final["range_data"] = chart_date.year - if "year" not in final: - final["year"] = chart_date.year - if final["year"] < 1977: - raise Failed(f"{self.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") - - final["limit"] = util.parse(self.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 - self.builders.append((method_name, final)) - -def plex(self, logger, 
method_name, method_data): - if method_name in ["plex_all", "plex_pilots"]: - self.builders.append((method_name, self.builder_level)) - elif method_name == "plex_watchlist": - if method_data not in plex.watchlist_sorts: - logger.warning(f"{self.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") - self.builders.append((method_name, method_data if method_data in plex.watchlist_sorts else "added.asc")) - elif method_name in ["plex_search", "plex_collectionless"]: - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - if method_name == "plex_search": - try: - self.builders.append((method_name, self.build_filter("plex_search", dict_data))) - except FilterFailed as e: - if self.ignore_blank_results: - raise - else: - raise Failed(str(e)) - elif method_name == "plex_collectionless": - prefix_list = util.parse(self.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] - exact_list = util.parse(self.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] - if len(prefix_list) == 0 and len(exact_list) == 0: - raise Failed(f"{self.Type} Error: you must have at least one exclusion") - exact_list.append(self.name) - self.builders.append((method_name, {"exclude_prefix": prefix_list, "exclude": exact_list})) - else: - try: - self.builders.append(("plex_search", self.build_filter("plex_search", {"any": {method_name: method_data}}))) - except FilterFailed as e: - if self.ignore_blank_results: - raise - else: - raise Failed(str(e)) - -def reciperr(self, logger, method_name, method_data): - if method_name == "reciperr_list": - for reciperr_list in self.config.Reciperr.validate_list(method_data): - self.builders.append((method_name, reciperr_list)) - elif method_name == "stevenlu_popular": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, "bool"))) - -def mdblist(self, logger, method_name, method_data): - for mdb_dict in self.config.MDBList.validate_mdblist_lists(self.Type, method_data): - self.builders.append((method_name, mdb_dict)) - -def tautulli(self, logger, method_name, method_data): - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - final_dict = { - "list_type": "popular" if method_name == "tautulli_popular" else "watched", - "list_days": util.parse(self.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), - "list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), - "list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) - } - buff = final_dict["list_size"] * 3 - if self.library.Tautulli.has_section: - buff = 0 - elif "list_buffer" in dict_methods: - buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) - final_dict["list_buffer"] = buff - self.builders.append((method_name, final_dict)) - -def tmdb(self, logger, method_name, method_data): - if method_name == "tmdb_discover": - for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): - dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.Type, "limit", 
dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} - for discover_method, discover_data in dict_data.items(): - lower_method = str(discover_method).lower() - discover_attr, modifier = os.path.splitext(lower_method) - if discover_data is None: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute is blank") - elif discover_method.lower() not in tmdb.discover_all: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") - elif self.library.is_movie and discover_attr in tmdb.discover_tv_only: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for show libraries") - elif self.library.is_show and discover_attr in tmdb.discover_movie_only: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") - elif discover_attr == "region": - new_dictionary[discover_attr] = util.parse(self.Type, discover_method, discover_data.upper(), parent=method_name, regex=("^[A-Z]{2}$", "US")) - elif discover_attr == "sort_by": - options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=options) - elif discover_attr == "certification_country": - if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: - new_dictionary[lower_method] = discover_data - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") - elif discover_attr == "certification": - if "certification_country" in dict_data: - new_dictionary[lower_method] = discover_data - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") - elif discover_attr == "watch_region": - if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: - new_dictionary[lower_method] = discover_data.upper() - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") - elif discover_attr == "with_watch_monetization_types": - if "watch_region" in dict_data: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) - else: - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") - elif discover_attr in tmdb.discover_booleans: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="bool", parent=method_name) - elif discover_attr == "vote_average": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="float", parent=method_name) - elif discover_attr == "with_status": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) - elif discover_attr == "with_type": - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) - elif discover_attr in tmdb.discover_dates: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, 
discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") - elif discover_attr in tmdb.discover_years: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1) - elif discover_attr in tmdb.discover_ints: - new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name) - elif discover_attr in tmdb.discover_strings: - new_dictionary[lower_method] = discover_data - elif discover_attr != "limit": - raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") - if len(new_dictionary) > 1: - self.builders.append((method_name, new_dictionary)) - else: - raise Failed(f"{self.Type} Error: {method_name} had no valid fields") - elif method_name in tmdb.int_builders: - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) - else: - values = self.config.TMDb.validate_tmdb_ids(method_data, method_name) - if method_name in tmdb.details_builders: - if method_name.startswith(("tmdb_collection", "tmdb_movie", "tmdb_show")): - item = self.config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie) - if item.overview: - self.summaries[method_name] = item.overview - if item.backdrop_url: - self.backgrounds[method_name] = item.backdrop_url - if item.poster_url: - self.posters[method_name] = item.poster_url - elif method_name.startswith(("tmdb_actor", "tmdb_crew", "tmdb_director", "tmdb_producer", "tmdb_writer")): - item = self.config.TMDb.get_person(values[0]) - if item.biography: - self.summaries[method_name] = item.biography - if item.profile_path: - self.posters[method_name] = item.profile_url - elif method_name.startswith("tmdb_list"): - item = self.config.TMDb.get_list(values[0]) - if item.description: - self.summaries[method_name] = item.description - if item.poster_url: - self.posters[method_name] = item.poster_url - for value in values: - self.builders.append((method_name[:-8] if method_name in tmdb.details_builders else method_name, value)) - -def trakt(self, logger, method_name, method_data): - if method_name.startswith("trakt_list"): - trakt_lists = self.config.Trakt.validate_list(method_data) - for trakt_list in trakt_lists: - self.builders.append(("trakt_list", trakt_list)) - if method_name.endswith("_details"): - try: - self.summaries[method_name] = self.config.Trakt.list_description(trakt_lists[0]) - except Failed as e: - logger.error(f"Trakt Error: List description not found: {e}") - elif method_name == "trakt_boxoffice": - if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): - self.builders.append((method_name, 10)) - else: - raise Failed(f"{self.Type} Error: {method_name} must be set to true") - elif method_name == "trakt_recommendations": - self.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100))) - elif method_name == "sync_to_trakt_list": - if method_data not in self.config.Trakt.slugs: - raise Failed(f"{self.Type} Error: {method_data} invalid. 
Options {', '.join(self.config.Trakt.slugs)}") - self.sync_to_trakt_list = method_data - elif method_name == "sync_missing_to_trakt_list": - self.sync_missing_to_trakt_list = util.parse(self.Type, method_name, method_data, datatype="bool", default=False) - elif method_name in trakt.builders: - if method_name in ["trakt_chart", "trakt_userlist"]: - trakt_dicts = method_data - final_method = method_name - elif method_name in ["trakt_watchlist", "trakt_collection"]: - trakt_dicts = [] - for trakt_user in util.get_list(method_data, split=False): - trakt_dicts.append({"userlist": method_name[6:], "user": trakt_user}) - final_method = "trakt_userlist" - else: - terms = method_name.split("_") - trakt_dicts = { - "chart": terms[1], - "limit": util.parse(self.Type, method_name, method_data, datatype="int", default=10), - "time_period": terms[2] if len(terms) > 2 else None - } - final_method = "trakt_chart" - if method_name != final_method: - logger.warning(f"{self.Type} Warning: {method_name} will run as {final_method}") - for trakt_dict in self.config.Trakt.validate_chart(self.Type, final_method, trakt_dicts, self.library.is_movie): - self.builders.append((final_method, trakt_dict)) - -def tvdb(self, logger, method_name, method_data): - values = util.get_list(method_data) - if method_name.endswith("_details"): - if method_name.startswith(("tvdb_movie", "tvdb_show")): - item = self.config.TVDb.get_tvdb_obj(values[0], is_movie=method_name.startswith("tvdb_movie")) - if item.summary: - self.summaries[method_name] = item.summary - if item.background_url: - self.backgrounds[method_name] = item.background_url - if item.poster_url: - self.posters[method_name] = item.poster_url - elif method_name.startswith("tvdb_list"): - description, poster = self.config.TVDb.get_list_description(values[0]) - if description: - self.summaries[method_name] = description - if poster: - self.posters[method_name] = poster - for value in values: - self.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value)) diff --git a/modules/builder/_builder_attribute_setter.py b/modules/builder/_builder_attribute_setter.py new file mode 100644 index 000000000..5350cc6d4 --- /dev/null +++ b/modules/builder/_builder_attribute_setter.py @@ -0,0 +1,1085 @@ +import os, re +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta +from modules import anilist, imdb, mal, mojo, plex, radarr, sonarr, tmdb, trakt,util +from modules.util import Failed, FilterFailed, NotScheduled +from requests.exceptions import ConnectionError +from modules.builder._config import * + +class BuilderAttributeSetter: + def __init__(self, collectionBuilder, logger): + self.collectionBuilder = collectionBuilder + self.logger = logger + + def setAttributes(self, method_name, method_data, method_final, methods, method_mod): + if method_name in summary_details: + self._summary(method_name, method_data) + elif method_name in poster_details: + self._poster(method_name, method_data) + elif method_name in background_details: + self._background(method_name, method_data) + elif method_name in details: + self._details(method_name, method_data, method_final, methods) + elif method_name in item_details: + self._item_details(method_name, method_data, method_mod, method_final, methods) + elif method_name in radarr_details or method_name in radarr.builders: + self._radarr(method_name, method_data) + elif method_name in sonarr_details or method_name in sonarr.builders: + self._sonarr(method_name, method_data) + 
elif method_name in anidb.builders: + self._anidb(method_name, method_data) + elif method_name in anilist.builders: + self._anilist(method_name, method_data) + elif method_name in icheckmovies.builders: + self._icheckmovies(method_name, method_data) + elif method_name in letterboxd.builders: + self._letterboxd(method_name, method_data) + elif method_name in imdb.builders: + self._imdb(method_name, method_data) + elif method_name in mal.builders: + self._mal(method_name, method_data) + elif method_name in mojo.builders: + self._mojo(method_name, method_data) + elif method_name in plex.builders or method_final in plex.searches: + self._plex(method_name, method_data) + elif method_name in reciperr.builders: + self._reciperr(method_name, method_data) + elif method_name in tautulli.builders: + self._tautulli(method_name, method_data) + elif method_name in tmdb.builders: + self._tmdb(method_name, method_data) + elif method_name in trakt.builders or method_name in ["sync_to_trakt_list", "sync_missing_to_trakt_list"]: + self._trakt(method_name, method_data) + elif method_name in tvdb.builders: + self._tvdb(method_name, method_data) + elif method_name in mdblist.builders: + self._mdblist(method_name, method_data) + else: + return False + + def _summary(self, method_name, method_data): + if method_name == "summary": + self.collectionBuilder.summaries[method_name] = str(method_data).replace("<>", self.collectionBuilder.key_name) if self.collectionBuilder.key_name else method_data + elif method_name == "tmdb_summary": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, "TMDb ID"), self.collectionBuilder.library.is_movie).overview + elif method_name == "tmdb_description": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).description + elif method_name == "tmdb_biography": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.TMDb.get_person(util.regex_first_int(method_data, "TMDb Person ID")).biography + elif method_name == "tvdb_summary": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.TVDb.get_tvdb_obj(method_data, is_movie=self.collectionBuilder.library.is_movie).summary + elif method_name == "tvdb_description": + summary, _ = self.collectionBuilder.config.TVDb.get_list_description(method_data) + if summary: + self.collectionBuilder.summaries[method_name] = summary + elif method_name == "trakt_description": + try: + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.Trakt.list_description(self.collectionBuilder.config.Trakt.validate_list(method_data)[0]) + except Failed as e: + self.logger.error(f"Trakt Error: List description not found: {e}") + elif method_name == "letterboxd_description": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.Letterboxd.get_list_description(method_data, self.collectionBuilder.language) + elif method_name == "icheckmovies_description": + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.ICheckMovies.get_list_description(method_data, self.collectionBuilder.language) + + def _poster(self, method_name, method_data): + if method_name == "url_poster": + try: + if not method_data.startswith("https://theposterdb.com/api/assets/"): + image_response = self.collectionBuilder.config.get(method_data, headers=util.header()) + if image_response.status_code 
>= 400 or image_response.headers["Content-Type"] not in util.image_content_types: + raise ConnectionError + self.collectionBuilder.posters[method_name] = method_data + except ConnectionError: + self.logger.warning(f"{self.collectionBuilder.Type} Warning: No Poster Found at {method_data}") + elif method_name == "tmdb_list_poster": + self.collectionBuilder.posters[method_name] = self.collectionBuilder.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).poster_url + elif method_name == "tvdb_list_poster": + _, poster = self.collectionBuilder.config.TVDb.get_list_description(method_data) + if poster: + self.collectionBuilder.posters[method_name] = poster + elif method_name == "tmdb_poster": + self.collectionBuilder.posters[method_name] = self.collectionBuilder.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.collectionBuilder.library.is_movie).poster_url + elif method_name == "tmdb_profile": + self.collectionBuilder.posters[method_name] = self.collectionBuilder.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_url + elif method_name == "tvdb_poster": + self.collectionBuilder.posters[method_name] = f"{self.collectionBuilder.config.TVDb.get_tvdb_obj(method_data, is_movie=self.collectionBuilder.library.is_movie).poster_url}" + elif method_name == "file_poster": + if os.path.exists(os.path.abspath(method_data)): + self.collectionBuilder.posters[method_name] = os.path.abspath(method_data) + else: + self.logger.error(f"{self.collectionBuilder.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") + + def _background(self, method_name, method_data): + if method_name == "url_background": + try: + image_response = self.collectionBuilder.config.get(method_data, headers=util.header()) + if image_response.status_code >= 400 or image_response.headers["Content-Type"] not in util.image_content_types: + raise ConnectionError + self.collectionBuilder.backgrounds[method_name] = method_data + except ConnectionError: + self.logger.warning(f"{self.collectionBuilder.Type} Warning: No Background Found at {method_data}") + elif method_name == "tmdb_background": + self.collectionBuilder.backgrounds[method_name] = self.collectionBuilder.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.collectionBuilder.library.is_movie).backdrop_url + elif method_name == "tvdb_background": + self.collectionBuilder.backgrounds[method_name] = f"{self.collectionBuilder.config.TVDb.get_tvdb_obj(method_data, is_movie=self.collectionBuilder.library.is_movie).background_url}" + elif method_name == "file_background": + if os.path.exists(os.path.abspath(method_data)): + self.collectionBuilder.backgrounds[method_name] = os.path.abspath(method_data) + else: + self.logger.error(f"{self.collectionBuilder.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") + + def _details(self, method_name, method_data, method_final, methods): + if method_name == "url_theme": + self.collectionBuilder.url_theme = method_data + elif method_name == "file_theme": + if os.path.exists(os.path.abspath(method_data)): + self.collectionBuilder.file_theme = os.path.abspath(method_data) + else: + self.logger.error(f"{self.collectionBuilder.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") + elif method_name == "tmdb_region": + self.collectionBuilder.tmdb_region = util.parse(self.collectionBuilder.Type, method_name, method_data, 
options=self.collectionBuilder.config.TMDb.iso_3166_1) + elif method_name == "collection_mode": + try: + self.collectionBuilder.details[method_name] = util.check_collection_mode(method_data) + except Failed as e: + self.logger.error(e) + elif method_name == "collection_filtering": + if method_data and str(method_data).lower() in plex.collection_filtering_options: + self.collectionBuilder.details[method_name] = str(method_data).lower() + else: + self.logger.error(f"Config Error: {method_data} collection_filtering invalid\n\tadmin (Always the server admin user)\n\tuser (User currently viewing the content)") + elif method_name == "minimum_items": + self.collectionBuilder.minimum = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=1) + elif method_name == "cache_builders": + self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=0) + elif method_name == "default_percent": + self.collectionBuilder.default_percent = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) + elif method_name == "server_preroll": + self.collectionBuilder.server_preroll = util.parse(self.collectionBuilder.Type, method_name, method_data) + elif method_name == "ignore_ids": + self.collectionBuilder.ignore_ids.extend(util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="intlist")) + elif method_name == "ignore_imdb_ids": + self.collectionBuilder.ignore_imdb_ids.extend(util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="list")) + elif method_name == "label": + if "label" in methods and "label.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use label and label.sync together") + if "label.remove" in methods and "label.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use label.remove and label.sync together") + if method_final == "label" and "label_sync_mode" in methods and self.collectionBuilder.data[methods["label_sync_mode"]] == "sync": + self.collectionBuilder.details["label.sync"] = util.get_list(method_data) if method_data else [] + else: + self.collectionBuilder.details[method_final] = util.get_list(method_data) if method_data else [] + elif method_name == "changes_webhooks": + self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="list") if method_data else None + elif method_name in scheduled_boolean: + if isinstance(method_data, bool): + self.collectionBuilder.details[method_name] = method_data + elif isinstance(method_data, (int, float)): + self.collectionBuilder.details[method_name] = method_data > 0 + elif str(method_data).lower() in ["t", "true"]: + self.collectionBuilder.details[method_name] = True + elif str(method_data).lower() in ["f", "false"]: + self.collectionBuilder.details[method_name] = False + else: + try: + util.schedule_check(method_name, util.parse(self.collectionBuilder.Type, method_name, method_data), self.collectionBuilder.current_time, self.collectionBuilder.config.run_hour) + self.collectionBuilder.details[method_name] = True + except NotScheduled: + self.collectionBuilder.details[method_name] = False + elif method_name in boolean_details: + default = self.collectionBuilder.details[method_name] if method_name in self.collectionBuilder.details else None + self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, 
method_data, datatype="bool", default=default) + elif method_name in string_details: + self.collectionBuilder.details[method_name] = str(method_data) + + def _item_details(self, method_name, method_data, method_mod, method_final, methods): + if method_name == "item_label": + if "item_label" in methods and "item_label.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_label and item_label.sync together") + if "item_label.remove" in methods and "item_label.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_label.remove and item_label.sync together") + self.collectionBuilder.item_details[method_final] = util.get_list(method_data) if method_data else [] + if method_name == "item_genre": + if "item_genre" in methods and "item_genre.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_genre and item_genre.sync together") + if "item_genre.remove" in methods and "item_genre.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_genre.remove and item_genre.sync together") + self.collectionBuilder.item_details[method_final] = util.get_list(method_data) if method_data else [] + elif method_name == "item_edition": + self.collectionBuilder.item_details[method_final] = str(method_data) if method_data else "" # noqa + elif method_name == "non_item_remove_label": + if not method_data: + raise Failed(f"{self.collectionBuilder.Type} Error: non_item_remove_label is blank") + self.collectionBuilder.item_details[method_final] = util.get_list(method_data) + elif method_name in ["item_radarr_tag", "item_sonarr_tag"]: + if method_name in methods and f"{method_name}.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name} and {method_name}.sync together") + if f"{method_name}.remove" in methods and f"{method_name}.sync" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") + if method_name in methods and f"{method_name}.remove" in methods: + raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name} and {method_name}.remove together") + self.collectionBuilder.item_details[method_name] = util.get_list(method_data, lower=True) + self.collectionBuilder.item_details["apply_tags"] = method_mod[1:] if method_mod else "" + elif method_name == "item_refresh_delay": + self.collectionBuilder.item_details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=0, minimum=0) + elif method_name in item_bool_details: + if util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False): + self.collectionBuilder.item_details[method_name] = True + elif method_name in item_false_details: + self.collectionBuilder.item_details[method_name] = False + elif method_name in plex.item_advance_keys: + key, options = plex.item_advance_keys[method_name] + if method_name in advance_new_agent and self.collectionBuilder.library.agent not in plex.new_plex_agents: + self.logger.error(f"Metadata Error: {method_name} attribute only works with the New Plex Movie Agent and New Plex TV Agent") + elif method_name in advance_show and not self.collectionBuilder.library.is_show: + self.logger.error(f"Metadata Error: {method_name} attribute only works for show libraries") + elif str(method_data).lower() not in options: + self.logger.error(f"Metadata Error: {method_data} {method_name} attribute 
invalid") + else: + self.collectionBuilder.item_details[method_name] = str(method_data).lower() # noqa + + def _radarr(self, method_name, method_data): + if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: + self.collectionBuilder.radarr_details[method_name[7:]] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool") + elif method_name == "radarr_folder": + self.collectionBuilder.radarr_details["folder"] = method_data + elif method_name == "radarr_availability": + if str(method_data).lower() in radarr.availability_translation: + self.collectionBuilder.radarr_details["availability"] = str(method_data).lower() + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either announced, cinemas, released or db") + elif method_name == "radarr_quality": + self.collectionBuilder.radarr_details["quality"] = method_data + elif method_name == "radarr_tag": + self.collectionBuilder.radarr_details["tag"] = util.get_list(method_data, lower=True) + elif method_name == "radarr_taglist": + self.collectionBuilder.builders.append((method_name, util.get_list(method_data, lower=True))) + elif method_name == "radarr_all": + self.collectionBuilder.builders.append((method_name, True)) + + def _sonarr(self, method_name, method_data): + if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: + self.collectionBuilder.sonarr_details[method_name[7:]] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool") + elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: + self.collectionBuilder.sonarr_details[method_name[7:]] = method_data + elif method_name == "sonarr_monitor": + if str(method_data).lower() in sonarr.monitor_translation: + self.collectionBuilder.sonarr_details["monitor"] = str(method_data).lower() + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") + elif method_name == "sonarr_series": + if str(method_data).lower() in sonarr.series_types: + self.collectionBuilder.sonarr_details["series"] = str(method_data).lower() + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either standard, daily, or anime") + elif method_name == "sonarr_tag": + self.collectionBuilder.sonarr_details["tag"] = util.get_list(method_data, lower=True) + elif method_name == "sonarr_taglist": + self.collectionBuilder.builders.append((method_name, util.get_list(method_data, lower=True))) + elif method_name == "sonarr_all": + self.collectionBuilder.builders.append((method_name, True)) + + def _anidb(self, method_name, method_data): + if method_name == "anidb_popular": + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=30, maximum=30))) + elif method_name in ["anidb_id", "anidb_relation"]: + for anidb_id in self.collectionBuilder.config.AniDB.validate_anidb_ids(method_data): + self.collectionBuilder.builders.append((method_name, anidb_id)) + elif method_name == "anidb_tag": + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in 
dict_data} + new_dictionary = {} + if "tag" not in dict_methods: + raise Failed(f"{self.collectionBuilder.Type} Error: anidb_tag tag attribute is required") + elif not dict_data[dict_methods["tag"]]: + raise Failed(f"{self.collectionBuilder.Type} Error: anidb_tag tag attribute is blank") + else: + new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID") + new_dictionary["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) + self.collectionBuilder.builders.append((method_name, new_dictionary)) + + def _anilist(self, method_name, method_data): + if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]: + for anilist_id in self.collectionBuilder.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): + self.collectionBuilder.builders.append((method_name, anilist_id)) + elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]: + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10))) + elif method_name == "anilist_userlist": + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = { + "username": util.parse(self.collectionBuilder.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "list_name": util.parse(self.collectionBuilder.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), + "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), + } + score_dict = {} + for search_method, search_data in dict_data.items(): + search_attr, modifier = os.path.splitext(str(search_method).lower()) + if search_attr == "score" and modifier in [".gt", ".gte", ".lt", ".lte"]: + score = util.parse(self.collectionBuilder.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) + if score > -1: + score_dict[modifier] = score + elif search_attr not in ["username", "list_name", "sort_by"]: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + new_dictionary["score"] = score_dict + self.collectionBuilder.builders.append((method_name, self.collectionBuilder.config.AniList.validate_userlist(new_dictionary))) + elif method_name == "anilist_search": + if self.collectionBuilder.current_time.month in [12, 1, 2]: current_season = "winter" + elif self.collectionBuilder.current_time.month in [3, 4, 5]: current_season = "spring" + elif self.collectionBuilder.current_time.month in [6, 7, 8]: current_season = "summer" + else: current_season = "fall" + default_year = self.collectionBuilder.current_year + 1 if self.collectionBuilder.current_time.month == 12 else self.collectionBuilder.current_year + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {} + for search_method, search_data in dict_data.items(): + lower_method = str(search_method).lower() + search_attr, modifier = os.path.splitext(lower_method) + if lower_method not in anilist.searches: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} 
{search_method} attribute not supported") + elif search_attr == "season": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) + if new_dictionary[search_attr] == "current": + new_dictionary[search_attr] = current_season + if "year" not in dict_methods: + self.logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default") + new_dictionary["year"] = default_year + elif search_attr == "year": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) + elif search_data is None: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute is blank") + elif search_attr == "adult": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="bool", parent=method_name) + elif search_attr == "country": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) + elif search_attr == "source": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) + elif search_attr in ["episodes", "duration", "score", "popularity"]: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="int", parent=method_name) + elif search_attr in ["format", "status", "genre", "tag", "tag_category"]: + new_dictionary[lower_method] = self.collectionBuilder.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.collectionBuilder.Type, search_method, search_data)) + elif search_attr in ["start", "end"]: + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + elif search_attr == "min_tag_percent": + new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) + elif search_attr == "search": + new_dictionary[search_attr] = str(search_data) + elif lower_method not in ["sort_by", "limit"]: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + if len(new_dictionary) == 0: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} must have at least one valid search option") + new_dictionary["sort_by"] = util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) + new_dictionary["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + self.collectionBuilder.builders.append((method_name, new_dictionary)) + + def _icheckmovies(self, method_name, method_data): + if method_name.startswith("icheckmovies_list"): + icheckmovies_lists = self.collectionBuilder.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.collectionBuilder.language) + for icheckmovies_list in icheckmovies_lists: + self.collectionBuilder.builders.append(("icheckmovies_list", icheckmovies_list)) + if method_name.endswith("_details"): + self.collectionBuilder.summaries[method_name] = 
self.collectionBuilder.config.ICheckMovies.get_list_description(icheckmovies_lists[0], self.collectionBuilder.language) + + def _imdb(self, method_name, method_data): + if method_name == "imdb_id": + for value in util.get_list(method_data): + if str(value).startswith("tt"): + self.collectionBuilder.builders.append((method_name, value)) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_id {value} must begin with tt") + elif method_name == "imdb_list": + try: + for imdb_dict in self.collectionBuilder.config.IMDb.validate_imdb_lists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language): + self.collectionBuilder.builders.append((method_name, imdb_dict)) + except Failed as e: + self.logger.error(e) + elif method_name == "imdb_chart": + for value in util.get_list(method_data): + if value in imdb.movie_charts and not self.collectionBuilder.library.is_movie: + raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} does not work with show libraries") + elif value in imdb.show_charts and self.collectionBuilder.library.is_movie: + raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} does not work with movie libraries") + elif value in imdb.movie_charts or value in imdb.show_charts: + self.collectionBuilder.builders.append((method_name, value)) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") + elif method_name == "imdb_watchlist": + for imdb_user in self.collectionBuilder.config.IMDb.validate_imdb_watchlists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language): + self.collectionBuilder.builders.append((method_name, imdb_user)) + elif method_name == "imdb_award": + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + event_id = util.parse(self.collectionBuilder.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) + git_event, year_options = self.collectionBuilder.config.IMDb.get_event_years(event_id) + if not year_options: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") + if "event_year" not in dict_methods: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute not found") + og_year = dict_data[dict_methods["event_year"]] + if not og_year: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute is blank") + if og_year in ["all", "latest"]: + event_year = og_year + elif not isinstance(og_year, list) and "-" in str(og_year) and len(str(og_year)) > 7: + try: + min_year, max_year = og_year.split("-") + min_year = int(min_year) + max_year = int(max_year) if max_year != "current" else None + event_year = [] + for option in year_options: + check = int(option.split("-")[0] if "-" in option else option) + if check >= min_year and (max_year is None or check <= max_year): + event_year.append(option) + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute invalid: {og_year}") + else: + event_year = util.parse(self.collectionBuilder.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) + if (event_year == "all" or len(event_year) > 1) and not git_event: + raise Failed(f"{self.collectionBuilder.Type} Error: Only specific events work when using multiple years. 
Event Options: [{', '.join([k for k in self.collectionBuilder.config.IMDb.events_validation])}]") + award_filters = [] + if "award_filter" in dict_methods: + if not dict_data[dict_methods["award_filter"]]: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award award_filter attribute is blank") + award_filters = util.parse(self.collectionBuilder.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") + category_filters = [] + if "category_filter" in dict_methods: + if not dict_data[dict_methods["category_filter"]]: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award category_filter attribute is blank") + category_filters = util.parse(self.collectionBuilder.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") + final_category = [] + final_awards = [] + if award_filters or category_filters: + award_names, category_names = self.collectionBuilder.config.IMDb.get_award_names(event_id, year_options[0] if event_year == "latest" else event_year) + lower_award = {a.lower(): a for a in award_names if a} + for award_filter in award_filters: + if award_filter in lower_award: + final_awards.append(lower_award[award_filter]) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in [{', '.join([v for _, v in lower_award.items()])}]") + lower_category = {c.lower(): c for c in category_names if c} + for category_filter in category_filters: + if category_filter in lower_category: + final_category.append(lower_category[category_filter]) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in [{', '.join([v for _, v in lower_category.items()])}]") + self.collectionBuilder.builders.append((method_name, { + "event_id": event_id, "event_year": event_year, "award_filter": final_awards if final_awards else None, "category_filter": final_category if final_category else None, + "winning": util.parse(self.collectionBuilder.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) + })) + elif method_name == "imdb_search": + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {"limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} + for search_method, search_data in dict_data.items(): + lower_method = str(search_method).lower() + search_attr, modifier = os.path.splitext(lower_method) + if search_data is None: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute is blank") + elif lower_method not in imdb.imdb_search_attributes: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + elif search_attr == "sort_by": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) + elif search_attr == "title": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, parent=method_name) + elif search_attr == "type": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) + elif 
search_attr == "topic": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) + elif search_attr == "release": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="date", parent=method_name, date_return="%Y-%m-%d") + elif search_attr == "rating": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) + elif search_attr in ["votes", "imdb_top", "imdb_bottom", "popularity", "runtime"]: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) + elif search_attr == "genre": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) + elif search_attr == "event": + events = [] + for event in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + if event in imdb.event_options: + events.append(event) + else: + res = re.search(r'(ev\d+)', event) + if res: + events.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ev\\d+ e.g. ev0000292 or be one of {', '.join([e for e in imdb.event_options])}") + if events: + new_dictionary[lower_method] = events + elif search_attr == "company": + companies = [] + for company in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + if company in imdb.company_options: + companies.append(company) + else: + res = re.search(r'(co\d+)', company) + if res: + companies.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern co\\d+ e.g. co0098836 or be one of {', '.join([e for e in imdb.company_options])}") + if companies: + new_dictionary[lower_method] = companies + elif search_attr == "content_rating": + final_list = [] + for content in util.get_list(search_data): + if content: + final_dict = {"region": "US", "rating": None} + if not isinstance(content, dict): + final_dict["rating"] = str(content) + else: + if "rating" not in content or not content["rating"]: + raise Failed(f"{method_name} {search_method} attribute: rating attribute is required") + final_dict["rating"] = str(content["rating"]) + if "region" not in content or not content["region"]: + self.logger.warning(f"{method_name} {search_method} attribute: region attribute not found defaulting to 'US'") + elif len(str(content["region"])) != 2: + self.logger.warning(f"{method_name} {search_method} attribute: region attribute: {str(content['region'])} must be only 2 characters defaulting to 'US'") + else: + final_dict["region"] = str(content["region"]).upper() + final_list.append(final_dict) + if final_list: + new_dictionary[lower_method] = final_list + elif search_attr == "country": + countries = [] + for country in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="upperlist", parent=method_name): + if country: + if len(str(country)) != 2: + raise Failed(f"{method_name} {search_method} attribute: {country} must be only 2 characters i.e. 
'US'") + countries.append(str(country)) + if countries: + new_dictionary[lower_method] = countries + elif search_attr in ["keyword", "language", "alternate_version", "crazy_credit", "location", "goof", "plot", "quote", "soundtrack", "trivia"]: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name) + elif search_attr == "cast": + casts = [] + for cast in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(nm\d+)', cast) + if res: + casts.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern nm\\d+ e.g. nm00988366") + if casts: + new_dictionary[lower_method] = casts + elif search_attr == "series": + series = [] + for show in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(tt\d+)', show) + if res: + series.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern tt\\d+ e.g. tt00988366") + if series: + new_dictionary[lower_method] = series + elif search_attr == "list": + lists = [] + for new_list in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + res = re.search(r'(ls\d+)', new_list) + if res: + lists.append(res.group(1)) + else: + raise Failed(f"{method_name} {search_method} attribute: {search_data} must match pattern ls\\d+ e.g. ls000024621") + if lists: + new_dictionary[lower_method] = lists + elif search_attr == "adult": + if util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="bool", parent=method_name): + new_dictionary[lower_method] = True + elif search_attr != "limit": + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + if len(new_dictionary) > 1: + self.collectionBuilder.builders.append((method_name, new_dictionary)) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} had no valid fields") + + def _letterboxd(self, method_name, method_data): + if method_name.startswith("letterboxd_list"): + letterboxd_lists = self.collectionBuilder.config.Letterboxd.validate_letterboxd_lists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language) + for letterboxd_list in letterboxd_lists: + self.collectionBuilder.builders.append(("letterboxd_list", letterboxd_list)) + if method_name.endswith("_details"): + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.Letterboxd.get_list_description(letterboxd_lists[0]["url"], self.collectionBuilder.language) + + def _mal(self, method_name, method_data): + if method_name == "mal_id": + for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): + self.collectionBuilder.builders.append((method_name, mal_id)) + elif method_name in ["mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special", "mal_popular", "mal_favorite", "mal_suggested"]: + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) + elif method_name in ["mal_season", "mal_userlist", "mal_search"]: + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): 
dm for dm in dict_data} + if method_name == "mal_season": + if self.collectionBuilder.current_time.month in [1, 2, 3]: default_season = "winter" + elif self.collectionBuilder.current_time.month in [4, 5, 6]: default_season = "spring" + elif self.collectionBuilder.current_time.month in [7, 8, 9]: default_season = "summer" + else: default_season = "fall" + season = util.parse(self.collectionBuilder.Type, "season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) + if season == "current": + season = default_season + self.collectionBuilder.builders.append((method_name, { + "season": season, + "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), + "year": util.parse(self.collectionBuilder.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.collectionBuilder.current_year, parent=method_name, minimum=1917, maximum=self.collectionBuilder.current_year + 1), + "limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), + "starting_only": util.parse(self.collectionBuilder.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) + })) + elif method_name == "mal_userlist": + self.collectionBuilder.builders.append((method_name, { + "username": util.parse(self.collectionBuilder.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "status": util.parse(self.collectionBuilder.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), + "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), + "limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=1000) + })) + elif method_name == "mal_search": + final_attributes = {} + final_text = "MyAnimeList Search" + if "sort_by" in dict_methods: + sort = util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) + sort_type, sort_direction = sort.split(".") + final_text += f"\nSorted By: {sort}" + final_attributes["order_by"] = sort_type + final_attributes["sort"] = sort_direction + limit = 0 + if "limit" in dict_methods: + limit = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) + final_text += f"\nLimit: {limit if limit else 'None'}" + if "query" in dict_methods: + final_attributes["q"] = util.parse(self.collectionBuilder.Type, "query", dict_data, methods=dict_methods, parent=method_name) + final_text += f"\nQuery: {final_attributes['q']}" + if "prefix" in dict_methods: + final_attributes["letter"] = util.parse(self.collectionBuilder.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) + final_text += f"\nPrefix: {final_attributes['letter']}" + if "type" in dict_methods: + type_list = util.parse(self.collectionBuilder.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) + final_attributes["type"] = ",".join(type_list) + final_text += f"\nType: {' or 
'.join(type_list)}" + if "status" in dict_methods: + final_attributes["status"] = util.parse(self.collectionBuilder.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) + final_text += f"\nStatus: {final_attributes['status']}" + if "genre" in dict_methods: + genre_str = str(util.parse(self.collectionBuilder.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) + final_text += f"\nGenre: {out_text}" + final_attributes["genres"] = out_ints + if "genre.not" in dict_methods: + genre_str = str(util.parse(self.collectionBuilder.Type, "genre.not", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) + final_text += f"\nNot Genre: {out_text}" + final_attributes["genres_exclude"] = out_ints + if "studio" in dict_methods: + studio_str = str(util.parse(self.collectionBuilder.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Studio', studio_str, self.collectionBuilder.config.MyAnimeList.studios) + final_text += f"\nStudio: {out_text}" + final_attributes["producers"] = out_ints + if "content_rating" in dict_methods: + final_attributes["rating"] = util.parse(self.collectionBuilder.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) + final_text += f"\nContent Rating: {final_attributes['rating']}" + if "score.gte" in dict_methods: + final_attributes["min_score"] = util.parse(self.collectionBuilder.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_text += f"\nScore Greater Than or Equal: {final_attributes['min_score']}" + elif "score.gt" in dict_methods: + original_score = util.parse(self.collectionBuilder.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["min_score"] = original_score + 0.01 + final_text += f"\nScore Greater Than: {original_score}" + if "score.lte" in dict_methods: + final_attributes["max_score"] = util.parse(self.collectionBuilder.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_text += f"\nScore Less Than or Equal: {final_attributes['max_score']}" + elif "score.lt" in dict_methods: + original_score = util.parse(self.collectionBuilder.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["max_score"] = original_score - 0.01 + final_text += f"\nScore Less Than: {original_score}" + if "min_score" in final_attributes and "max_score" in final_attributes and final_attributes["max_score"] <= final_attributes["min_score"]: + raise Failed(f"{self.collectionBuilder.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") + if "sfw" in dict_methods: + sfw = util.parse(self.collectionBuilder.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) + if sfw: + final_attributes["sfw"] = 1 + final_text += f"\nSafe for Work: {final_attributes['sfw']}" + if not final_attributes: + raise Failed(f"{self.collectionBuilder.Type} Error: no mal_search attributes found") + 
self.collectionBuilder.builders.append((method_name, (final_attributes, final_text, limit))) + elif method_name in ["mal_genre", "mal_studio"]: + self.logger.warning(f"Config Warning: {method_name} will run as a mal_search") + item_list = util.parse(self.collectionBuilder.Type, method_name[4:], method_data, datatype="commalist") + all_items = self.collectionBuilder.config.MyAnimeList.genres if method_name == "mal_genre" else self.collectionBuilder.config.MyAnimeList.studios + final_items = [str(all_items[i]) for i in item_list if i in all_items] + final_text = f"MyAnimeList Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" + self.collectionBuilder.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) + + def _mojo(self, method_name, method_data): + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + final = {} + if method_name == "mojo_record": + final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.top_options) + elif method_name == "mojo_world": + if "year" not in dict_methods: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute not found") + og_year = dict_data[dict_methods["year"]] + if not og_year: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute is blank") + if og_year == "current": + final["year"] = str(self.collectionBuilder.current_year) # noqa + elif str(og_year).startswith("current-"): + try: + final["year"] = str(self.collectionBuilder.current_year - int(og_year.split("-")[1])) # noqa + if final["year"] not in mojo.year_options: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute invalid: {og_year}") + else: + final["year"] = util.parse(self.collectionBuilder.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) + elif method_name == "mojo_all_time": + final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) + final["content_rating_filter"] = util.parse(self.collectionBuilder.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None + elif method_name == "mojo_never": + final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", options=self.collectionBuilder.config.BoxOfficeMojo.never_options) + final["never"] = str(util.parse(self.collectionBuilder.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" + elif method_name in ["mojo_domestic", "mojo_international"]: + dome = method_name == "mojo_domestic" + final["range"] = util.parse(self.collectionBuilder.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) + if not dome: + final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, 
parent=method_name, default="international", options=self.collectionBuilder.config.BoxOfficeMojo.intl_options) + chart_date = self.collectionBuilder.current_time + if final["range"] != "daily": + _m = "range_data" if final["range"] == "yearly" and "year" not in dict_methods and "range_data" in dict_methods else "year" + if _m not in dict_methods: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute not found") + og_year = dict_data[dict_methods[_m]] + if not og_year: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute is blank") + if str(og_year).startswith("current-"): + try: + chart_date = self.collectionBuilder.current_time - relativedelta(years=int(og_year.split("-")[1])) + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute invalid: {og_year}") + else: + _y = util.parse(self.collectionBuilder.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) + if _y != "current": + chart_date = self.collectionBuilder.current_time - relativedelta(years=self.collectionBuilder.current_time.year - _y) + if final["range"] != "yearly": + if "range_data" not in dict_methods: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute not found") + og_data = dict_data[dict_methods["range_data"]] + if not og_data: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute is blank") + + if final["range"] == "holiday": + final["range_data"] = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, options=mojo.holiday_options) + elif final["range"] == "daily": + if og_data == "current": + final["range_data"] = datetime.strftime(self.collectionBuilder.current_time, "%Y-%m-%d") # noqa + elif str(og_data).startswith("current-"): + try: + final["range_data"] = datetime.strftime(self.collectionBuilder.current_time - timedelta(days=int(og_data.split("-")[1])), "%Y-%m-%d") # noqa + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + final["range_data"] = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") + if final["range_data"] == "current": + final["range_data"] = datetime.strftime(self.collectionBuilder.current_time, "%Y-%m-%d") # noqa + elif final["range"] in ["weekend", "weekly"]: + if str(og_data).startswith("current-"): + try: + final_date = chart_date - timedelta(weeks=int(og_data.split("-")[1])) + final_iso = final_date.isocalendar() + final["range_data"] = final_iso.week + final["year"] = final_iso.year + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) + current_iso = chart_date.isocalendar() + final["range_data"] = current_iso.week if _v == "current" else _v + final["year"] = current_iso.year + elif final["range"] == "monthly": + if str(og_data).startswith("current-"): + try: + final_date = chart_date - relativedelta(months=int(og_data.split("-")[1])) + final["range_data"] = final_date.month + final["year"] = final_date.year + except ValueError: + raise 
Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) + final["range_data"] = chart_date.month if _v == "current" else util.lower_months[_v] + elif final["range"] == "quarterly": + if str(og_data).startswith("current-"): + try: + final_date = chart_date - relativedelta(months=int(og_data.split("-")[1]) * 3) + final["range_data"] = mojo.quarters[final_date.month] + final["year"] = final_date.year + except ValueError: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + else: + _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) + final["range_data"] = mojo.quarters[chart_date.month] if _v == "current" else _v + elif final["range"] == "season": + _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) + final["range_data"] = mojo.seasons[chart_date.month] if _v == "current" else _v + else: + final["range_data"] = chart_date.year + if "year" not in final: + final["year"] = chart_date.year + if final["year"] < 1977: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") + + final["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 + self.collectionBuilder.builders.append((method_name, final)) + + def _plex(self, method_name, method_data): + if method_name in ["plex_all", "plex_pilots"]: + self.collectionBuilder.builders.append((method_name, self.collectionBuilder.builder_level)) + elif method_name == "plex_watchlist": + if method_data not in plex.watchlist_sorts: + self.logger.warning(f"{self.collectionBuilder.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") + self.collectionBuilder.builders.append((method_name, method_data if method_data in plex.watchlist_sorts else "added.asc")) + elif method_name in ["plex_search", "plex_collectionless"]: + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + if method_name == "plex_search": + try: + self.collectionBuilder.builders.append((method_name, self.collectionBuilder.build_filter("plex_search", dict_data))) + except FilterFailed as e: + if self.collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) + elif method_name == "plex_collectionless": + prefix_list = util.parse(self.collectionBuilder.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] + exact_list = util.parse(self.collectionBuilder.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] + if len(prefix_list) == 0 and len(exact_list) == 0: + raise Failed(f"{self.collectionBuilder.Type} Error: you must have at least one exclusion") + exact_list.append(self.collectionBuilder.name) + self.collectionBuilder.builders.append((method_name, {"exclude_prefix": prefix_list, "exclude": exact_list})) + else: + try: + 
self.collectionBuilder.builders.append(("plex_search", self.collectionBuilder.build_filter("plex_search", {"any": {method_name: method_data}}))) + except FilterFailed as e: + if self.collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) + + def _reciperr(self, method_name, method_data): + if method_name == "reciperr_list": + for reciperr_list in self.collectionBuilder.config.Reciperr.validate_list(method_data): + self.collectionBuilder.builders.append((method_name, reciperr_list)) + elif method_name == "stevenlu_popular": + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, "bool"))) + + def _mdblist(self, method_name, method_data): + for mdb_dict in self.collectionBuilder.config.MDBList.validate_mdblist_lists(self.collectionBuilder.Type, method_data): + self.collectionBuilder.builders.append((method_name, mdb_dict)) + + def _tautulli(self, method_name, method_data): + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + final_dict = { + "list_type": "popular" if method_name == "tautulli_popular" else "watched", + "list_days": util.parse(self.collectionBuilder.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), + "list_size": util.parse(self.collectionBuilder.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), + "list_minimum": util.parse(self.collectionBuilder.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + } + buff = final_dict["list_size"] * 3 + if self.collectionBuilder.library.Tautulli.has_section: + buff = 0 + elif "list_buffer" in dict_methods: + buff = util.parse(self.collectionBuilder.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) + final_dict["list_buffer"] = buff + self.collectionBuilder.builders.append((method_name, final_dict)) + + def _tmdb(self, method_name, method_data): + if method_name == "tmdb_discover": + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + new_dictionary = {"limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} + for discover_method, discover_data in dict_data.items(): + lower_method = str(discover_method).lower() + discover_attr, modifier = os.path.splitext(lower_method) + if discover_data is None: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute is blank") + elif discover_method.lower() not in tmdb.discover_all: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute not supported") + elif self.collectionBuilder.library.is_movie and discover_attr in tmdb.discover_tv_only: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute only works for show libraries") + elif self.collectionBuilder.library.is_show and discover_attr in tmdb.discover_movie_only: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") + elif discover_attr == "region": + new_dictionary[discover_attr] = util.parse(self.collectionBuilder.Type, discover_method, discover_data.upper(), 
parent=method_name, regex=("^[A-Z]{2}$", "US")) + elif discover_attr == "sort_by": + options = tmdb.discover_movie_sort if self.collectionBuilder.library.is_movie else tmdb.discover_tv_sort + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, parent=method_name, options=options) + elif discover_attr == "certification_country": + if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: + new_dictionary[lower_method] = discover_data + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") + elif discover_attr == "certification": + if "certification_country" in dict_data: + new_dictionary[lower_method] = discover_data + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") + elif discover_attr == "watch_region": + if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: + new_dictionary[lower_method] = discover_data.upper() + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") + elif discover_attr == "with_watch_monetization_types": + if "watch_region" in dict_data: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") + elif discover_attr in tmdb.discover_booleans: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="bool", parent=method_name) + elif discover_attr == "vote_average": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="float", parent=method_name) + elif discover_attr == "with_status": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) + elif discover_attr == "with_type": + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) + elif discover_attr in tmdb.discover_dates: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + elif discover_attr in tmdb.discover_years: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.collectionBuilder.current_year + 1) + elif discover_attr in tmdb.discover_ints: + new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name) + elif discover_attr in tmdb.discover_strings: + new_dictionary[lower_method] = discover_data + elif discover_attr != "limit": + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute not supported") + if len(new_dictionary) > 1: + 
self.collectionBuilder.builders.append((method_name, new_dictionary)) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} had no valid fields") + elif method_name in tmdb.int_builders: + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10))) + else: + values = self.collectionBuilder.config.TMDb.validate_tmdb_ids(method_data, method_name) + if method_name in tmdb.details_builders: + if method_name.startswith(("tmdb_collection", "tmdb_movie", "tmdb_show")): + item = self.collectionBuilder.config.TMDb.get_movie_show_or_collection(values[0], self.collectionBuilder.library.is_movie) + if item.overview: + self.collectionBuilder.summaries[method_name] = item.overview + if item.backdrop_url: + self.collectionBuilder.backgrounds[method_name] = item.backdrop_url + if item.poster_url: + self.collectionBuilder.posters[method_name] = item.poster_url + elif method_name.startswith(("tmdb_actor", "tmdb_crew", "tmdb_director", "tmdb_producer", "tmdb_writer")): + item = self.collectionBuilder.config.TMDb.get_person(values[0]) + if item.biography: + self.collectionBuilder.summaries[method_name] = item.biography + if item.profile_path: + self.collectionBuilder.posters[method_name] = item.profile_url + elif method_name.startswith("tmdb_list"): + item = self.collectionBuilder.config.TMDb.get_list(values[0]) + if item.description: + self.collectionBuilder.summaries[method_name] = item.description + if item.poster_url: + self.collectionBuilder.posters[method_name] = item.poster_url + for value in values: + self.collectionBuilder.builders.append((method_name[:-8] if method_name in tmdb.details_builders else method_name, value)) + + def _trakt(self, method_name, method_data): + if method_name.startswith("trakt_list"): + trakt_lists = self.collectionBuilder.config.Trakt.validate_list(method_data) + for trakt_list in trakt_lists: + self.collectionBuilder.builders.append(("trakt_list", trakt_list)) + if method_name.endswith("_details"): + try: + self.collectionBuilder.summaries[method_name] = self.collectionBuilder.config.Trakt.list_description(trakt_lists[0]) + except Failed as e: + self.logger.error(f"Trakt Error: List description not found: {e}") + elif method_name == "trakt_boxoffice": + if util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False): + self.collectionBuilder.builders.append((method_name, 10)) + else: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} must be set to true") + elif method_name == "trakt_recommendations": + self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10, maximum=100))) + elif method_name == "sync_to_trakt_list": + if method_data not in self.collectionBuilder.config.Trakt.slugs: + raise Failed(f"{self.collectionBuilder.Type} Error: {method_data} invalid. 
Options {', '.join(self.collectionBuilder.config.Trakt.slugs)}") + self.collectionBuilder.sync_to_trakt_list = method_data + elif method_name == "sync_missing_to_trakt_list": + self.collectionBuilder.sync_missing_to_trakt_list = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False) + elif method_name in trakt.builders: + if method_name in ["trakt_chart", "trakt_userlist"]: + trakt_dicts = method_data + final_method = method_name + elif method_name in ["trakt_watchlist", "trakt_collection"]: + trakt_dicts = [] + for trakt_user in util.get_list(method_data, split=False): + trakt_dicts.append({"userlist": method_name[6:], "user": trakt_user}) + final_method = "trakt_userlist" + else: + terms = method_name.split("_") + trakt_dicts = { + "chart": terms[1], + "limit": util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10), + "time_period": terms[2] if len(terms) > 2 else None + } + final_method = "trakt_chart" + if method_name != final_method: + self.logger.warning(f"{self.collectionBuilder.Type} Warning: {method_name} will run as {final_method}") + for trakt_dict in self.collectionBuilder.config.Trakt.validate_chart(self.collectionBuilder.Type, final_method, trakt_dicts, self.collectionBuilder.library.is_movie): + self.collectionBuilder.builders.append((final_method, trakt_dict)) + + def _tvdb(self, method_name, method_data): + values = util.get_list(method_data) + if method_name.endswith("_details"): + if method_name.startswith(("tvdb_movie", "tvdb_show")): + item = self.collectionBuilder.config.TVDb.get_tvdb_obj(values[0], is_movie=method_name.startswith("tvdb_movie")) + if item.summary: + self.collectionBuilder.summaries[method_name] = item.summary + if item.background_url: + self.collectionBuilder.backgrounds[method_name] = item.background_url + if item.poster_url: + self.collectionBuilder.posters[method_name] = item.poster_url + elif method_name.startswith("tvdb_list"): + description, poster = self.collectionBuilder.config.TVDb.get_list_description(values[0]) + if description: + self.collectionBuilder.summaries[method_name] = description + if poster: + self.collectionBuilder.posters[method_name] = poster + for value in values: + self.collectionBuilder.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value)) + + def _filters(self, method_name, method_data): + for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + dict_methods = {dm.lower(): dm for dm in dict_data} + current_filters = [] + validate = True + if "validate" in dict_methods: + if dict_data[dict_methods["validate"]] is None: + raise Failed(f"{self.collectionBuilder.Type} Error: validate filter attribute is blank") + if not isinstance(dict_data[dict_methods["validate"]], bool): + raise Failed(f"{self.collectionBuilder.Type} Error: validate filter attribute must be either true or false") + validate = dict_data.pop(dict_methods["validate"]) + for filter_method, filter_data in dict_data.items(): + filter_attr, modifier, filter_final = self.collectionBuilder.library.split(filter_method) + message = None + if filter_final not in all_filters: + message = f"{self.collectionBuilder.Type} Error: {filter_final} is not a valid filter attribute" + elif self.collectionBuilder.builder_level in filters and filter_attr not in filters[self.collectionBuilder.builder_level]: + message = f"{self.collectionBuilder.Type} Error: {filter_final} is not a valid 
{self.collectionBuilder.builder_level} filter attribute"
+                elif filter_final is None:
+                    message = f"{self.collectionBuilder.Type} Error: {filter_final} filter attribute is blank"
+                else:
+                    try:
+                        final_data = self.collectionBuilder.validate_attribute(filter_attr, modifier, f"{filter_final} filter", filter_data, validate)
+                    except FilterFailed as e:
+                        raise Failed(e)
+                    if self.collectionBuilder.builder_level in ["show", "season", "artist", "album"] and filter_attr in sub_filters:
+                        current_filters.append(("episodes" if self.collectionBuilder.builder_level in ["show", "season"] else "tracks", {filter_final: final_data, "percentage": self.collectionBuilder.default_percent}))
+                    else:
+                        current_filters.append((filter_final, final_data))
+                if message:
+                    if validate:
+                        raise Failed(message)
+                    else:
+                        self.logger.error(message)
+            if current_filters:
+                self.collectionBuilder.filters.append(current_filters)
+        self.collectionBuilder.has_tmdb_filters = any([str(k).split(".")[0] in tmdb_filters for f in self.collectionBuilder.filters for k, v in f])
+        self.collectionBuilder.has_imdb_filters = any([str(k).split(".")[0] in imdb_filters for f in self.collectionBuilder.filters for k, v in f])

From f85d4f3fb3248f121c6d9f01ed983e8e649cb940 Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 08:22:55 -0400
Subject: [PATCH 07/24] fix: builder is false

---
 modules/builder/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index ab27df352..0c6eb2dce 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -851,7 +851,7 @@ def apply_vars(input_str, var_set, var_key, var_limit):
                     raise Failed(f"{self.Type} Error: {method_final} attribute only allowed in an overlay file")
                 elif self.overlay and method_name not in overlay_attributes:
                     raise Failed(f"{self.Type} Error: {method_final} attribute not allowed in an overlay file")
-                elif not self.attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod):
+                elif self.attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod) is False:
                     raise Failed(f"{self.Type} Error: {method_final} attribute not supported")
             except Failed as e:
                 if self.validate_builders:
                     raise

From f1a49bccf46cd0de42cc56dd955a53f8aac89d99 Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 08:47:31 -0400
Subject: [PATCH 08/24] refactor: move _validateAttributes

---
 modules/builder/__init__.py                  |  53 +-
 modules/builder/_builder_attribute_setter.py | 485 ++++++++++---------
 2 files changed, 271 insertions(+), 267 deletions(-)

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index 0c6eb2dce..81d050752 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -32,7 +32,6 @@ def __init__(self, config, metadata, name, data, library=None, overlay=None, ext
         else:
             self.type = "collection"
         self.Type = self.type.capitalize()
-        self.attributeSetter = BuilderAttributeSetter(self, logger)

        logger.separator(f"{self.mapping_name} {self.Type}{f' in {self.library.name}' if self.library else ''}")
        logger.info("")
@@ -794,7 +793,8 @@ def apply_vars(input_str, var_set, var_key, var_limit):
        if self.smart:
            self.custom_sort = None
-
+
+        attributeSetter = BuilderAttributeSetter(self, logger)
        for method_key, method_data in self.data.items():
            if method_key.lower() in ignored_details:
                continue
@@ -805,54 +805,7 @@ def apply_vars(input_str, var_set, var_key, var_limit):
            logger.debug(f"Validating Method:
{method_key}") logger.debug(f"Value: {method_data}") try: - if method_data is None and method_name in all_builders + plex.searches and method_final not in none_builders: - raise Failed(f"{self.Type} Error: {method_final} attribute is blank") - elif method_data is None and method_final not in none_details: - logger.warning(f"Collection Warning: {method_final} attribute is blank") - elif self.playlist and method_name not in playlist_attributes: - raise Failed(f"{self.Type} Error: {method_final} attribute not allowed when using playlists") - elif not self.config.Trakt and "trakt" in method_name: - raise Failed(f"{self.Type} Error: {method_final} requires Trakt to be configured") - elif not self.library.Radarr and "radarr" in method_name: - logger.error(f"{self.Type} Error: {method_final} requires Radarr to be configured") - elif not self.library.Sonarr and "sonarr" in method_name: - logger.error(f"{self.Type} Error: {method_final} requires Sonarr to be configured") - elif not self.library.Tautulli and "tautulli" in method_name: - raise Failed(f"{self.Type} Error: {method_final} requires Tautulli to be configured") - elif not self.config.MyAnimeList and "mal" in method_name: - raise Failed(f"{self.Type} Error: {method_final} requires MyAnimeList to be configured") - elif self.library.is_movie and method_name in show_only_builders: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for show libraries") - elif self.library.is_show and method_name in movie_only_builders: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for movie libraries") - elif self.library.is_show and method_name in plex.movie_only_searches: - raise Failed(f"{self.Type} Error: {method_final} plex search only allowed for movie libraries") - elif self.library.is_movie and method_name in plex.show_only_searches: - raise Failed(f"{self.Type} Error: {method_final} plex search only allowed for show libraries") - elif self.library.is_music and method_name not in music_attributes: - raise Failed(f"{self.Type} Error: {method_final} attribute not allowed for music libraries") - elif self.library.is_music and method_name in album_details and self.builder_level != "album": - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for album collections") - elif not self.library.is_music and method_name in music_only_builders: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for music libraries") - elif not self.playlist and self.builder_level != "episode" and method_name in episode_parts_only: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with Collection Level: episode") - elif self.parts_collection and method_name not in parts_collection_valid: - raise Failed(f"{self.Type} Error: {method_final} attribute not allowed with Collection Level: {self.builder_level.capitalize()}") - elif self.smart and method_name in smart_invalid: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with normal collections") - elif not self.smart and method_name in smart_only: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with smart collections") - elif self.collectionless and method_name not in collectionless_details: - raise Failed(f"{self.Type} Error: {method_final} attribute not allowed for Collectionless collection") - elif self.smart_url and method_name in all_builders + smart_url_invalid: - raise Failed(f"{self.Type} Error: {method_final} builder not allowed when using smart_filter") - elif 
not self.overlay and method_name in overlay_only: - raise Failed(f"{self.Type} Error: {method_final} attribute only allowed in an overlay file") - elif self.overlay and method_name not in overlay_attributes: - raise Failed(f"{self.Type} Error: {method_final} attribute not allowed in an overlay file") - elif self.attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod) is False: - raise Failed(f"{self.Type} Error: {method_final} attribute not supported") + attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod) except Failed as e: if self.validate_builders: raise diff --git a/modules/builder/_builder_attribute_setter.py b/modules/builder/_builder_attribute_setter.py index 5350cc6d4..f143ac8a7 100644 --- a/modules/builder/_builder_attribute_setter.py +++ b/modules/builder/_builder_attribute_setter.py @@ -10,8 +10,11 @@ class BuilderAttributeSetter: def __init__(self, collectionBuilder, logger): self.collectionBuilder = collectionBuilder self.logger = logger + self.Type = collectionBuilder.Type def setAttributes(self, method_name, method_data, method_final, methods, method_mod): + self._validateAttributes(method_name, method_data, method_final) + if method_name in summary_details: self._summary(method_name, method_data) elif method_name in poster_details: @@ -55,7 +58,55 @@ def setAttributes(self, method_name, method_data, method_final, methods, method_ elif method_name in mdblist.builders: self._mdblist(method_name, method_data) else: - return False + raise Failed(f"{self.Type} Error: {method_final} attribute not supported") + + def _validateAttributes(self, method_name, method_data, method_final): + if method_data is None and method_name in all_builders + plex.searches and method_final not in none_builders: + raise Failed(f"{self.Type} Error: {method_final} attribute is blank") + elif method_data is None and method_final not in none_details: + self.logger.warning(f"Collection Warning: {method_final} attribute is blank") + elif self.collectionBuilder.playlist and method_name not in playlist_attributes: + raise Failed(f"{self.Type} Error: {method_final} attribute not allowed when using playlists") + elif not self.collectionBuilder.config.Trakt and "trakt" in method_name: + raise Failed(f"{self.Type} Error: {method_final} requires Trakt to be configured") + elif not self.collectionBuilder.library.Radarr and "radarr" in method_name: + self.logger.error(f"{self.Type} Error: {method_final} requires Radarr to be configured") + elif not self.collectionBuilder.library.Sonarr and "sonarr" in method_name: + self.logger.error(f"{self.Type} Error: {method_final} requires Sonarr to be configured") + elif not self.collectionBuilder.library.Tautulli and "tautulli" in method_name: + raise Failed(f"{self.Type} Error: {method_final} requires Tautulli to be configured") + elif not self.collectionBuilder.config.MyAnimeList and "mal" in method_name: + raise Failed(f"{self.Type} Error: {method_final} requires MyAnimeList to be configured") + elif self.collectionBuilder.library.is_movie and method_name in show_only_builders: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for show libraries") + elif self.collectionBuilder.library.is_show and method_name in movie_only_builders: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for movie libraries") + elif self.collectionBuilder.library.is_show and method_name in plex.movie_only_searches: + raise Failed(f"{self.Type} Error: {method_final} plex 
search only allowed for movie libraries") + elif self.collectionBuilder.library.is_movie and method_name in plex.show_only_searches: + raise Failed(f"{self.Type} Error: {method_final} plex search only allowed for show libraries") + elif self.collectionBuilder.library.is_music and method_name not in music_attributes: + raise Failed(f"{self.Type} Error: {method_final} attribute not allowed for music libraries") + elif self.collectionBuilder.library.is_music and method_name in album_details and self.collectionBuilder.builder_level != "album": + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for album collections") + elif not self.collectionBuilder.library.is_music and method_name in music_only_builders: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for music libraries") + elif not self.collectionBuilder.playlist and self.collectionBuilder.builder_level != "episode" and method_name in episode_parts_only: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with Collection Level: episode") + elif self.collectionBuilder.parts_collection and method_name not in parts_collection_valid: + raise Failed(f"{self.Type} Error: {method_final} attribute not allowed with Collection Level: {self.collectionBuilder.builder_level.capitalize()}") + elif self.collectionBuilder.smart and method_name in smart_invalid: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with normal collections") + elif not self.collectionBuilder.smart and method_name in smart_only: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with smart collections") + elif self.collectionBuilder.collectionless and method_name not in collectionless_details: + raise Failed(f"{self.Type} Error: {method_final} attribute not allowed for Collectionless collection") + elif self.collectionBuilder.smart_url and method_name in all_builders + smart_url_invalid: + raise Failed(f"{self.Type} Error: {method_final} builder not allowed when using smart_filter") + elif not self.collectionBuilder.overlay and method_name in overlay_only: + raise Failed(f"{self.Type} Error: {method_final} attribute only allowed in an overlay file") + elif self.collectionBuilder.overlay and method_name not in overlay_attributes: + raise Failed(f"{self.Type} Error: {method_final} attribute not allowed in an overlay file") def _summary(self, method_name, method_data): if method_name == "summary": @@ -91,7 +142,7 @@ def _poster(self, method_name, method_data): raise ConnectionError self.collectionBuilder.posters[method_name] = method_data except ConnectionError: - self.logger.warning(f"{self.collectionBuilder.Type} Warning: No Poster Found at {method_data}") + self.logger.warning(f"{self.Type} Warning: No Poster Found at {method_data}") elif method_name == "tmdb_list_poster": self.collectionBuilder.posters[method_name] = self.collectionBuilder.config.TMDb.get_list(util.regex_first_int(method_data, "TMDb List ID")).poster_url elif method_name == "tvdb_list_poster": @@ -108,7 +159,7 @@ def _poster(self, method_name, method_data): if os.path.exists(os.path.abspath(method_data)): self.collectionBuilder.posters[method_name] = os.path.abspath(method_data) else: - self.logger.error(f"{self.collectionBuilder.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") + self.logger.error(f"{self.Type} Error: Poster Path Does Not Exist: {os.path.abspath(method_data)}") def _background(self, method_name, method_data): if method_name == "url_background": @@ -118,7 
+169,7 @@ def _background(self, method_name, method_data): raise ConnectionError self.collectionBuilder.backgrounds[method_name] = method_data except ConnectionError: - self.logger.warning(f"{self.collectionBuilder.Type} Warning: No Background Found at {method_data}") + self.logger.warning(f"{self.Type} Warning: No Background Found at {method_data}") elif method_name == "tmdb_background": self.collectionBuilder.backgrounds[method_name] = self.collectionBuilder.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.collectionBuilder.library.is_movie).backdrop_url elif method_name == "tvdb_background": @@ -127,7 +178,7 @@ def _background(self, method_name, method_data): if os.path.exists(os.path.abspath(method_data)): self.collectionBuilder.backgrounds[method_name] = os.path.abspath(method_data) else: - self.logger.error(f"{self.collectionBuilder.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") + self.logger.error(f"{self.Type} Error: Background Path Does Not Exist: {os.path.abspath(method_data)}") def _details(self, method_name, method_data, method_final, methods): if method_name == "url_theme": @@ -136,9 +187,9 @@ def _details(self, method_name, method_data, method_final, methods): if os.path.exists(os.path.abspath(method_data)): self.collectionBuilder.file_theme = os.path.abspath(method_data) else: - self.logger.error(f"{self.collectionBuilder.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") + self.logger.error(f"{self.Type} Error: Theme Path Does Not Exist: {os.path.abspath(method_data)}") elif method_name == "tmdb_region": - self.collectionBuilder.tmdb_region = util.parse(self.collectionBuilder.Type, method_name, method_data, options=self.collectionBuilder.config.TMDb.iso_3166_1) + self.collectionBuilder.tmdb_region = util.parse(self.Type, method_name, method_data, options=self.collectionBuilder.config.TMDb.iso_3166_1) elif method_name == "collection_mode": try: self.collectionBuilder.details[method_name] = util.check_collection_mode(method_data) @@ -150,28 +201,28 @@ def _details(self, method_name, method_data, method_final, methods): else: self.logger.error(f"Config Error: {method_data} collection_filtering invalid\n\tadmin (Always the server admin user)\n\tuser (User currently viewing the content)") elif method_name == "minimum_items": - self.collectionBuilder.minimum = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=1) + self.collectionBuilder.minimum = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1) elif method_name == "cache_builders": - self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=0) + self.collectionBuilder.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", minimum=0) elif method_name == "default_percent": - self.collectionBuilder.default_percent = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) + self.collectionBuilder.default_percent = util.parse(self.Type, method_name, method_data, datatype="int", minimum=1, maximum=100) elif method_name == "server_preroll": - self.collectionBuilder.server_preroll = util.parse(self.collectionBuilder.Type, method_name, method_data) + self.collectionBuilder.server_preroll = util.parse(self.Type, method_name, method_data) elif method_name == "ignore_ids": - 
self.collectionBuilder.ignore_ids.extend(util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="intlist")) + self.collectionBuilder.ignore_ids.extend(util.parse(self.Type, method_name, method_data, datatype="intlist")) elif method_name == "ignore_imdb_ids": - self.collectionBuilder.ignore_imdb_ids.extend(util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="list")) + self.collectionBuilder.ignore_imdb_ids.extend(util.parse(self.Type, method_name, method_data, datatype="list")) elif method_name == "label": if "label" in methods and "label.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use label and label.sync together") + raise Failed(f"{self.Type} Error: Cannot use label and label.sync together") if "label.remove" in methods and "label.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use label.remove and label.sync together") + raise Failed(f"{self.Type} Error: Cannot use label.remove and label.sync together") if method_final == "label" and "label_sync_mode" in methods and self.collectionBuilder.data[methods["label_sync_mode"]] == "sync": self.collectionBuilder.details["label.sync"] = util.get_list(method_data) if method_data else [] else: self.collectionBuilder.details[method_final] = util.get_list(method_data) if method_data else [] elif method_name == "changes_webhooks": - self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="list") if method_data else None + self.collectionBuilder.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="list") if method_data else None elif method_name in scheduled_boolean: if isinstance(method_data, bool): self.collectionBuilder.details[method_name] = method_data @@ -183,48 +234,48 @@ def _details(self, method_name, method_data, method_final, methods): self.collectionBuilder.details[method_name] = False else: try: - util.schedule_check(method_name, util.parse(self.collectionBuilder.Type, method_name, method_data), self.collectionBuilder.current_time, self.collectionBuilder.config.run_hour) + util.schedule_check(method_name, util.parse(self.Type, method_name, method_data), self.collectionBuilder.current_time, self.collectionBuilder.config.run_hour) self.collectionBuilder.details[method_name] = True except NotScheduled: self.collectionBuilder.details[method_name] = False elif method_name in boolean_details: default = self.collectionBuilder.details[method_name] if method_name in self.collectionBuilder.details else None - self.collectionBuilder.details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=default) + self.collectionBuilder.details[method_name] = util.parse(self.Type, method_name, method_data, datatype="bool", default=default) elif method_name in string_details: self.collectionBuilder.details[method_name] = str(method_data) def _item_details(self, method_name, method_data, method_mod, method_final, methods): if method_name == "item_label": if "item_label" in methods and "item_label.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_label and item_label.sync together") + raise Failed(f"{self.Type} Error: Cannot use item_label and item_label.sync together") if "item_label.remove" in methods and "item_label.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_label.remove and item_label.sync together") + raise 
Failed(f"{self.Type} Error: Cannot use item_label.remove and item_label.sync together") self.collectionBuilder.item_details[method_final] = util.get_list(method_data) if method_data else [] if method_name == "item_genre": if "item_genre" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_genre and item_genre.sync together") + raise Failed(f"{self.Type} Error: Cannot use item_genre and item_genre.sync together") if "item_genre.remove" in methods and "item_genre.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use item_genre.remove and item_genre.sync together") + raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together") self.collectionBuilder.item_details[method_final] = util.get_list(method_data) if method_data else [] elif method_name == "item_edition": self.collectionBuilder.item_details[method_final] = str(method_data) if method_data else "" # noqa elif method_name == "non_item_remove_label": if not method_data: - raise Failed(f"{self.collectionBuilder.Type} Error: non_item_remove_label is blank") + raise Failed(f"{self.Type} Error: non_item_remove_label is blank") self.collectionBuilder.item_details[method_final] = util.get_list(method_data) elif method_name in ["item_radarr_tag", "item_sonarr_tag"]: if method_name in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name} and {method_name}.sync together") + raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.sync together") if f"{method_name}.remove" in methods and f"{method_name}.sync" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") + raise Failed(f"{self.Type} Error: Cannot use {method_name}.remove and {method_name}.sync together") if method_name in methods and f"{method_name}.remove" in methods: - raise Failed(f"{self.collectionBuilder.Type} Error: Cannot use {method_name} and {method_name}.remove together") + raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.remove together") self.collectionBuilder.item_details[method_name] = util.get_list(method_data, lower=True) self.collectionBuilder.item_details["apply_tags"] = method_mod[1:] if method_mod else "" elif method_name == "item_refresh_delay": - self.collectionBuilder.item_details[method_name] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=0, minimum=0) + self.collectionBuilder.item_details[method_name] = util.parse(self.Type, method_name, method_data, datatype="int", default=0, minimum=0) elif method_name in item_bool_details: - if util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False): + if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): self.collectionBuilder.item_details[method_name] = True elif method_name in item_false_details: self.collectionBuilder.item_details[method_name] = False @@ -241,14 +292,14 @@ def _item_details(self, method_name, method_data, method_mod, method_final, meth def _radarr(self, method_name, method_data): if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_monitor_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]: - self.collectionBuilder.radarr_details[method_name[7:]] = util.parse(self.collectionBuilder.Type, method_name, method_data, 
datatype="bool") + self.collectionBuilder.radarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") elif method_name == "radarr_folder": self.collectionBuilder.radarr_details["folder"] = method_data elif method_name == "radarr_availability": if str(method_data).lower() in radarr.availability_translation: self.collectionBuilder.radarr_details["availability"] = str(method_data).lower() else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either announced, cinemas, released or db") + raise Failed(f"{self.Type} Error: {method_name} attribute must be either announced, cinemas, released or db") elif method_name == "radarr_quality": self.collectionBuilder.radarr_details["quality"] = method_data elif method_name == "radarr_tag": @@ -260,19 +311,19 @@ def _radarr(self, method_name, method_data): def _sonarr(self, method_name, method_data): if method_name in ["sonarr_add_missing", "sonarr_add_existing", "sonarr_upgrade_existing", "sonarr_monitor_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_ignore_cache"]: - self.collectionBuilder.sonarr_details[method_name[7:]] = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool") + self.collectionBuilder.sonarr_details[method_name[7:]] = util.parse(self.Type, method_name, method_data, datatype="bool") elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]: self.collectionBuilder.sonarr_details[method_name[7:]] = method_data elif method_name == "sonarr_monitor": if str(method_data).lower() in sonarr.monitor_translation: self.collectionBuilder.sonarr_details["monitor"] = str(method_data).lower() else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") + raise Failed(f"{self.Type} Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none") elif method_name == "sonarr_series": if str(method_data).lower() in sonarr.series_types: self.collectionBuilder.sonarr_details["series"] = str(method_data).lower() else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute must be either standard, daily, or anime") + raise Failed(f"{self.Type} Error: {method_name} attribute must be either standard, daily, or anime") elif method_name == "sonarr_tag": self.collectionBuilder.sonarr_details["tag"] = util.get_list(method_data, lower=True) elif method_name == "sonarr_taglist": @@ -282,21 +333,21 @@ def _sonarr(self, method_name, method_data): def _anidb(self, method_name, method_data): if method_name == "anidb_popular": - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=30, maximum=30))) + self.collectionBuilder.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=30, maximum=30))) elif method_name in ["anidb_id", "anidb_relation"]: for anidb_id in self.collectionBuilder.config.AniDB.validate_anidb_ids(method_data): self.collectionBuilder.builders.append((method_name, anidb_id)) elif method_name == "anidb_tag": - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} new_dictionary = {} if "tag" not in dict_methods: - raise 
Failed(f"{self.collectionBuilder.Type} Error: anidb_tag tag attribute is required") + raise Failed(f"{self.Type} Error: anidb_tag tag attribute is required") elif not dict_data[dict_methods["tag"]]: - raise Failed(f"{self.collectionBuilder.Type} Error: anidb_tag tag attribute is blank") + raise Failed(f"{self.Type} Error: anidb_tag tag attribute is blank") else: new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID") - new_dictionary["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) + new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0) self.collectionBuilder.builders.append((method_name, new_dictionary)) def _anilist(self, method_name, method_data): @@ -304,24 +355,24 @@ def _anilist(self, method_name, method_data): for anilist_id in self.collectionBuilder.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"): self.collectionBuilder.builders.append((method_name, anilist_id)) elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]: - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10))) + self.collectionBuilder.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) elif method_name == "anilist_userlist": - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} new_dictionary = { - "username": util.parse(self.collectionBuilder.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "list_name": util.parse(self.collectionBuilder.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), - "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), + "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "list_name": util.parse(self.Type, "list_name", dict_data, methods=dict_methods, parent=method_name), + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.userlist_sort_options), } score_dict = {} for search_method, search_data in dict_data.items(): search_attr, modifier = os.path.splitext(str(search_method).lower()) if search_attr == "score" and modifier in [".gt", ".gte", ".lt", ".lte"]: - score = util.parse(self.collectionBuilder.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) + score = util.parse(self.Type, search_method, dict_data, methods=dict_methods, datatype="int", default=-1, minimum=0, maximum=10, parent=method_name) if score > -1: score_dict[modifier] = score elif search_attr not in ["username", "list_name", "sort_by"]: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") new_dictionary["score"] = score_dict self.collectionBuilder.builders.append((method_name, 
self.collectionBuilder.config.AniList.validate_userlist(new_dictionary))) elif method_name == "anilist_search": @@ -330,47 +381,47 @@ def _anilist(self, method_name, method_data): elif self.collectionBuilder.current_time.month in [6, 7, 8]: current_season = "summer" else: current_season = "fall" default_year = self.collectionBuilder.current_year + 1 if self.collectionBuilder.current_time.month == 12 else self.collectionBuilder.current_year - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} new_dictionary = {} for search_method, search_data in dict_data.items(): lower_method = str(search_method).lower() search_attr, modifier = os.path.splitext(lower_method) if lower_method not in anilist.searches: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") elif search_attr == "season": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, parent=method_name, default=current_season, options=util.seasons) if new_dictionary[search_attr] == "current": new_dictionary[search_attr] = current_season if "year" not in dict_methods: self.logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default") new_dictionary["year"] = default_year elif search_attr == "year": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1) elif search_data is None: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") elif search_attr == "adult": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="bool", parent=method_name) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="bool", parent=method_name) elif search_attr == "country": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.country_codes, parent=method_name) elif search_attr == "source": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, options=anilist.media_source, parent=method_name) elif search_attr in ["episodes", "duration", "score", "popularity"]: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="int", parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", 
parent=method_name) elif search_attr in ["format", "status", "genre", "tag", "tag_category"]: - new_dictionary[lower_method] = self.collectionBuilder.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.collectionBuilder.Type, search_method, search_data)) + new_dictionary[lower_method] = self.collectionBuilder.config.AniList.validate(search_attr.replace("_", " ").title(), util.parse(self.Type, search_method, search_data)) elif search_attr in ["start", "end"]: - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") elif search_attr == "min_tag_percent": - new_dictionary[search_attr] = util.parse(self.collectionBuilder.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) + new_dictionary[search_attr] = util.parse(self.Type, search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100) elif search_attr == "search": new_dictionary[search_attr] = str(search_data) elif lower_method not in ["sort_by", "limit"]: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") if len(new_dictionary) == 0: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} must have at least one valid search option") - new_dictionary["sort_by"] = util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) - new_dictionary["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + raise Failed(f"{self.Type} Error: {method_name} must have at least one valid search option") + new_dictionary["sort_by"] = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options) + new_dictionary["limit"] = util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) self.collectionBuilder.builders.append((method_name, new_dictionary)) def _icheckmovies(self, method_name, method_data): @@ -387,38 +438,38 @@ def _imdb(self, method_name, method_data): if str(value).startswith("tt"): self.collectionBuilder.builders.append((method_name, value)) else: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_id {value} must begin with tt") + raise Failed(f"{self.Type} Error: imdb_id {value} must begin with tt") elif method_name == "imdb_list": try: - for imdb_dict in self.collectionBuilder.config.IMDb.validate_imdb_lists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language): + for imdb_dict in self.collectionBuilder.config.IMDb.validate_imdb_lists(self.Type, method_data, self.collectionBuilder.language): self.collectionBuilder.builders.append((method_name, imdb_dict)) except Failed as e: self.logger.error(e) elif method_name == "imdb_chart": for value in util.get_list(method_data): if value in imdb.movie_charts and not self.collectionBuilder.library.is_movie: - raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} does not work with show libraries") + raise Failed(f"{self.Type} Error: chart: {value} does not work with show libraries") 
elif value in imdb.show_charts and self.collectionBuilder.library.is_movie: - raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} does not work with movie libraries") + raise Failed(f"{self.Type} Error: chart: {value} does not work with movie libraries") elif value in imdb.movie_charts or value in imdb.show_charts: self.collectionBuilder.builders.append((method_name, value)) else: - raise Failed(f"{self.collectionBuilder.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") + raise Failed(f"{self.Type} Error: chart: {value} is invalid options are {[i for i in imdb.charts]}") elif method_name == "imdb_watchlist": - for imdb_user in self.collectionBuilder.config.IMDb.validate_imdb_watchlists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language): + for imdb_user in self.collectionBuilder.config.IMDb.validate_imdb_watchlists(self.Type, method_data, self.collectionBuilder.language): self.collectionBuilder.builders.append((method_name, imdb_user)) elif method_name == "imdb_award": - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} - event_id = util.parse(self.collectionBuilder.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) + event_id = util.parse(self.Type, "event_id", dict_data, parent=method_name, methods=dict_methods, regex=(r"(ev\d+)", "ev0000003")) git_event, year_options = self.collectionBuilder.config.IMDb.get_event_years(event_id) if not year_options: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") + raise Failed(f"{self.Type} Error: imdb_award event_id attribute: No event found at {imdb.base_url}/event/{event_id}") if "event_year" not in dict_methods: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute not found") + raise Failed(f"{self.Type} Error: imdb_award event_year attribute not found") og_year = dict_data[dict_methods["event_year"]] if not og_year: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute is blank") + raise Failed(f"{self.Type} Error: imdb_award event_year attribute is blank") if og_year in ["all", "latest"]: event_year = og_year elif not isinstance(og_year, list) and "-" in str(og_year) and len(str(og_year)) > 7: @@ -432,21 +483,21 @@ def _imdb(self, method_name, method_data): if check >= min_year and (max_year is None or check <= max_year): event_year.append(option) except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award event_year attribute invalid: {og_year}") + raise Failed(f"{self.Type} Error: imdb_award event_year attribute invalid: {og_year}") else: - event_year = util.parse(self.collectionBuilder.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) + event_year = util.parse(self.Type, "event_year", og_year, parent=method_name, datatype="strlist", options=year_options) if (event_year == "all" or len(event_year) > 1) and not git_event: - raise Failed(f"{self.collectionBuilder.Type} Error: Only specific events work when using multiple years. Event Options: [{', '.join([k for k in self.collectionBuilder.config.IMDb.events_validation])}]") + raise Failed(f"{self.Type} Error: Only specific events work when using multiple years. 
Event Options: [{', '.join([k for k in self.collectionBuilder.config.IMDb.events_validation])}]") award_filters = [] if "award_filter" in dict_methods: if not dict_data[dict_methods["award_filter"]]: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award award_filter attribute is blank") - award_filters = util.parse(self.collectionBuilder.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") + raise Failed(f"{self.Type} Error: imdb_award award_filter attribute is blank") + award_filters = util.parse(self.Type, "award_filter", dict_data[dict_methods["award_filter"]], datatype="lowerlist") category_filters = [] if "category_filter" in dict_methods: if not dict_data[dict_methods["category_filter"]]: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award category_filter attribute is blank") - category_filters = util.parse(self.collectionBuilder.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") + raise Failed(f"{self.Type} Error: imdb_award category_filter attribute is blank") + category_filters = util.parse(self.Type, "category_filter", dict_data[dict_methods["category_filter"]], datatype="lowerlist") final_category = [] final_awards = [] if award_filters or category_filters: @@ -456,47 +507,47 @@ def _imdb(self, method_name, method_data): if award_filter in lower_award: final_awards.append(lower_award[award_filter]) else: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in in [{', '.join([v for _, v in lower_award.items()])}]") + raise Failed(f"{self.Type} Error: imdb_award award_filter attribute invalid: {award_filter} must be in in [{', '.join([v for _, v in lower_award.items()])}]") lower_category = {c.lower(): c for c in category_names if c} for category_filter in category_filters: if category_filter in lower_category: final_category.append(lower_category[category_filter]) else: - raise Failed(f"{self.collectionBuilder.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in in [{', '.join([v for _, v in lower_category.items()])}]") + raise Failed(f"{self.Type} Error: imdb_award category_filter attribute invalid: {category_filter} must be in in [{', '.join([v for _, v in lower_category.items()])}]") self.collectionBuilder.builders.append((method_name, { "event_id": event_id, "event_year": event_year, "award_filter": final_awards if final_awards else None, "category_filter": final_category if final_category else None, - "winning": util.parse(self.collectionBuilder.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) + "winning": util.parse(self.Type, "winning", dict_data, parent=method_name, methods=dict_methods, datatype="bool", default=False) })) elif method_name == "imdb_search": - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} + new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, minimum=0, default=100, parent=method_name)} for search_method, search_data in dict_data.items(): lower_method = str(search_method).lower() search_attr, 
modifier = os.path.splitext(lower_method) if search_data is None: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank") elif lower_method not in imdb.imdb_search_attributes: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") elif search_attr == "sort_by": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name, options=imdb.sort_options) elif search_attr == "title": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, parent=method_name) elif search_attr == "type": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.title_type_options) elif search_attr == "topic": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.topic_options) elif search_attr == "release": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="date", parent=method_name, date_return="%Y-%m-%d") + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="date", parent=method_name, date_return="%Y-%m-%d") elif search_attr == "rating": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="float", parent=method_name, minimum=0.1, maximum=10) elif search_attr in ["votes", "imdb_top", "imdb_bottom", "popularity", "runtime"]: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="int", parent=method_name, minimum=0) elif search_attr == "genre": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name, options=imdb.genre_options) elif search_attr == "event": events = [] - for event in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + for event in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): if event in imdb.event_options: events.append(event) else: @@ -509,7 +560,7 @@ def _imdb(self, 
method_name, method_data): new_dictionary[lower_method] = events elif search_attr == "company": companies = [] - for company in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + for company in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): if company in imdb.company_options: companies.append(company) else: @@ -542,7 +593,7 @@ def _imdb(self, method_name, method_data): new_dictionary[lower_method] = final_list elif search_attr == "country": countries = [] - for country in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="upperlist", parent=method_name): + for country in util.parse(self.Type, search_method, search_data, datatype="upperlist", parent=method_name): if country: if len(str(country)) != 2: raise Failed(f"{method_name} {search_method} attribute: {country} must be only 2 characters i.e. 'US'") @@ -550,10 +601,10 @@ def _imdb(self, method_name, method_data): if countries: new_dictionary[lower_method] = countries elif search_attr in ["keyword", "language", "alternate_version", "crazy_credit", "location", "goof", "plot", "quote", "soundtrack", "trivia"]: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name) elif search_attr == "cast": casts = [] - for cast in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + for cast in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): res = re.search(r'(nm\d+)', cast) if res: casts.append(res.group(1)) @@ -563,7 +614,7 @@ def _imdb(self, method_name, method_data): new_dictionary[lower_method] = casts elif search_attr == "series": series = [] - for show in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + for show in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): res = re.search(r'(tt\d+)', show) if res: series.append(res.group(1)) @@ -573,7 +624,7 @@ def _imdb(self, method_name, method_data): new_dictionary[lower_method] = series elif search_attr == "list": lists = [] - for new_list in util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="lowerlist", parent=method_name): + for new_list in util.parse(self.Type, search_method, search_data, datatype="lowerlist", parent=method_name): res = re.search(r'(ls\d+)', new_list) if res: lists.append(res.group(1)) @@ -582,18 +633,18 @@ def _imdb(self, method_name, method_data): if lists: new_dictionary[lower_method] = lists elif search_attr == "adult": - if util.parse(self.collectionBuilder.Type, search_method, search_data, datatype="bool", parent=method_name): + if util.parse(self.Type, search_method, search_data, datatype="bool", parent=method_name): new_dictionary[lower_method] = True elif search_attr != "limit": - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {search_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported") if len(new_dictionary) > 1: self.collectionBuilder.builders.append((method_name, new_dictionary)) else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} had no valid fields") + raise Failed(f"{self.Type} 
Error: {method_name} had no valid fields") def _letterboxd(self, method_name, method_data): if method_name.startswith("letterboxd_list"): - letterboxd_lists = self.collectionBuilder.config.Letterboxd.validate_letterboxd_lists(self.collectionBuilder.Type, method_data, self.collectionBuilder.language) + letterboxd_lists = self.collectionBuilder.config.Letterboxd.validate_letterboxd_lists(self.Type, method_data, self.collectionBuilder.language) for letterboxd_list in letterboxd_lists: self.collectionBuilder.builders.append(("letterboxd_list", letterboxd_list)) if method_name.endswith("_details"): @@ -604,168 +655,168 @@ def _mal(self, method_name, method_data): for mal_id in util.get_int_list(method_data, "MyAnimeList ID"): self.collectionBuilder.builders.append((method_name, mal_id)) elif method_name in ["mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special", "mal_popular", "mal_favorite", "mal_suggested"]: - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) + self.collectionBuilder.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100 if method_name == "mal_suggested" else 500))) elif method_name in ["mal_season", "mal_userlist", "mal_search"]: - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} if method_name == "mal_season": if self.collectionBuilder.current_time.month in [1, 2, 3]: default_season = "winter" elif self.collectionBuilder.current_time.month in [4, 5, 6]: default_season = "spring" elif self.collectionBuilder.current_time.month in [7, 8, 9]: default_season = "summer" else: default_season = "fall" - season = util.parse(self.collectionBuilder.Type, "season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) + season = util.parse(self.Type, "season", dict_data, methods=dict_methods, parent=method_name, default=default_season, options=util.seasons) if season == "current": season = default_season self.collectionBuilder.builders.append((method_name, { "season": season, - "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), - "year": util.parse(self.collectionBuilder.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.collectionBuilder.current_year, parent=method_name, minimum=1917, maximum=self.collectionBuilder.current_year + 1), - "limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), - "starting_only": util.parse(self.collectionBuilder.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="members", options=mal.season_sort_options, translation=mal.season_sort_translation), + "year": util.parse(self.Type, "year", dict_data, datatype="int", methods=dict_methods, default=self.collectionBuilder.current_year, parent=method_name, minimum=1917, 
maximum=self.collectionBuilder.current_year + 1), + "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=500), + "starting_only": util.parse(self.Type, "starting_only", dict_data, datatype="bool", methods=dict_methods, default=False, parent=method_name) })) elif method_name == "mal_userlist": self.collectionBuilder.builders.append((method_name, { - "username": util.parse(self.collectionBuilder.Type, "username", dict_data, methods=dict_methods, parent=method_name), - "status": util.parse(self.collectionBuilder.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), - "sort_by": util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), - "limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=1000) + "username": util.parse(self.Type, "username", dict_data, methods=dict_methods, parent=method_name), + "status": util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, default="all", options=mal.userlist_status), + "sort_by": util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=mal.userlist_sort_options, translation=mal.userlist_sort_translation), + "limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name, maximum=1000) })) elif method_name == "mal_search": final_attributes = {} final_text = "MyAnimeList Search" if "sort_by" in dict_methods: - sort = util.parse(self.collectionBuilder.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) + sort = util.parse(self.Type, "sort_by", dict_data, methods=dict_methods, parent=method_name, options=mal.search_combos) sort_type, sort_direction = sort.split(".") final_text += f"\nSorted By: {sort}" final_attributes["order_by"] = sort_type final_attributes["sort"] = sort_direction limit = 0 if "limit" in dict_methods: - limit = util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) + limit = util.parse(self.Type, "limit", dict_data, datatype="int", default=0, methods=dict_methods, parent=method_name) final_text += f"\nLimit: {limit if limit else 'None'}" if "query" in dict_methods: - final_attributes["q"] = util.parse(self.collectionBuilder.Type, "query", dict_data, methods=dict_methods, parent=method_name) + final_attributes["q"] = util.parse(self.Type, "query", dict_data, methods=dict_methods, parent=method_name) final_text += f"\nQuery: {final_attributes['q']}" if "prefix" in dict_methods: - final_attributes["letter"] = util.parse(self.collectionBuilder.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) + final_attributes["letter"] = util.parse(self.Type, "prefix", dict_data, methods=dict_methods, parent=method_name) final_text += f"\nPrefix: {final_attributes['letter']}" if "type" in dict_methods: - type_list = util.parse(self.collectionBuilder.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) + type_list = util.parse(self.Type, "type", dict_data, datatype="commalist", methods=dict_methods, parent=method_name, options=mal.search_types) 
final_attributes["type"] = ",".join(type_list) final_text += f"\nType: {' or '.join(type_list)}" if "status" in dict_methods: - final_attributes["status"] = util.parse(self.collectionBuilder.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) + final_attributes["status"] = util.parse(self.Type, "status", dict_data, methods=dict_methods, parent=method_name, options=mal.search_status) final_text += f"\nStatus: {final_attributes['status']}" if "genre" in dict_methods: - genre_str = str(util.parse(self.collectionBuilder.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) + genre_str = str(util.parse(self.Type, "genre", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) final_text += f"\nGenre: {out_text}" final_attributes["genres"] = out_ints if "genre.not" in dict_methods: - genre_str = str(util.parse(self.collectionBuilder.Type, "genre.not", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) + genre_str = str(util.parse(self.Type, "genre.not", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Genre', genre_str, self.collectionBuilder.config.MyAnimeList.genres) final_text += f"\nNot Genre: {out_text}" final_attributes["genres_exclude"] = out_ints if "studio" in dict_methods: - studio_str = str(util.parse(self.collectionBuilder.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) - out_text, out_ints = util.parse_and_or(self.collectionBuilder.Type, 'Studio', studio_str, self.collectionBuilder.config.MyAnimeList.studios) + studio_str = str(util.parse(self.Type, "studio", dict_data, methods=dict_methods, parent=method_name)) + out_text, out_ints = util.parse_and_or(self.Type, 'Studio', studio_str, self.collectionBuilder.config.MyAnimeList.studios) final_text += f"\nStudio: {out_text}" final_attributes["producers"] = out_ints if "content_rating" in dict_methods: - final_attributes["rating"] = util.parse(self.collectionBuilder.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) + final_attributes["rating"] = util.parse(self.Type, "content_rating", dict_data, methods=dict_methods, parent=method_name, options=mal.search_ratings) final_text += f"\nContent Rating: {final_attributes['rating']}" if "score.gte" in dict_methods: - final_attributes["min_score"] = util.parse(self.collectionBuilder.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["min_score"] = util.parse(self.Type, "score.gte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) final_text += f"\nScore Greater Than or Equal: {final_attributes['min_score']}" elif "score.gt" in dict_methods: - original_score = util.parse(self.collectionBuilder.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + original_score = util.parse(self.Type, "score.gt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) final_attributes["min_score"] = 
original_score + 0.01 final_text += f"\nScore Greater Than: {original_score}" if "score.lte" in dict_methods: - final_attributes["max_score"] = util.parse(self.collectionBuilder.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + final_attributes["max_score"] = util.parse(self.Type, "score.lte", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) final_text += f"\nScore Less Than or Equal: {final_attributes['max_score']}" elif "score.lt" in dict_methods: - original_score = util.parse(self.collectionBuilder.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) + original_score = util.parse(self.Type, "score.lt", dict_data, datatype="float", methods=dict_methods, parent=method_name, minimum=0, maximum=10) final_attributes["max_score"] = original_score - 0.01 final_text += f"\nScore Less Than: {original_score}" if "min_score" in final_attributes and "max_score" in final_attributes and final_attributes["max_score"] <= final_attributes["min_score"]: - raise Failed(f"{self.collectionBuilder.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") + raise Failed(f"{self.Type} Error: mal_search score.lte/score.lt attribute must be greater than score.gte/score.gt") if "sfw" in dict_methods: - sfw = util.parse(self.collectionBuilder.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) + sfw = util.parse(self.Type, "sfw", dict_data, datatype="bool", methods=dict_methods, parent=method_name) if sfw: final_attributes["sfw"] = 1 final_text += f"\nSafe for Work: {final_attributes['sfw']}" if not final_attributes: - raise Failed(f"{self.collectionBuilder.Type} Error: no mal_search attributes found") + raise Failed(f"{self.Type} Error: no mal_search attributes found") self.collectionBuilder.builders.append((method_name, (final_attributes, final_text, limit))) elif method_name in ["mal_genre", "mal_studio"]: self.logger.warning(f"Config Warning: {method_name} will run as a mal_search") - item_list = util.parse(self.collectionBuilder.Type, method_name[4:], method_data, datatype="commalist") + item_list = util.parse(self.Type, method_name[4:], method_data, datatype="commalist") all_items = self.collectionBuilder.config.MyAnimeList.genres if method_name == "mal_genre" else self.collectionBuilder.config.MyAnimeList.studios final_items = [str(all_items[i]) for i in item_list if i in all_items] final_text = f"MyAnimeList Search\n{method_name[4:].capitalize()}: {' or '.join([str(all_items[i]) for i in final_items])}" self.collectionBuilder.builders.append(("mal_search", ({"genres" if method_name == "mal_genre" else "producers": ",".join(final_items)}, final_text, 0))) def _mojo(self, method_name, method_data): - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} final = {} if method_name == "mojo_record": - final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.top_options) + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.top_options) elif method_name == "mojo_world": if "year" not in dict_methods: - raise Failed(f"{self.collectionBuilder.Type} Error: 
{method_name} year attribute not found") + raise Failed(f"{self.Type} Error: {method_name} year attribute not found") og_year = dict_data[dict_methods["year"]] if not og_year: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} year attribute is blank") if og_year == "current": final["year"] = str(self.collectionBuilder.current_year) # noqa elif str(og_year).startswith("current-"): try: final["year"] = str(self.collectionBuilder.current_year - int(og_year.split("-")[1])) # noqa if final["year"] not in mojo.year_options: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") + raise Failed(f"{self.Type} Error: {method_name} year attribute final value must be 1977 or greater: {og_year}") except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} year attribute invalid: {og_year}") + raise Failed(f"{self.Type} Error: {method_name} year attribute invalid: {og_year}") else: - final["year"] = util.parse(self.collectionBuilder.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) + final["year"] = util.parse(self.Type, "year", dict_data, methods=dict_methods, parent=method_name, options=mojo.year_options) elif method_name == "mojo_all_time": - final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) - final["content_rating_filter"] = util.parse(self.collectionBuilder.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, options=mojo.chart_options) + final["content_rating_filter"] = util.parse(self.Type, "content_rating_filter", dict_data, methods=dict_methods, parent=method_name, options=mojo.content_rating_options) if "content_rating_filter" in dict_methods else None elif method_name == "mojo_never": - final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", options=self.collectionBuilder.config.BoxOfficeMojo.never_options) - final["never"] = str(util.parse(self.collectionBuilder.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="domestic", options=self.collectionBuilder.config.BoxOfficeMojo.never_options) + final["never"] = str(util.parse(self.Type, "never", dict_data, methods=dict_methods, parent=method_name, default="1", options=mojo.never_in_options)) if "never" in dict_methods else "1" elif method_name in ["mojo_domestic", "mojo_international"]: dome = method_name == "mojo_domestic" - final["range"] = util.parse(self.collectionBuilder.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) + final["range"] = util.parse(self.Type, "range", dict_data, methods=dict_methods, parent=method_name, options=mojo.dome_range_options if dome else mojo.intl_range_options) if not dome: - final["chart"] = util.parse(self.collectionBuilder.Type, "chart", dict_data, methods=dict_methods, 
parent=method_name, default="international", options=self.collectionBuilder.config.BoxOfficeMojo.intl_options) + final["chart"] = util.parse(self.Type, "chart", dict_data, methods=dict_methods, parent=method_name, default="international", options=self.collectionBuilder.config.BoxOfficeMojo.intl_options) chart_date = self.collectionBuilder.current_time if final["range"] != "daily": _m = "range_data" if final["range"] == "yearly" and "year" not in dict_methods and "range_data" in dict_methods else "year" if _m not in dict_methods: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute not found") + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute not found") og_year = dict_data[dict_methods[_m]] if not og_year: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute is blank") if str(og_year).startswith("current-"): try: chart_date = self.collectionBuilder.current_time - relativedelta(years=int(og_year.split("-")[1])) except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {_m} attribute invalid: {og_year}") + raise Failed(f"{self.Type} Error: {method_name} {_m} attribute invalid: {og_year}") else: - _y = util.parse(self.collectionBuilder.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) + _y = util.parse(self.Type, _m, dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.year_options) if _y != "current": chart_date = self.collectionBuilder.current_time - relativedelta(years=self.collectionBuilder.current_time.year - _y) if final["range"] != "yearly": if "range_data" not in dict_methods: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute not found") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute not found") og_data = dict_data[dict_methods["range_data"]] if not og_data: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute is blank") if final["range"] == "holiday": - final["range_data"] = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, options=mojo.holiday_options) + final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, options=mojo.holiday_options) elif final["range"] == "daily": if og_data == "current": final["range_data"] = datetime.strftime(self.collectionBuilder.current_time, "%Y-%m-%d") # noqa @@ -773,9 +824,9 @@ def _mojo(self, method_name, method_data): try: final["range_data"] = datetime.strftime(self.collectionBuilder.current_time - timedelta(days=int(og_data.split("-")[1])), "%Y-%m-%d") # noqa except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") else: - final["range_data"] = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") + final["range_data"] = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", datatype="date", date_return="%Y-%m-%d") if final["range_data"] == "current": final["range_data"] 
= datetime.strftime(self.collectionBuilder.current_time, "%Y-%m-%d") # noqa elif final["range"] in ["weekend", "weekly"]: @@ -786,9 +837,9 @@ def _mojo(self, method_name, method_data): final["range_data"] = final_iso.week final["year"] = final_iso.year except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") else: - _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + [str(i) for i in range(1, 54)]) current_iso = chart_date.isocalendar() final["range_data"] = current_iso.week if _v == "current" else _v final["year"] = current_iso.year @@ -799,9 +850,9 @@ def _mojo(self, method_name, method_data): final["range_data"] = final_date.month final["year"] = final_date.year except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") else: - _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=["current"] + util.lower_months) final["range_data"] = chart_date.month if _v == "current" else util.lower_months[_v] elif final["range"] == "quarterly": if str(og_data).startswith("current-"): @@ -810,21 +861,21 @@ def _mojo(self, method_name, method_data): final["range_data"] = mojo.quarters[final_date.month] final["year"] = final_date.year except ValueError: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} range_data attribute invalid: {og_data}") + raise Failed(f"{self.Type} Error: {method_name} range_data attribute invalid: {og_data}") else: - _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.quarter_options) final["range_data"] = mojo.quarters[chart_date.month] if _v == "current" else _v elif final["range"] == "season": - _v = util.parse(self.collectionBuilder.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) + _v = util.parse(self.Type, "range_data", dict_data, methods=dict_methods, parent=method_name, default="current", options=mojo.season_options) final["range_data"] = mojo.seasons[chart_date.month] if _v == "current" else _v else: final["range_data"] = chart_date.year if "year" not in final: final["year"] = chart_date.year if final["year"] < 1977: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") + raise Failed(f"{self.Type} Error: {method_name} attribute final date value must be on year 1977 or greater: {final['year']}") - final["limit"] = util.parse(self.collectionBuilder.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, 
datatype="int", maximum=1000) if "limit" in dict_methods else 0 + final["limit"] = util.parse(self.Type, "limit", dict_data, methods=dict_methods, parent=method_name, default=0, datatype="int", maximum=1000) if "limit" in dict_methods else 0 self.collectionBuilder.builders.append((method_name, final)) def _plex(self, method_name, method_data): @@ -832,10 +883,10 @@ def _plex(self, method_name, method_data): self.collectionBuilder.builders.append((method_name, self.collectionBuilder.builder_level)) elif method_name == "plex_watchlist": if method_data not in plex.watchlist_sorts: - self.logger.warning(f"{self.collectionBuilder.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") + self.logger.warning(f"{self.Type} Warning: Watchlist sort: {method_data} invalid defaulting to added.asc") self.collectionBuilder.builders.append((method_name, method_data if method_data in plex.watchlist_sorts else "added.asc")) elif method_name in ["plex_search", "plex_collectionless"]: - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} if method_name == "plex_search": try: @@ -846,10 +897,10 @@ def _plex(self, method_name, method_data): else: raise Failed(str(e)) elif method_name == "plex_collectionless": - prefix_list = util.parse(self.collectionBuilder.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] - exact_list = util.parse(self.collectionBuilder.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] + prefix_list = util.parse(self.Type, "exclude_prefix", dict_data, datatype="list", methods=dict_methods) if "exclude_prefix" in dict_methods else [] + exact_list = util.parse(self.Type, "exclude", dict_data, datatype="list", methods=dict_methods) if "exclude" in dict_methods else [] if len(prefix_list) == 0 and len(exact_list) == 0: - raise Failed(f"{self.collectionBuilder.Type} Error: you must have at least one exclusion") + raise Failed(f"{self.Type} Error: you must have at least one exclusion") exact_list.append(self.collectionBuilder.name) self.collectionBuilder.builders.append((method_name, {"exclude_prefix": prefix_list, "exclude": exact_list})) else: @@ -866,94 +917,94 @@ def _reciperr(self, method_name, method_data): for reciperr_list in self.collectionBuilder.config.Reciperr.validate_list(method_data): self.collectionBuilder.builders.append((method_name, reciperr_list)) elif method_name == "stevenlu_popular": - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, "bool"))) + self.collectionBuilder.builders.append((method_name, util.parse(self.Type, method_name, method_data, "bool"))) def _mdblist(self, method_name, method_data): - for mdb_dict in self.collectionBuilder.config.MDBList.validate_mdblist_lists(self.collectionBuilder.Type, method_data): + for mdb_dict in self.collectionBuilder.config.MDBList.validate_mdblist_lists(self.Type, method_data): self.collectionBuilder.builders.append((method_name, mdb_dict)) def _tautulli(self, method_name, method_data): - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} 
final_dict = { "list_type": "popular" if method_name == "tautulli_popular" else "watched", - "list_days": util.parse(self.collectionBuilder.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), - "list_size": util.parse(self.collectionBuilder.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), - "list_minimum": util.parse(self.collectionBuilder.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) + "list_days": util.parse(self.Type, "list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), + "list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), + "list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) } buff = final_dict["list_size"] * 3 if self.collectionBuilder.library.Tautulli.has_section: buff = 0 elif "list_buffer" in dict_methods: - buff = util.parse(self.collectionBuilder.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) + buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name) final_dict["list_buffer"] = buff self.collectionBuilder.builders.append((method_name, final_dict)) def _tmdb(self, method_name, method_data): if method_name == "tmdb_discover": - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} - new_dictionary = {"limit": util.parse(self.collectionBuilder.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} + new_dictionary = {"limit": util.parse(self.Type, "limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)} for discover_method, discover_data in dict_data.items(): lower_method = str(discover_method).lower() discover_attr, modifier = os.path.splitext(lower_method) if discover_data is None: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute is blank") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute is blank") elif discover_method.lower() not in tmdb.discover_all: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") elif self.collectionBuilder.library.is_movie and discover_attr in tmdb.discover_tv_only: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute only works for show libraries") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for show libraries") elif self.collectionBuilder.library.is_show and discover_attr in tmdb.discover_movie_only: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for movie libraries") elif discover_attr == "region": - new_dictionary[discover_attr] = util.parse(self.collectionBuilder.Type, discover_method, discover_data.upper(), parent=method_name, 
regex=("^[A-Z]{2}$", "US")) + new_dictionary[discover_attr] = util.parse(self.Type, discover_method, discover_data.upper(), parent=method_name, regex=("^[A-Z]{2}$", "US")) elif discover_attr == "sort_by": options = tmdb.discover_movie_sort if self.collectionBuilder.library.is_movie else tmdb.discover_tv_sort - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, parent=method_name, options=options) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=options) elif discover_attr == "certification_country": if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: new_dictionary[lower_method] = discover_data else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") + raise Failed(f"{self.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") elif discover_attr == "certification": if "certification_country" in dict_data: new_dictionary[lower_method] = discover_data else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country") elif discover_attr == "watch_region": if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: new_dictionary[lower_method] = discover_data.upper() else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") elif discover_attr == "with_watch_monetization_types": if "watch_region" in dict_data: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, parent=method_name, options=tmdb.discover_monetization_types) else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region") elif discover_attr in tmdb.discover_booleans: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="bool", parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="bool", parent=method_name) elif discover_attr == "vote_average": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="float", parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="float", parent=method_name) elif discover_attr == "with_status": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", 
parent=method_name, minimum=0, maximum=5) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) elif discover_attr == "with_type": - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) elif discover_attr in tmdb.discover_dates: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="date", parent=method_name, date_return="%m/%d/%Y") elif discover_attr in tmdb.discover_years: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.collectionBuilder.current_year + 1) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.collectionBuilder.current_year + 1) elif discover_attr in tmdb.discover_ints: - new_dictionary[lower_method] = util.parse(self.collectionBuilder.Type, discover_method, discover_data, datatype="int", parent=method_name) + new_dictionary[lower_method] = util.parse(self.Type, discover_method, discover_data, datatype="int", parent=method_name) elif discover_attr in tmdb.discover_strings: new_dictionary[lower_method] = discover_data elif discover_attr != "limit": - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} {discover_method} attribute not supported") + raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported") if len(new_dictionary) > 1: self.collectionBuilder.builders.append((method_name, new_dictionary)) else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} had no valid fields") + raise Failed(f"{self.Type} Error: {method_name} had no valid fields") elif method_name in tmdb.int_builders: - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10))) + self.collectionBuilder.builders.append((method_name, util.parse(self.Type, method_name, method_data, datatype="int", default=10))) else: values = self.collectionBuilder.config.TMDb.validate_tmdb_ids(method_data, method_name) if method_name in tmdb.details_builders: @@ -991,18 +1042,18 @@ def _trakt(self, method_name, method_data): except Failed as e: self.logger.error(f"Trakt Error: List description not found: {e}") elif method_name == "trakt_boxoffice": - if util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False): + if util.parse(self.Type, method_name, method_data, datatype="bool", default=False): self.collectionBuilder.builders.append((method_name, 10)) else: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_name} must be set to true") + raise Failed(f"{self.Type} Error: {method_name} must be set to true") elif method_name == "trakt_recommendations": - self.collectionBuilder.builders.append((method_name, util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10, maximum=100))) + self.collectionBuilder.builders.append((method_name, 
util.parse(self.Type, method_name, method_data, datatype="int", default=10, maximum=100))) elif method_name == "sync_to_trakt_list": if method_data not in self.collectionBuilder.config.Trakt.slugs: - raise Failed(f"{self.collectionBuilder.Type} Error: {method_data} invalid. Options {', '.join(self.collectionBuilder.config.Trakt.slugs)}") + raise Failed(f"{self.Type} Error: {method_data} invalid. Options {', '.join(self.collectionBuilder.config.Trakt.slugs)}") self.collectionBuilder.sync_to_trakt_list = method_data elif method_name == "sync_missing_to_trakt_list": - self.collectionBuilder.sync_missing_to_trakt_list = util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="bool", default=False) + self.collectionBuilder.sync_missing_to_trakt_list = util.parse(self.Type, method_name, method_data, datatype="bool", default=False) elif method_name in trakt.builders: if method_name in ["trakt_chart", "trakt_userlist"]: trakt_dicts = method_data @@ -1016,13 +1067,13 @@ def _trakt(self, method_name, method_data): terms = method_name.split("_") trakt_dicts = { "chart": terms[1], - "limit": util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="int", default=10), + "limit": util.parse(self.Type, method_name, method_data, datatype="int", default=10), "time_period": terms[2] if len(terms) > 2 else None } final_method = "trakt_chart" if method_name != final_method: - self.logger.warning(f"{self.collectionBuilder.Type} Warning: {method_name} will run as {final_method}") - for trakt_dict in self.collectionBuilder.config.Trakt.validate_chart(self.collectionBuilder.Type, final_method, trakt_dicts, self.collectionBuilder.library.is_movie): + self.logger.warning(f"{self.Type} Warning: {method_name} will run as {final_method}") + for trakt_dict in self.collectionBuilder.config.Trakt.validate_chart(self.Type, final_method, trakt_dicts, self.collectionBuilder.library.is_movie): self.collectionBuilder.builders.append((final_method, trakt_dict)) def _tvdb(self, method_name, method_data): @@ -1046,25 +1097,25 @@ def _tvdb(self, method_name, method_data): self.collectionBuilder.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value)) def _filters(self, method_name, method_data): - for dict_data in util.parse(self.collectionBuilder.Type, method_name, method_data, datatype="listdict"): + for dict_data in util.parse(self.Type, method_name, method_data, datatype="listdict"): dict_methods = {dm.lower(): dm for dm in dict_data} current_filters = [] validate = True if "validate" in dict_methods: if dict_data[dict_methods["validate"]] is None: - raise Failed(f"{self.collectionBuilder.Type} Error: validate filter attribute is blank") + raise Failed(f"{self.Type} Error: validate filter attribute is blank") if not isinstance(dict_data[dict_methods["validate"]], bool): - raise Failed(f"{self.collectionBuilder.Type} Error: validate filter attribute must be either true or false") + raise Failed(f"{self.Type} Error: validate filter attribute must be either true or false") validate = dict_data.pop(dict_methods["validate"]) for filter_method, filter_data in dict_data.items(): filter_attr, modifier, filter_final = self.collectionBuilder.library.split(filter_method) message = None if filter_final not in all_filters: - message = f"{self.collectionBuilder.Type} Error: {filter_final} is not a valid filter attribute" + message = f"{self.Type} Error: {filter_final} is not a valid filter attribute" elif self.collectionBuilder.builder_level in filters and 
filter_attr not in filters[self.collectionBuilder.builder_level]:
-                    message = f"{self.collectionBuilder.Type} Error: {filter_final} is not a valid {self.collectionBuilder.builder_level} filter attribute"
+                    message = f"{self.Type} Error: {filter_final} is not a valid {self.collectionBuilder.builder_level} filter attribute"
                 elif filter_final is None:
-                    message = f"{self.collectionBuilder.Type} Error: {filter_final} filter attribute is blank"
+                    message = f"{self.Type} Error: {filter_final} filter attribute is blank"
                 else:
                     try:
                         final_data = self.collectionBuilder.validate_attribute(filter_attr, modifier, f"{filter_final} filter", filter_data, validate)

From c697f80789f8e623d4e31ccc118f4a60cd08ba6d Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 08:53:47 -0400
Subject: [PATCH 09/24] refactor: move for loop to setter

---
 modules/builder/__init__.py                  | 19 ++--------------
 modules/builder/_builder_attribute_setter.py | 23 +++++++++++++++++---
 2 files changed, 22 insertions(+), 20 deletions(-)

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index 81d050752..13fef06ad 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -794,23 +794,8 @@ def apply_vars(input_str, var_set, var_key, var_limit):
         if self.smart:
             self.custom_sort = None
 
-        attributeSetter = BuilderAttributeSetter(self, logger)
-        for method_key, method_data in self.data.items():
-            if method_key.lower() in ignored_details:
-                continue
-            logger.debug("")
-            method_name, method_mod, method_final = self.library.split(method_key)
-            if method_name in ignored_details:
-                continue
-            logger.debug(f"Validating Method: {method_key}")
-            logger.debug(f"Value: {method_data}")
-            try:
-                attributeSetter.setAttributes(method_name, method_data, method_final, methods, method_mod)
-            except Failed as e:
-                if self.validate_builders:
-                    raise
-                else:
-                    logger.error(e)
+        attributeSetter = BuilderAttributeSetter()
+        attributeSetter.setAttributes(self, methods, logger)
 
         if "append_label" in methods and not self.playlist and not self.overlay:
             logger.debug("")
diff --git a/modules/builder/_builder_attribute_setter.py b/modules/builder/_builder_attribute_setter.py
index f143ac8a7..a85f90df4 100644
--- a/modules/builder/_builder_attribute_setter.py
+++ b/modules/builder/_builder_attribute_setter.py
@@ -7,12 +7,29 @@ from modules.builder._config import *
 
 
 class BuilderAttributeSetter:
-    def __init__(self, collectionBuilder, logger):
+    def setAttributes(self, collectionBuilder, methods, logger):
         self.collectionBuilder = collectionBuilder
-        self.logger = logger
         self.Type = collectionBuilder.Type
+        self.logger = logger
+
+        for method_key, method_data in collectionBuilder.data.items():
+            if method_key.lower() in ignored_details:
+                continue
+            logger.debug("")
+            method_name, method_mod, method_final = collectionBuilder.library.split(method_key)
+            if method_name in ignored_details:
+                continue
+            logger.debug(f"Validating Method: {method_key}")
+            logger.debug(f"Value: {method_data}")
+            try:
+                self._setAttribute(method_name, method_data, method_final, methods, method_mod)
+            except Failed as e:
+                if collectionBuilder.validate_builders:
+                    raise
+                else:
+                    logger.error(e)
 
-    def setAttributes(self, method_name, method_data, method_final, methods, method_mod):
+    def _setAttribute(self, method_name, method_data, method_final, methods, method_mod):
        self._validateAttributes(method_name, method_data, method_final)
 
         if method_name in summary_details:

From 490b8ce11ff104e5d7b87730e22f1ecf6abc0108 Mon Sep 17 00:00:00 2001
From: Minwei Xu
Date: Tue, 21 May 2024 09:12:47 -0400
Subject: [PATCH 10/24] refactor: validate_methods

---
 modules/builder/__init__.py                   | 331 +----------------
 ...tribute_setter.py => _attribute_setter.py} |   0
 modules/builder/_validate_methods.py          | 333 ++++++++++++++++++
 3 files changed, 337 insertions(+), 327 deletions(-)
 rename modules/builder/{_builder_attribute_setter.py => _attribute_setter.py} (100%)
 create mode 100644 modules/builder/_validate_methods.py

diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py
index 13fef06ad..dcdce459d 100644
--- a/modules/builder/__init__.py
+++ b/modules/builder/__init__.py
@@ -2,15 +2,16 @@
 from arrapi import ArrException
 from datetime import datetime
 from modules import plex, tmdb, util
-from modules.util import Failed, FilterFailed, NonExisting, NotScheduled, NotScheduledRange, Deleted
+from modules.util import Failed, FilterFailed, NotScheduled, Deleted
 from modules.overlay import Overlay
 from modules.poster import KometaImage
 from plexapi.audio import Artist, Album, Track
 from plexapi.exceptions import NotFound
 from plexapi.video import Movie, Show, Season, Episode
 from urllib.parse import quote
-from ._builder_attribute_setter import BuilderAttributeSetter
 from ._config import *
+from ._attribute_setter import BuilderAttributeSetter
+from ._validate_methods import validateMethods
 
 logger = util.logger
 class CollectionBuilder:
@@ -468,332 +469,8 @@ def apply_vars(input_str, var_set, var_key, var_limit):
                 else:
                     server_check = pl_library.PlexServer.machineIdentifier
 
-        self.ignore_blank_results = False
-        if "ignore_blank_results" in methods and not self.playlist:
-            logger.debug("")
-            logger.debug("Validating Method: ignore_blank_results")
-            logger.debug(f"Value: {data[methods['ignore_blank_results']]}")
-            self.ignore_blank_results = util.parse(self.Type, "ignore_blank_results", self.data, datatype="bool", methods=methods, default=False)
-
-        self.smart_filter_details = ""
-        self.smart_label_url = None
-        self.smart_label = {"sort_by": "random", "all": {"label": [self.name]}}
-        self.smart_label_collection = False
-        if "smart_label" in methods and not self.playlist and not self.overlay and not self.library.is_music:
-            logger.debug("")
-            logger.debug("Validating Method: smart_label")
-            self.smart_label_collection = True
-            if not self.data[methods["smart_label"]]:
-                logger.warning(f"{self.Type} Error: smart_label attribute is blank defaulting to random")
-            else:
-                logger.debug(f"Value: {self.data[methods['smart_label']]}")
-                if isinstance(self.data[methods["smart_label"]], dict):
-                    _data, replaced = util.replace_label(self.name, self.data[methods["smart_label"]])
-                    if not replaced:
-                        raise Failed("Config Error: <<smart_label>> not found in the smart_label attribute data")
-                    self.smart_label = _data
-                elif (self.library.is_movie and str(self.data[methods["smart_label"]]).lower() in plex.movie_sorts) \
-                        or (self.library.is_show and str(self.data[methods["smart_label"]]).lower() in plex.show_sorts):
-                    self.smart_label["sort_by"] = str(self.data[methods["smart_label"]]).lower()
-                else:
-                    logger.warning(f"{self.Type} Error: smart_label attribute: {self.data[methods['smart_label']]} is invalid defaulting to random")
-        if self.smart_label_collection and self.library.smart_label_check(self.name):
-            try:
-                _, self.smart_filter_details, self.smart_label_url = self.build_filter("smart_label", self.smart_label, default_sort="random")
-            except FilterFailed as e:
-                if self.ignore_blank_results:
-                    raise
-                else:
-                    raise Failed(str(e))
-
-        if "delete_not_scheduled" in methods and
not self.overlay: - logger.debug("") - logger.debug("Validating Method: delete_not_scheduled") - logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") - self.details["delete_not_scheduled"] = util.parse(self.Type, "delete_not_scheduled", self.data, datatype="bool", methods=methods, default=False) - - if "schedule" in methods and not self.config.requested_collections and not self.overlay: - logger.debug("") - logger.debug("Validating Method: schedule") - if not self.data[methods["schedule"]]: - raise Failed(f"{self.Type} Error: schedule attribute is blank") - else: - logger.debug(f"Value: {self.data[methods['schedule']]}") - err = None - try: - util.schedule_check("schedule", self.data[methods["schedule"]], self.current_time, self.config.run_hour) - except NonExisting as e: - self.non_existing = str(e) - except NotScheduledRange as e: - err = e - except NotScheduled as e: - if not self.config.ignore_schedules: - err = e - if err: - suffix = "" - if self.details["delete_not_scheduled"]: - try: - self.obj = self.library.get_playlist(self.name) if self.playlist else self.library.get_collection(self.name, force_search=True) - logger.info(self.delete()) - self.deleted = True - suffix = f" and was deleted" - except Failed: - suffix = f" and could not be found to delete" - raise NotScheduled(f"{err}\n\n{self.Type} {self.name} not scheduled to run{suffix}") - - if "delete_collections_named" in methods and not self.overlay and not self.playlist: - logger.debug("") - logger.debug("Validating Method: delete_collections_named") - logger.debug(f"Value: {data[methods['delete_collections_named']]}") - for del_col in util.parse(self.Type, "delete_collections_named", self.data, datatype="strlist", methods=methods): - try: - del_obj = self.library.get_collection(del_col, force_search=True) - self.library.delete(del_obj) - logger.info(f"Collection: {del_obj.title} deleted") - except Failed as e: - if str(e).startswith("Plex Error: Failed to delete"): - logger.error(e) + validateMethods(self, methods, data, logger) - self.collectionless = "plex_collectionless" in methods and not self.playlist and not self.overlay - - self.validate_builders = True - if "validate_builders" in methods and not self.overlay: - logger.debug("") - logger.debug("Validating Method: validate_builders") - logger.debug(f"Value: {data[methods['validate_builders']]}") - self.validate_builders = util.parse(self.Type, "validate_builders", self.data, datatype="bool", methods=methods, default=True) - - self.run_again = False - if "run_again" in methods and not self.overlay: - logger.debug("") - logger.debug("Validating Method: run_again") - logger.debug(f"Value: {data[methods['run_again']]}") - self.run_again = util.parse(self.Type, "run_again", self.data, datatype="bool", methods=methods, default=False) - - self.build_collection = False if self.overlay else True - if "build_collection" in methods and not self.playlist and not self.overlay: - logger.debug("") - logger.debug("Validating Method: build_collection") - logger.debug(f"Value: {data[methods['build_collection']]}") - self.build_collection = util.parse(self.Type, "build_collection", self.data, datatype="bool", methods=methods, default=True) - - self.blank_collection = False - if "blank_collection" in methods and not self.playlist and not self.overlay: - logger.debug("") - logger.debug("Validating Method: blank_collection") - logger.debug(f"Value: {data[methods['blank_collection']]}") - self.blank_collection = util.parse(self.Type, "blank_collection", self.data, datatype="bool", 
methods=methods, default=False) - - self.sync = self.library.sync_mode == "sync" and self.type != "overlay" - if "sync_mode" in methods and not self.overlay: - logger.debug("") - logger.debug("Validating Method: sync_mode") - if not self.data[methods["sync_mode"]]: - logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {self.library.sync_mode}") - else: - logger.debug(f"Value: {self.data[methods['sync_mode']]}") - if self.data[methods["sync_mode"]].lower() not in ["append", "sync"]: - logger.warning(f"Collection Warning: {self.data[methods['sync_mode']]} sync_mode invalid using general: {self.library.sync_mode}") - else: - self.sync = self.data[methods["sync_mode"]].lower() == "sync" - - self.tmdb_person_offset = 0 - if "tmdb_person_offset" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_person_offset") - logger.debug(f"Value: {data[methods['tmdb_person_offset']]}") - self.tmdb_person_offset = util.parse(self.Type, "tmdb_person_offset", self.data, datatype="int", methods=methods, default=0, minimum=0) - - self.tmdb_birthday = None - if "tmdb_birthday" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_birthday") - logger.debug(f"Value: {data[methods['tmdb_birthday']]}") - if not self.data[methods["tmdb_birthday"]]: - raise Failed(f"{self.Type} Error: tmdb_birthday attribute is blank") - parsed_birthday = util.parse(self.Type, "tmdb_birthday", self.data, datatype="dict", methods=methods) - parsed_methods = {m.lower(): m for m in parsed_birthday} - self.tmdb_birthday = { - "before": util.parse(self.Type, "before", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), - "after": util.parse(self.Type, "after", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), - "this_month": util.parse(self.Type, "this_month", parsed_birthday, datatype="bool", methods=parsed_methods, default=False) - } - - first_person = None - self.tmdb_person_birthday = None - if "tmdb_person" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_person") - if not self.data[methods["tmdb_person"]]: - raise Failed(f"{self.Type} Error: tmdb_person attribute is blank") - else: - logger.debug(f"Value: {self.data[methods['tmdb_person']]}") - valid_names = [] - for tmdb_person in util.get_list(self.data[methods["tmdb_person"]]): - try: - if not first_person: - first_person = tmdb_person - person = self.config.TMDb.get_person(util.regex_first_int(tmdb_person, "TMDb Person ID")) - valid_names.append(person.name) - if person.biography: - self.summaries["tmdb_person"] = person.biography - if person.profile_url: - self.posters["tmdb_person"] = person.profile_url - if person.birthday and not self.tmdb_person_birthday: - self.tmdb_person_birthday = person.birthday - except Failed as e: - if str(e).startswith("TMDb Error"): - logger.error(e) - else: - try: - results = self.config.TMDb.search_people(tmdb_person) - if results: - result_index = len(results) - 1 if self.tmdb_person_offset >= len(results) else self.tmdb_person_offset - valid_names.append(tmdb_person) - if results[result_index].biography: - self.summaries["tmdb_person"] = results[result_index].biography - if results[result_index].profile_url: - self.posters["tmdb_person"] = results[result_index].profile_url - if results[result_index].birthday and not self.tmdb_person_birthday: - self.tmdb_person_birthday = results[result_index].birthday - except Failed as ee: - logger.error(ee) - if len(valid_names) > 0: - self.details["tmdb_person"] = 
valid_names - else: - raise Failed(f"{self.Type} Error: No valid TMDb Person IDs in {self.data[methods['tmdb_person']]}") - - if self.tmdb_birthday: - if "tmdb_person" not in methods: - raise NotScheduled("Skipped because tmdb_person is required when using tmdb_birthday") - if not self.tmdb_person_birthday: - raise NotScheduled(f"Skipped because No Birthday was found for {first_person}") - now = datetime(self.current_time.year, self.current_time.month, self.current_time.day) - - try: - delta = datetime(now.year, self.tmdb_person_birthday.month, self.tmdb_person_birthday.day) - except ValueError: - delta = datetime(now.year, self.tmdb_person_birthday.month, 28) - - before_delta = delta - after_delta = delta - if delta < now: - try: - before_delta = datetime(now.year + 1, self.tmdb_person_birthday.month, self.tmdb_person_birthday.day) - except ValueError: - before_delta = datetime(now.year + 1, self.tmdb_person_birthday.month, 28) - elif delta > now: - try: - after_delta = datetime(now.year - 1, self.tmdb_person_birthday.month, self.tmdb_person_birthday.day) - except ValueError: - after_delta = datetime(now.year - 1, self.tmdb_person_birthday.month, 28) - days_after = (now - after_delta).days - days_before = (before_delta - now).days - msg = "" - if self.tmdb_birthday["this_month"]: - if now.month != self.tmdb_person_birthday.month: - msg = f"Skipped because Birthday Month: {self.tmdb_person_birthday.month} is not {now.month}" - elif days_before > self.tmdb_birthday["before"] and days_after > self.tmdb_birthday["after"]: - msg = f"Skipped because days until {self.tmdb_person_birthday.month}/{self.tmdb_person_birthday.day}: {days_before} > {self.tmdb_birthday['before']} and days after {self.tmdb_person_birthday.month}/{self.tmdb_person_birthday.day}: {days_after} > {self.tmdb_birthday['after']}" - if msg: - suffix = "" - if self.details["delete_not_scheduled"]: - try: - self.obj = self.library.get_playlist(self.name) if self.playlist else self.library.get_collection(self.name, force_search=True) - logger.info(self.delete()) - self.deleted = True - suffix = f" and was deleted" - except Failed: - suffix = f" and could not be found to delete" - raise NotScheduled(f"{msg}{suffix}") - - self.smart_url = None - self.smart_type_key = None - if "smart_url" in methods and not self.playlist and not self.overlay: - logger.debug("") - logger.debug("Validating Method: smart_url") - if not self.data[methods["smart_url"]]: - raise Failed(f"{self.Type} Error: smart_url attribute is blank") - else: - logger.debug(f"Value: {self.data[methods['smart_url']]}") - try: - self.smart_url, self.smart_type_key = self.library.get_smart_filter_from_uri(self.data[methods["smart_url"]]) - except ValueError: - raise Failed(f"{self.Type} Error: smart_url is incorrectly formatted") - - if "smart_filter" in methods and not self.playlist and not self.overlay: - try: - self.smart_type_key, self.smart_filter_details, self.smart_url = self.build_filter("smart_filter", self.data[methods["smart_filter"]], display=True, default_sort="random") - except FilterFailed as e: - if self.ignore_blank_results: - raise - else: - raise Failed(str(e)) - - if self.collectionless: - for x in ["smart_label", "smart_filter", "smart_url"]: - if x in methods: - self.collectionless = False - logger.info("") - logger.warning(f"{self.Type} Error: {x} is not compatible with plex_collectionless removing plex_collectionless") - - if self.run_again and self.smart_url: - self.run_again = False - logger.info("") - logger.warning(f"{self.Type} Error: 
smart_filter is not compatible with run_again removing run_again") - - if self.smart_url and self.smart_label_collection: - raise Failed(f"{self.Type} Error: smart_filter is not compatible with smart_label") - - if self.parts_collection and "smart_url" in methods: - raise Failed(f"{self.Type} Error: smart_url is not compatible with builder_level: {self.builder_level}") - - self.smart = self.smart_url or self.smart_label_collection - - test_sort = None - if "collection_order" in methods and not self.playlist and self.build_collection: - if self.data[methods["collection_order"]] is None: - raise Failed(f"{self.Type} Warning: collection_order attribute is blank") - else: - test_sort = self.data[methods["collection_order"]] - elif "collection_order" not in methods and not self.playlist and not self.blank_collection and self.build_collection and self.library.default_collection_order and not self.smart: - test_sort = self.library.default_collection_order - logger.info("") - logger.warning(f"{self.Type} Warning: collection_order not found using library default_collection_order: {test_sort}") - self.custom_sort = "custom" if self.playlist else None - if test_sort: - if self.smart: - raise Failed(f"{self.Type} Error: collection_order does not work with Smart Collections") - logger.debug("") - logger.debug("Validating Method: collection_order") - logger.debug(f"Value: {test_sort}") - if test_sort in plex.collection_order_options + ["custom.asc", "custom.desc"]: - self.details["collection_order"] = test_sort.split(".")[0] - if test_sort.startswith("custom") and self.build_collection: - self.custom_sort = test_sort - else: - sort_type = self.builder_level - if sort_type == "item": - if self.library.is_show: - sort_type = "show" - elif self.library.is_music: - sort_type = "artist" - else: - sort_type = "movie" - _, _, sorts = plex.sort_types[sort_type] - if not isinstance(test_sort, list): - test_sort = [test_sort] - self.custom_sort = [] - for ts in test_sort: - if ts not in sorts: - raise Failed(f"{self.Type} Error: collection_order: {ts} is invalid. 
Options: {', '.join(sorts)}") - self.custom_sort.append(ts) - if test_sort not in plex.collection_order_options + ["custom.asc", "custom.desc"] and not self.custom_sort: - raise Failed(f"{self.Type} Error: {test_sort} collection_order invalid\n\trelease (Order Collection by release dates)\n\talpha (Order Collection Alphabetically)\n\tcustom.asc/custom.desc (Custom Order Collection)\n\tOther sorting options can be found at https://github.com/Kometa-Team/Kometa/wiki/Smart-Builders#sort-options") - - if self.smart: - self.custom_sort = None - attributeSetter = BuilderAttributeSetter() attributeSetter.setAttributes(self, methods, logger) diff --git a/modules/builder/_builder_attribute_setter.py b/modules/builder/_attribute_setter.py similarity index 100% rename from modules/builder/_builder_attribute_setter.py rename to modules/builder/_attribute_setter.py diff --git a/modules/builder/_validate_methods.py b/modules/builder/_validate_methods.py new file mode 100644 index 000000000..03d9a4e5d --- /dev/null +++ b/modules/builder/_validate_methods.py @@ -0,0 +1,333 @@ +from datetime import datetime +from modules import plex, util +from modules.util import Failed, FilterFailed, NotScheduled +from modules.builder._config import * +from modules.util import Failed, FilterFailed, NonExisting, NotScheduled, NotScheduledRange + +def validateMethods(collectionBuilder, methods, data, logger): + collectionBuilder.ignore_blank_results = False + if "ignore_blank_results" in methods and not collectionBuilder.playlist: + logger.debug("") + logger.debug("Validating Method: ignore_blank_results") + logger.debug(f"Value: {data[methods['ignore_blank_results']]}") + collectionBuilder.ignore_blank_results = util.parse(collectionBuilder.Type, "ignore_blank_results", collectionBuilder.data, datatype="bool", methods=methods, default=False) + + collectionBuilder.smart_filter_details = "" + collectionBuilder.smart_label_url = None + collectionBuilder.smart_label = {"sort_by": "random", "all": {"label": [collectionBuilder.name]}} + collectionBuilder.smart_label_collection = False + if "smart_label" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay and not collectionBuilder.library.is_music: + logger.debug("") + logger.debug("Validating Method: smart_label") + collectionBuilder.smart_label_collection = True + if not collectionBuilder.data[methods["smart_label"]]: + logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute is blank defaulting to random") + else: + logger.debug(f"Value: {collectionBuilder.data[methods['smart_label']]}") + if isinstance(collectionBuilder.data[methods["smart_label"]], dict): + _data, replaced = util.replace_label(collectionBuilder.name, collectionBuilder.data[methods["smart_label"]]) + if not replaced: + raise Failed("Config Error: <> not found in the smart_label attribute data") + collectionBuilder.smart_label = _data + elif (collectionBuilder.library.is_movie and str(collectionBuilder.data[methods["smart_label"]]).lower() in plex.movie_sorts) \ + or (collectionBuilder.library.is_show and str(collectionBuilder.data[methods["smart_label"]]).lower() in plex.show_sorts): + collectionBuilder.smart_label["sort_by"] = str(collectionBuilder.data[methods["smart_label"]]).lower() + else: + logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute: {collectionBuilder.data[methods['smart_label']]} is invalid defaulting to random") + if collectionBuilder.smart_label_collection and collectionBuilder.library.smart_label_check(collectionBuilder.name): + 
try: + _, collectionBuilder.smart_filter_details, collectionBuilder.smart_label_url = collectionBuilder.build_filter("smart_label", collectionBuilder.smart_label, default_sort="random") + except FilterFailed as e: + if collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) + + if "delete_not_scheduled" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: delete_not_scheduled") + logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") + collectionBuilder.details["delete_not_scheduled"] = util.parse(collectionBuilder.Type, "delete_not_scheduled", collectionBuilder.data, datatype="bool", methods=methods, default=False) + + if "schedule" in methods and not collectionBuilder.config.requested_collections and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: schedule") + if not collectionBuilder.data[methods["schedule"]]: + raise Failed(f"{collectionBuilder.Type} Error: schedule attribute is blank") + else: + logger.debug(f"Value: {collectionBuilder.data[methods['schedule']]}") + err = None + try: + util.schedule_check("schedule", collectionBuilder.data[methods["schedule"]], collectionBuilder.current_time, collectionBuilder.config.run_hour) + except NonExisting as e: + collectionBuilder.non_existing = str(e) + except NotScheduledRange as e: + err = e + except NotScheduled as e: + if not collectionBuilder.config.ignore_schedules: + err = e + if err: + suffix = "" + if collectionBuilder.details["delete_not_scheduled"]: + try: + collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) + logger.info(collectionBuilder.delete()) + collectionBuilder.deleted = True + suffix = f" and was deleted" + except Failed: + suffix = f" and could not be found to delete" + raise NotScheduled(f"{err}\n\n{collectionBuilder.Type} {collectionBuilder.name} not scheduled to run{suffix}") + + if "delete_collections_named" in methods and not collectionBuilder.overlay and not collectionBuilder.playlist: + logger.debug("") + logger.debug("Validating Method: delete_collections_named") + logger.debug(f"Value: {data[methods['delete_collections_named']]}") + for del_col in util.parse(collectionBuilder.Type, "delete_collections_named", collectionBuilder.data, datatype="strlist", methods=methods): + try: + del_obj = collectionBuilder.library.get_collection(del_col, force_search=True) + collectionBuilder.library.delete(del_obj) + logger.info(f"Collection: {del_obj.title} deleted") + except Failed as e: + if str(e).startswith("Plex Error: Failed to delete"): + logger.error(e) + + collectionBuilder.collectionless = "plex_collectionless" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay + + collectionBuilder.validate_builders = True + if "validate_builders" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: validate_builders") + logger.debug(f"Value: {data[methods['validate_builders']]}") + collectionBuilder.validate_builders = util.parse(collectionBuilder.Type, "validate_builders", collectionBuilder.data, datatype="bool", methods=methods, default=True) + + collectionBuilder.run_again = False + if "run_again" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: run_again") + logger.debug(f"Value: {data[methods['run_again']]}") + 
collectionBuilder.run_again = util.parse(collectionBuilder.Type, "run_again", collectionBuilder.data, datatype="bool", methods=methods, default=False) + + collectionBuilder.build_collection = False if collectionBuilder.overlay else True + if "build_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: build_collection") + logger.debug(f"Value: {data[methods['build_collection']]}") + collectionBuilder.build_collection = util.parse(collectionBuilder.Type, "build_collection", collectionBuilder.data, datatype="bool", methods=methods, default=True) + + collectionBuilder.blank_collection = False + if "blank_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: blank_collection") + logger.debug(f"Value: {data[methods['blank_collection']]}") + collectionBuilder.blank_collection = util.parse(collectionBuilder.Type, "blank_collection", collectionBuilder.data, datatype="bool", methods=methods, default=False) + + collectionBuilder.sync = collectionBuilder.library.sync_mode == "sync" and collectionBuilder.type != "overlay" + if "sync_mode" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: sync_mode") + if not collectionBuilder.data[methods["sync_mode"]]: + logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {collectionBuilder.library.sync_mode}") + else: + logger.debug(f"Value: {collectionBuilder.data[methods['sync_mode']]}") + if collectionBuilder.data[methods["sync_mode"]].lower() not in ["append", "sync"]: + logger.warning(f"Collection Warning: {collectionBuilder.data[methods['sync_mode']]} sync_mode invalid using general: {collectionBuilder.library.sync_mode}") + else: + collectionBuilder.sync = collectionBuilder.data[methods["sync_mode"]].lower() == "sync" + + collectionBuilder.tmdb_person_offset = 0 + if "tmdb_person_offset" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_person_offset") + logger.debug(f"Value: {data[methods['tmdb_person_offset']]}") + collectionBuilder.tmdb_person_offset = util.parse(collectionBuilder.Type, "tmdb_person_offset", collectionBuilder.data, datatype="int", methods=methods, default=0, minimum=0) + + collectionBuilder.tmdb_birthday = None + if "tmdb_birthday" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_birthday") + logger.debug(f"Value: {data[methods['tmdb_birthday']]}") + if not collectionBuilder.data[methods["tmdb_birthday"]]: + raise Failed(f"{collectionBuilder.Type} Error: tmdb_birthday attribute is blank") + parsed_birthday = util.parse(collectionBuilder.Type, "tmdb_birthday", collectionBuilder.data, datatype="dict", methods=methods) + parsed_methods = {m.lower(): m for m in parsed_birthday} + collectionBuilder.tmdb_birthday = { + "before": util.parse(collectionBuilder.Type, "before", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), + "after": util.parse(collectionBuilder.Type, "after", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), + "this_month": util.parse(collectionBuilder.Type, "this_month", parsed_birthday, datatype="bool", methods=parsed_methods, default=False) + } + + first_person = None + collectionBuilder.tmdb_person_birthday = None + if "tmdb_person" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_person") + if not 
collectionBuilder.data[methods["tmdb_person"]]: + raise Failed(f"{collectionBuilder.Type} Error: tmdb_person attribute is blank") + else: + logger.debug(f"Value: {collectionBuilder.data[methods['tmdb_person']]}") + valid_names = [] + for tmdb_person in util.get_list(collectionBuilder.data[methods["tmdb_person"]]): + try: + if not first_person: + first_person = tmdb_person + person = collectionBuilder.config.TMDb.get_person(util.regex_first_int(tmdb_person, "TMDb Person ID")) + valid_names.append(person.name) + if person.biography: + collectionBuilder.summaries["tmdb_person"] = person.biography + if person.profile_url: + collectionBuilder.posters["tmdb_person"] = person.profile_url + if person.birthday and not collectionBuilder.tmdb_person_birthday: + collectionBuilder.tmdb_person_birthday = person.birthday + except Failed as e: + if str(e).startswith("TMDb Error"): + logger.error(e) + else: + try: + results = collectionBuilder.config.TMDb.search_people(tmdb_person) + if results: + result_index = len(results) - 1 if collectionBuilder.tmdb_person_offset >= len(results) else collectionBuilder.tmdb_person_offset + valid_names.append(tmdb_person) + if results[result_index].biography: + collectionBuilder.summaries["tmdb_person"] = results[result_index].biography + if results[result_index].profile_url: + collectionBuilder.posters["tmdb_person"] = results[result_index].profile_url + if results[result_index].birthday and not collectionBuilder.tmdb_person_birthday: + collectionBuilder.tmdb_person_birthday = results[result_index].birthday + except Failed as ee: + logger.error(ee) + if len(valid_names) > 0: + collectionBuilder.details["tmdb_person"] = valid_names + else: + raise Failed(f"{collectionBuilder.Type} Error: No valid TMDb Person IDs in {collectionBuilder.data[methods['tmdb_person']]}") + + if collectionBuilder.tmdb_birthday: + if "tmdb_person" not in methods: + raise NotScheduled("Skipped because tmdb_person is required when using tmdb_birthday") + if not collectionBuilder.tmdb_person_birthday: + raise NotScheduled(f"Skipped because No Birthday was found for {first_person}") + now = datetime(collectionBuilder.current_time.year, collectionBuilder.current_time.month, collectionBuilder.current_time.day) + + try: + delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + except ValueError: + delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, 28) + + before_delta = delta + after_delta = delta + if delta < now: + try: + before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + except ValueError: + before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, 28) + elif delta > now: + try: + after_delta = datetime(now.year - 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + except ValueError: + after_delta = datetime(now.year - 1, collectionBuilder.tmdb_person_birthday.month, 28) + days_after = (now - after_delta).days + days_before = (before_delta - now).days + msg = "" + if collectionBuilder.tmdb_birthday["this_month"]: + if now.month != collectionBuilder.tmdb_person_birthday.month: + msg = f"Skipped because Birthday Month: {collectionBuilder.tmdb_person_birthday.month} is not {now.month}" + elif days_before > collectionBuilder.tmdb_birthday["before"] and days_after > collectionBuilder.tmdb_birthday["after"]: + msg = f"Skipped because days until 
{collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_before} > {collectionBuilder.tmdb_birthday['before']} and days after {collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_after} > {collectionBuilder.tmdb_birthday['after']}" + if msg: + suffix = "" + if collectionBuilder.details["delete_not_scheduled"]: + try: + collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) + logger.info(collectionBuilder.delete()) + collectionBuilder.deleted = True + suffix = f" and was deleted" + except Failed: + suffix = f" and could not be found to delete" + raise NotScheduled(f"{msg}{suffix}") + + collectionBuilder.smart_url = None + collectionBuilder.smart_type_key = None + if "smart_url" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: smart_url") + if not collectionBuilder.data[methods["smart_url"]]: + raise Failed(f"{collectionBuilder.Type} Error: smart_url attribute is blank") + else: + logger.debug(f"Value: {collectionBuilder.data[methods['smart_url']]}") + try: + collectionBuilder.smart_url, collectionBuilder.smart_type_key = collectionBuilder.library.get_smart_filter_from_uri(collectionBuilder.data[methods["smart_url"]]) + except ValueError: + raise Failed(f"{collectionBuilder.Type} Error: smart_url is incorrectly formatted") + + if "smart_filter" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + try: + collectionBuilder.smart_type_key, collectionBuilder.smart_filter_details, collectionBuilder.smart_url = collectionBuilder.build_filter("smart_filter", collectionBuilder.data[methods["smart_filter"]], display=True, default_sort="random") + except FilterFailed as e: + if collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) + + if collectionBuilder.collectionless: + for x in ["smart_label", "smart_filter", "smart_url"]: + if x in methods: + collectionBuilder.collectionless = False + logger.info("") + logger.warning(f"{collectionBuilder.Type} Error: {x} is not compatible with plex_collectionless removing plex_collectionless") + + if collectionBuilder.run_again and collectionBuilder.smart_url: + collectionBuilder.run_again = False + logger.info("") + logger.warning(f"{collectionBuilder.Type} Error: smart_filter is not compatible with run_again removing run_again") + + if collectionBuilder.smart_url and collectionBuilder.smart_label_collection: + raise Failed(f"{collectionBuilder.Type} Error: smart_filter is not compatible with smart_label") + + if collectionBuilder.parts_collection and "smart_url" in methods: + raise Failed(f"{collectionBuilder.Type} Error: smart_url is not compatible with builder_level: {collectionBuilder.builder_level}") + + collectionBuilder.smart = collectionBuilder.smart_url or collectionBuilder.smart_label_collection + + test_sort = None + if "collection_order" in methods and not collectionBuilder.playlist and collectionBuilder.build_collection: + if collectionBuilder.data[methods["collection_order"]] is None: + raise Failed(f"{collectionBuilder.Type} Warning: collection_order attribute is blank") + else: + test_sort = collectionBuilder.data[methods["collection_order"]] + elif "collection_order" not in methods and not collectionBuilder.playlist and not collectionBuilder.blank_collection and 
collectionBuilder.build_collection and collectionBuilder.library.default_collection_order and not collectionBuilder.smart: + test_sort = collectionBuilder.library.default_collection_order + logger.info("") + logger.warning(f"{collectionBuilder.Type} Warning: collection_order not found using library default_collection_order: {test_sort}") + collectionBuilder.custom_sort = "custom" if collectionBuilder.playlist else None + if test_sort: + if collectionBuilder.smart: + raise Failed(f"{collectionBuilder.Type} Error: collection_order does not work with Smart Collections") + logger.debug("") + logger.debug("Validating Method: collection_order") + logger.debug(f"Value: {test_sort}") + if test_sort in plex.collection_order_options + ["custom.asc", "custom.desc"]: + collectionBuilder.details["collection_order"] = test_sort.split(".")[0] + if test_sort.startswith("custom") and collectionBuilder.build_collection: + collectionBuilder.custom_sort = test_sort + else: + sort_type = collectionBuilder.builder_level + if sort_type == "item": + if collectionBuilder.library.is_show: + sort_type = "show" + elif collectionBuilder.library.is_music: + sort_type = "artist" + else: + sort_type = "movie" + _, _, sorts = plex.sort_types[sort_type] + if not isinstance(test_sort, list): + test_sort = [test_sort] + collectionBuilder.custom_sort = [] + for ts in test_sort: + if ts not in sorts: + raise Failed(f"{collectionBuilder.Type} Error: collection_order: {ts} is invalid. Options: {', '.join(sorts)}") + collectionBuilder.custom_sort.append(ts) + if test_sort not in plex.collection_order_options + ["custom.asc", "custom.desc"] and not collectionBuilder.custom_sort: + raise Failed(f"{collectionBuilder.Type} Error: {test_sort} collection_order invalid\n\trelease (Order Collection by release dates)\n\talpha (Order Collection Alphabetically)\n\tcustom.asc/custom.desc (Custom Order Collection)\n\tOther sorting options can be found at https://github.com/Kometa-Team/Kometa/wiki/Smart-Builders#sort-options") + + if collectionBuilder.smart: + collectionBuilder.custom_sort = None + \ No newline at end of file From c0e4bbe7675ee0935d1f48b5fb17755136f62ff4 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 09:17:02 -0400 Subject: [PATCH 11/24] refactor: Add __all__ to limit exposures --- modules/builder/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index dcdce459d..89985305b 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -14,6 +14,9 @@ from ._validate_methods import validateMethods logger = util.logger + +__all__ = ["CollectionBuilder"] + class CollectionBuilder: def __init__(self, config, metadata, name, data, library=None, overlay=None, extra=None): self.config = config From 86f1c886933d7772a80d5410e9ff93594b3776d7 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 09:40:22 -0400 Subject: [PATCH 12/24] refactor: separate out functions in validate_methods --- modules/builder/__init__.py | 4 +- modules/builder/_validate_methods.py | 611 ++++++++++++++------------- 2 files changed, 316 insertions(+), 299 deletions(-) diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 89985305b..0643cd02c 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -11,7 +11,7 @@ from urllib.parse import quote from ._config import * from ._attribute_setter import BuilderAttributeSetter -from ._validate_methods import validateMethods +from ._validate_methods import 
BuilderMethodValidator logger = util.logger @@ -472,7 +472,7 @@ def apply_vars(input_str, var_set, var_key, var_limit): else: server_check = pl_library.PlexServer.machineIdentifier - validateMethods(self, methods, data, logger) + BuilderMethodValidator().validate_methods(self, methods, logger) attributeSetter = BuilderAttributeSetter() attributeSetter.setAttributes(self, methods, logger) diff --git a/modules/builder/_validate_methods.py b/modules/builder/_validate_methods.py index 03d9a4e5d..b73b4d4d5 100644 --- a/modules/builder/_validate_methods.py +++ b/modules/builder/_validate_methods.py @@ -4,330 +4,347 @@ from modules.builder._config import * from modules.util import Failed, FilterFailed, NonExisting, NotScheduled, NotScheduledRange -def validateMethods(collectionBuilder, methods, data, logger): - collectionBuilder.ignore_blank_results = False - if "ignore_blank_results" in methods and not collectionBuilder.playlist: - logger.debug("") - logger.debug("Validating Method: ignore_blank_results") - logger.debug(f"Value: {data[methods['ignore_blank_results']]}") - collectionBuilder.ignore_blank_results = util.parse(collectionBuilder.Type, "ignore_blank_results", collectionBuilder.data, datatype="bool", methods=methods, default=False) +class BuilderMethodValidator: + def validate_methods(self, collectionBuilder, methods, logger): + data = collectionBuilder.data - collectionBuilder.smart_filter_details = "" - collectionBuilder.smart_label_url = None - collectionBuilder.smart_label = {"sort_by": "random", "all": {"label": [collectionBuilder.name]}} - collectionBuilder.smart_label_collection = False - if "smart_label" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay and not collectionBuilder.library.is_music: - logger.debug("") - logger.debug("Validating Method: smart_label") - collectionBuilder.smart_label_collection = True - if not collectionBuilder.data[methods["smart_label"]]: - logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute is blank defaulting to random") - else: - logger.debug(f"Value: {collectionBuilder.data[methods['smart_label']]}") - if isinstance(collectionBuilder.data[methods["smart_label"]], dict): - _data, replaced = util.replace_label(collectionBuilder.name, collectionBuilder.data[methods["smart_label"]]) - if not replaced: - raise Failed("Config Error: <> not found in the smart_label attribute data") - collectionBuilder.smart_label = _data - elif (collectionBuilder.library.is_movie and str(collectionBuilder.data[methods["smart_label"]]).lower() in plex.movie_sorts) \ - or (collectionBuilder.library.is_show and str(collectionBuilder.data[methods["smart_label"]]).lower() in plex.show_sorts): - collectionBuilder.smart_label["sort_by"] = str(collectionBuilder.data[methods["smart_label"]]).lower() - else: - logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute: {collectionBuilder.data[methods['smart_label']]} is invalid defaulting to random") - if collectionBuilder.smart_label_collection and collectionBuilder.library.smart_label_check(collectionBuilder.name): - try: - _, collectionBuilder.smart_filter_details, collectionBuilder.smart_label_url = collectionBuilder.build_filter("smart_label", collectionBuilder.smart_label, default_sort="random") - except FilterFailed as e: - if collectionBuilder.ignore_blank_results: - raise - else: - raise Failed(str(e)) - - if "delete_not_scheduled" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: delete_not_scheduled") - 
logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") - collectionBuilder.details["delete_not_scheduled"] = util.parse(collectionBuilder.Type, "delete_not_scheduled", collectionBuilder.data, datatype="bool", methods=methods, default=False) - - if "schedule" in methods and not collectionBuilder.config.requested_collections and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: schedule") - if not collectionBuilder.data[methods["schedule"]]: - raise Failed(f"{collectionBuilder.Type} Error: schedule attribute is blank") - else: - logger.debug(f"Value: {collectionBuilder.data[methods['schedule']]}") - err = None - try: - util.schedule_check("schedule", collectionBuilder.data[methods["schedule"]], collectionBuilder.current_time, collectionBuilder.config.run_hour) - except NonExisting as e: - collectionBuilder.non_existing = str(e) - except NotScheduledRange as e: - err = e - except NotScheduled as e: - if not collectionBuilder.config.ignore_schedules: - err = e - if err: - suffix = "" - if collectionBuilder.details["delete_not_scheduled"]: - try: - collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) - logger.info(collectionBuilder.delete()) - collectionBuilder.deleted = True - suffix = f" and was deleted" - except Failed: - suffix = f" and could not be found to delete" - raise NotScheduled(f"{err}\n\n{collectionBuilder.Type} {collectionBuilder.name} not scheduled to run{suffix}") + collectionBuilder.ignore_blank_results = False + if "ignore_blank_results" in methods and not collectionBuilder.playlist: + logger.debug("") + logger.debug("Validating Method: ignore_blank_results") + logger.debug(f"Value: {data[methods['ignore_blank_results']]}") + collectionBuilder.ignore_blank_results = util.parse(collectionBuilder.Type, "ignore_blank_results", data, datatype="bool", methods=methods, default=False) - if "delete_collections_named" in methods and not collectionBuilder.overlay and not collectionBuilder.playlist: - logger.debug("") - logger.debug("Validating Method: delete_collections_named") - logger.debug(f"Value: {data[methods['delete_collections_named']]}") - for del_col in util.parse(collectionBuilder.Type, "delete_collections_named", collectionBuilder.data, datatype="strlist", methods=methods): - try: - del_obj = collectionBuilder.library.get_collection(del_col, force_search=True) - collectionBuilder.library.delete(del_obj) - logger.info(f"Collection: {del_obj.title} deleted") - except Failed as e: - if str(e).startswith("Plex Error: Failed to delete"): - logger.error(e) + self._validate_smart_label(collectionBuilder, methods, data, logger) - collectionBuilder.collectionless = "plex_collectionless" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay + if "delete_not_scheduled" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: delete_not_scheduled") + logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") + collectionBuilder.details["delete_not_scheduled"] = util.parse(collectionBuilder.Type, "delete_not_scheduled", data, datatype="bool", methods=methods, default=False) - collectionBuilder.validate_builders = True - if "validate_builders" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: validate_builders") - logger.debug(f"Value: 
{data[methods['validate_builders']]}") - collectionBuilder.validate_builders = util.parse(collectionBuilder.Type, "validate_builders", collectionBuilder.data, datatype="bool", methods=methods, default=True) + self._validate_schedule(collectionBuilder, methods, data, logger) - collectionBuilder.run_again = False - if "run_again" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: run_again") - logger.debug(f"Value: {data[methods['run_again']]}") - collectionBuilder.run_again = util.parse(collectionBuilder.Type, "run_again", collectionBuilder.data, datatype="bool", methods=methods, default=False) + if "delete_collections_named" in methods and not collectionBuilder.overlay and not collectionBuilder.playlist: + logger.debug("") + logger.debug("Validating Method: delete_collections_named") + logger.debug(f"Value: {data[methods['delete_collections_named']]}") + for del_col in util.parse(collectionBuilder.Type, "delete_collections_named", data, datatype="strlist", methods=methods): + try: + del_obj = collectionBuilder.library.get_collection(del_col, force_search=True) + collectionBuilder.library.delete(del_obj) + logger.info(f"Collection: {del_obj.title} deleted") + except Failed as e: + if str(e).startswith("Plex Error: Failed to delete"): + logger.error(e) - collectionBuilder.build_collection = False if collectionBuilder.overlay else True - if "build_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: build_collection") - logger.debug(f"Value: {data[methods['build_collection']]}") - collectionBuilder.build_collection = util.parse(collectionBuilder.Type, "build_collection", collectionBuilder.data, datatype="bool", methods=methods, default=True) + collectionBuilder.collectionless = "plex_collectionless" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay - collectionBuilder.blank_collection = False - if "blank_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: blank_collection") - logger.debug(f"Value: {data[methods['blank_collection']]}") - collectionBuilder.blank_collection = util.parse(collectionBuilder.Type, "blank_collection", collectionBuilder.data, datatype="bool", methods=methods, default=False) + collectionBuilder.validate_builders = True + if "validate_builders" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: validate_builders") + logger.debug(f"Value: {data[methods['validate_builders']]}") + collectionBuilder.validate_builders = util.parse(collectionBuilder.Type, "validate_builders", data, datatype="bool", methods=methods, default=True) - collectionBuilder.sync = collectionBuilder.library.sync_mode == "sync" and collectionBuilder.type != "overlay" - if "sync_mode" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: sync_mode") - if not collectionBuilder.data[methods["sync_mode"]]: - logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {collectionBuilder.library.sync_mode}") - else: - logger.debug(f"Value: {collectionBuilder.data[methods['sync_mode']]}") - if collectionBuilder.data[methods["sync_mode"]].lower() not in ["append", "sync"]: - logger.warning(f"Collection Warning: {collectionBuilder.data[methods['sync_mode']]} sync_mode invalid using general: {collectionBuilder.library.sync_mode}") - 
else: - collectionBuilder.sync = collectionBuilder.data[methods["sync_mode"]].lower() == "sync" + collectionBuilder.run_again = False + if "run_again" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: run_again") + logger.debug(f"Value: {data[methods['run_again']]}") + collectionBuilder.run_again = util.parse(collectionBuilder.Type, "run_again", data, datatype="bool", methods=methods, default=False) - collectionBuilder.tmdb_person_offset = 0 - if "tmdb_person_offset" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_person_offset") - logger.debug(f"Value: {data[methods['tmdb_person_offset']]}") - collectionBuilder.tmdb_person_offset = util.parse(collectionBuilder.Type, "tmdb_person_offset", collectionBuilder.data, datatype="int", methods=methods, default=0, minimum=0) + collectionBuilder.build_collection = False if collectionBuilder.overlay else True + if "build_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: build_collection") + logger.debug(f"Value: {data[methods['build_collection']]}") + collectionBuilder.build_collection = util.parse(collectionBuilder.Type, "build_collection", data, datatype="bool", methods=methods, default=True) - collectionBuilder.tmdb_birthday = None - if "tmdb_birthday" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_birthday") - logger.debug(f"Value: {data[methods['tmdb_birthday']]}") - if not collectionBuilder.data[methods["tmdb_birthday"]]: - raise Failed(f"{collectionBuilder.Type} Error: tmdb_birthday attribute is blank") - parsed_birthday = util.parse(collectionBuilder.Type, "tmdb_birthday", collectionBuilder.data, datatype="dict", methods=methods) - parsed_methods = {m.lower(): m for m in parsed_birthday} - collectionBuilder.tmdb_birthday = { - "before": util.parse(collectionBuilder.Type, "before", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), - "after": util.parse(collectionBuilder.Type, "after", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), - "this_month": util.parse(collectionBuilder.Type, "this_month", parsed_birthday, datatype="bool", methods=parsed_methods, default=False) - } + collectionBuilder.blank_collection = False + if "blank_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: blank_collection") + logger.debug(f"Value: {data[methods['blank_collection']]}") + collectionBuilder.blank_collection = util.parse(collectionBuilder.Type, "blank_collection", data, datatype="bool", methods=methods, default=False) - first_person = None - collectionBuilder.tmdb_person_birthday = None - if "tmdb_person" in methods: - logger.debug("") - logger.debug("Validating Method: tmdb_person") - if not collectionBuilder.data[methods["tmdb_person"]]: - raise Failed(f"{collectionBuilder.Type} Error: tmdb_person attribute is blank") - else: - logger.debug(f"Value: {collectionBuilder.data[methods['tmdb_person']]}") - valid_names = [] - for tmdb_person in util.get_list(collectionBuilder.data[methods["tmdb_person"]]): + self._validateSyncMode(collectionBuilder, methods, data, logger) + self._validate_tmdb(collectionBuilder, methods, data, logger) + self._validate_smart_url(collectionBuilder, methods, data, logger) + self._validate_custom_order(collectionBuilder, methods, data, logger) + + def _validate_smart_label(self, 
collectionBuilder, methods, data, logger): + collectionBuilder.smart_filter_details = "" + collectionBuilder.smart_label_url = None + collectionBuilder.smart_label = {"sort_by": "random", "all": {"label": [collectionBuilder.name]}} + collectionBuilder.smart_label_collection = False + if "smart_label" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay and not collectionBuilder.library.is_music: + logger.debug("") + logger.debug("Validating Method: smart_label") + collectionBuilder.smart_label_collection = True + if not data[methods["smart_label"]]: + logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute is blank defaulting to random") + else: + logger.debug(f"Value: {data[methods['smart_label']]}") + if isinstance(data[methods["smart_label"]], dict): + _data, replaced = util.replace_label(collectionBuilder.name, data[methods["smart_label"]]) + if not replaced: + raise Failed("Config Error: <> not found in the smart_label attribute data") + collectionBuilder.smart_label = _data + elif (collectionBuilder.library.is_movie and str(data[methods["smart_label"]]).lower() in plex.movie_sorts) \ + or (collectionBuilder.library.is_show and str(data[methods["smart_label"]]).lower() in plex.show_sorts): + collectionBuilder.smart_label["sort_by"] = str(data[methods["smart_label"]]).lower() + else: + logger.warning(f"{collectionBuilder.Type} Error: smart_label attribute: {data[methods['smart_label']]} is invalid defaulting to random") + if collectionBuilder.smart_label_collection and collectionBuilder.library.smart_label_check(collectionBuilder.name): + try: + _, collectionBuilder.smart_filter_details, collectionBuilder.smart_label_url = collectionBuilder.build_filter("smart_label", collectionBuilder.smart_label, default_sort="random") + except FilterFailed as e: + if collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) + + def _validate_schedule(self, collectionBuilder, methods, data, logger): + if "schedule" in methods and not collectionBuilder.config.requested_collections and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: schedule") + if not data[methods["schedule"]]: + raise Failed(f"{collectionBuilder.Type} Error: schedule attribute is blank") + else: + logger.debug(f"Value: {data[methods['schedule']]}") + err = None try: - if not first_person: - first_person = tmdb_person - person = collectionBuilder.config.TMDb.get_person(util.regex_first_int(tmdb_person, "TMDb Person ID")) - valid_names.append(person.name) - if person.biography: - collectionBuilder.summaries["tmdb_person"] = person.biography - if person.profile_url: - collectionBuilder.posters["tmdb_person"] = person.profile_url - if person.birthday and not collectionBuilder.tmdb_person_birthday: - collectionBuilder.tmdb_person_birthday = person.birthday - except Failed as e: - if str(e).startswith("TMDb Error"): - logger.error(e) - else: + util.schedule_check("schedule", data[methods["schedule"]], collectionBuilder.current_time, collectionBuilder.config.run_hour) + except NonExisting as e: + collectionBuilder.non_existing = str(e) + except NotScheduledRange as e: + err = e + except NotScheduled as e: + if not collectionBuilder.config.ignore_schedules: + err = e + if err: + suffix = "" + if collectionBuilder.details["delete_not_scheduled"]: try: - results = collectionBuilder.config.TMDb.search_people(tmdb_person) - if results: - result_index = len(results) - 1 if collectionBuilder.tmdb_person_offset >= len(results) else 
collectionBuilder.tmdb_person_offset - valid_names.append(tmdb_person) - if results[result_index].biography: - collectionBuilder.summaries["tmdb_person"] = results[result_index].biography - if results[result_index].profile_url: - collectionBuilder.posters["tmdb_person"] = results[result_index].profile_url - if results[result_index].birthday and not collectionBuilder.tmdb_person_birthday: - collectionBuilder.tmdb_person_birthday = results[result_index].birthday - except Failed as ee: - logger.error(ee) - if len(valid_names) > 0: - collectionBuilder.details["tmdb_person"] = valid_names + collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) + logger.info(collectionBuilder.delete()) + collectionBuilder.deleted = True + suffix = f" and was deleted" + except Failed: + suffix = f" and could not be found to delete" + raise NotScheduled(f"{err}\n\n{collectionBuilder.Type} {collectionBuilder.name} not scheduled to run{suffix}") + + def _validateSyncMode(self, collectionBuilder, methods, data, logger): + collectionBuilder.sync = collectionBuilder.library.sync_mode == "sync" and collectionBuilder.type != "overlay" + if "sync_mode" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: sync_mode") + if not data[methods["sync_mode"]]: + logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {collectionBuilder.library.sync_mode}") else: - raise Failed(f"{collectionBuilder.Type} Error: No valid TMDb Person IDs in {collectionBuilder.data[methods['tmdb_person']]}") + logger.debug(f"Value: {data[methods['sync_mode']]}") + if data[methods["sync_mode"]].lower() not in ["append", "sync"]: + logger.warning(f"Collection Warning: {data[methods['sync_mode']]} sync_mode invalid using general: {collectionBuilder.library.sync_mode}") + else: + collectionBuilder.sync = data[methods["sync_mode"]].lower() == "sync" - if collectionBuilder.tmdb_birthday: - if "tmdb_person" not in methods: - raise NotScheduled("Skipped because tmdb_person is required when using tmdb_birthday") - if not collectionBuilder.tmdb_person_birthday: - raise NotScheduled(f"Skipped because No Birthday was found for {first_person}") - now = datetime(collectionBuilder.current_time.year, collectionBuilder.current_time.month, collectionBuilder.current_time.day) + def _validate_tmdb(self, collectionBuilder, methods, data, logger): + collectionBuilder.tmdb_person_offset = 0 + if "tmdb_person_offset" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_person_offset") + logger.debug(f"Value: {data[methods['tmdb_person_offset']]}") + collectionBuilder.tmdb_person_offset = util.parse(collectionBuilder.Type, "tmdb_person_offset", data, datatype="int", methods=methods, default=0, minimum=0) - try: - delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) - except ValueError: - delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, 28) + collectionBuilder.tmdb_birthday = None + if "tmdb_birthday" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_birthday") + logger.debug(f"Value: {data[methods['tmdb_birthday']]}") + if not data[methods["tmdb_birthday"]]: + raise Failed(f"{collectionBuilder.Type} Error: tmdb_birthday attribute is blank") + parsed_birthday = util.parse(collectionBuilder.Type, "tmdb_birthday", data, 
datatype="dict", methods=methods) + parsed_methods = {m.lower(): m for m in parsed_birthday} + collectionBuilder.tmdb_birthday = { + "before": util.parse(collectionBuilder.Type, "before", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), + "after": util.parse(collectionBuilder.Type, "after", parsed_birthday, datatype="int", methods=parsed_methods, minimum=0, default=0), + "this_month": util.parse(collectionBuilder.Type, "this_month", parsed_birthday, datatype="bool", methods=parsed_methods, default=False) + } + + first_person = None + collectionBuilder.tmdb_person_birthday = None + if "tmdb_person" in methods: + logger.debug("") + logger.debug("Validating Method: tmdb_person") + if not data[methods["tmdb_person"]]: + raise Failed(f"{collectionBuilder.Type} Error: tmdb_person attribute is blank") + else: + logger.debug(f"Value: {data[methods['tmdb_person']]}") + valid_names = [] + for tmdb_person in util.get_list(data[methods["tmdb_person"]]): + try: + if not first_person: + first_person = tmdb_person + person = collectionBuilder.config.TMDb.get_person(util.regex_first_int(tmdb_person, "TMDb Person ID")) + valid_names.append(person.name) + if person.biography: + collectionBuilder.summaries["tmdb_person"] = person.biography + if person.profile_url: + collectionBuilder.posters["tmdb_person"] = person.profile_url + if person.birthday and not collectionBuilder.tmdb_person_birthday: + collectionBuilder.tmdb_person_birthday = person.birthday + except Failed as e: + if str(e).startswith("TMDb Error"): + logger.error(e) + else: + try: + results = collectionBuilder.config.TMDb.search_people(tmdb_person) + if results: + result_index = len(results) - 1 if collectionBuilder.tmdb_person_offset >= len(results) else collectionBuilder.tmdb_person_offset + valid_names.append(tmdb_person) + if results[result_index].biography: + collectionBuilder.summaries["tmdb_person"] = results[result_index].biography + if results[result_index].profile_url: + collectionBuilder.posters["tmdb_person"] = results[result_index].profile_url + if results[result_index].birthday and not collectionBuilder.tmdb_person_birthday: + collectionBuilder.tmdb_person_birthday = results[result_index].birthday + except Failed as ee: + logger.error(ee) + if len(valid_names) > 0: + collectionBuilder.details["tmdb_person"] = valid_names + else: + raise Failed(f"{collectionBuilder.Type} Error: No valid TMDb Person IDs in {data[methods['tmdb_person']]}") + + if collectionBuilder.tmdb_birthday: + if "tmdb_person" not in methods: + raise NotScheduled("Skipped because tmdb_person is required when using tmdb_birthday") + if not collectionBuilder.tmdb_person_birthday: + raise NotScheduled(f"Skipped because No Birthday was found for {first_person}") + now = datetime(collectionBuilder.current_time.year, collectionBuilder.current_time.month, collectionBuilder.current_time.day) - before_delta = delta - after_delta = delta - if delta < now: - try: - before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) - except ValueError: - before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, 28) - elif delta > now: try: - after_delta = datetime(now.year - 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) except ValueError: - after_delta = datetime(now.year - 1, 
collectionBuilder.tmdb_person_birthday.month, 28) - days_after = (now - after_delta).days - days_before = (before_delta - now).days - msg = "" - if collectionBuilder.tmdb_birthday["this_month"]: - if now.month != collectionBuilder.tmdb_person_birthday.month: - msg = f"Skipped because Birthday Month: {collectionBuilder.tmdb_person_birthday.month} is not {now.month}" - elif days_before > collectionBuilder.tmdb_birthday["before"] and days_after > collectionBuilder.tmdb_birthday["after"]: - msg = f"Skipped because days until {collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_before} > {collectionBuilder.tmdb_birthday['before']} and days after {collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_after} > {collectionBuilder.tmdb_birthday['after']}" - if msg: - suffix = "" - if collectionBuilder.details["delete_not_scheduled"]: + delta = datetime(now.year, collectionBuilder.tmdb_person_birthday.month, 28) + + before_delta = delta + after_delta = delta + if delta < now: + try: + before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + except ValueError: + before_delta = datetime(now.year + 1, collectionBuilder.tmdb_person_birthday.month, 28) + elif delta > now: try: - collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) - logger.info(collectionBuilder.delete()) - collectionBuilder.deleted = True - suffix = f" and was deleted" - except Failed: - suffix = f" and could not be found to delete" - raise NotScheduled(f"{msg}{suffix}") + after_delta = datetime(now.year - 1, collectionBuilder.tmdb_person_birthday.month, collectionBuilder.tmdb_person_birthday.day) + except ValueError: + after_delta = datetime(now.year - 1, collectionBuilder.tmdb_person_birthday.month, 28) + days_after = (now - after_delta).days + days_before = (before_delta - now).days + msg = "" + if collectionBuilder.tmdb_birthday["this_month"]: + if now.month != collectionBuilder.tmdb_person_birthday.month: + msg = f"Skipped because Birthday Month: {collectionBuilder.tmdb_person_birthday.month} is not {now.month}" + elif days_before > collectionBuilder.tmdb_birthday["before"] and days_after > collectionBuilder.tmdb_birthday["after"]: + msg = f"Skipped because days until {collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_before} > {collectionBuilder.tmdb_birthday['before']} and days after {collectionBuilder.tmdb_person_birthday.month}/{collectionBuilder.tmdb_person_birthday.day}: {days_after} > {collectionBuilder.tmdb_birthday['after']}" + if msg: + suffix = "" + if collectionBuilder.details["delete_not_scheduled"]: + try: + collectionBuilder.obj = collectionBuilder.library.get_playlist(collectionBuilder.name) if collectionBuilder.playlist else collectionBuilder.library.get_collection(collectionBuilder.name, force_search=True) + logger.info(collectionBuilder.delete()) + collectionBuilder.deleted = True + suffix = f" and was deleted" + except Failed: + suffix = f" and could not be found to delete" + raise NotScheduled(f"{msg}{suffix}") + + def _validate_smart_url(self, collectionBuilder, methods, data, logger): + collectionBuilder.smart_url = None + collectionBuilder.smart_type_key = None + if "smart_url" in methods and not collectionBuilder.playlist and not 
collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: smart_url") + if not data[methods["smart_url"]]: + raise Failed(f"{collectionBuilder.Type} Error: smart_url attribute is blank") + else: + logger.debug(f"Value: {data[methods['smart_url']]}") + try: + collectionBuilder.smart_url, collectionBuilder.smart_type_key = collectionBuilder.library.get_smart_filter_from_uri(data[methods["smart_url"]]) + except ValueError: + raise Failed(f"{collectionBuilder.Type} Error: smart_url is incorrectly formatted") - collectionBuilder.smart_url = None - collectionBuilder.smart_type_key = None - if "smart_url" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: smart_url") - if not collectionBuilder.data[methods["smart_url"]]: - raise Failed(f"{collectionBuilder.Type} Error: smart_url attribute is blank") - else: - logger.debug(f"Value: {collectionBuilder.data[methods['smart_url']]}") + if "smart_filter" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: try: - collectionBuilder.smart_url, collectionBuilder.smart_type_key = collectionBuilder.library.get_smart_filter_from_uri(collectionBuilder.data[methods["smart_url"]]) - except ValueError: - raise Failed(f"{collectionBuilder.Type} Error: smart_url is incorrectly formatted") + collectionBuilder.smart_type_key, collectionBuilder.smart_filter_details, collectionBuilder.smart_url = collectionBuilder.build_filter("smart_filter", data[methods["smart_filter"]], display=True, default_sort="random") + except FilterFailed as e: + if collectionBuilder.ignore_blank_results: + raise + else: + raise Failed(str(e)) - if "smart_filter" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - try: - collectionBuilder.smart_type_key, collectionBuilder.smart_filter_details, collectionBuilder.smart_url = collectionBuilder.build_filter("smart_filter", collectionBuilder.data[methods["smart_filter"]], display=True, default_sort="random") - except FilterFailed as e: - if collectionBuilder.ignore_blank_results: - raise - else: - raise Failed(str(e)) + if collectionBuilder.collectionless: + for x in ["smart_label", "smart_filter", "smart_url"]: + if x in methods: + collectionBuilder.collectionless = False + logger.info("") + logger.warning(f"{collectionBuilder.Type} Error: {x} is not compatible with plex_collectionless removing plex_collectionless") - if collectionBuilder.collectionless: - for x in ["smart_label", "smart_filter", "smart_url"]: - if x in methods: - collectionBuilder.collectionless = False - logger.info("") - logger.warning(f"{collectionBuilder.Type} Error: {x} is not compatible with plex_collectionless removing plex_collectionless") + if collectionBuilder.run_again and collectionBuilder.smart_url: + collectionBuilder.run_again = False + logger.info("") + logger.warning(f"{collectionBuilder.Type} Error: smart_filter is not compatible with run_again removing run_again") - if collectionBuilder.run_again and collectionBuilder.smart_url: - collectionBuilder.run_again = False - logger.info("") - logger.warning(f"{collectionBuilder.Type} Error: smart_filter is not compatible with run_again removing run_again") + if collectionBuilder.smart_url and collectionBuilder.smart_label_collection: + raise Failed(f"{collectionBuilder.Type} Error: smart_filter is not compatible with smart_label") - if collectionBuilder.smart_url and collectionBuilder.smart_label_collection: - raise Failed(f"{collectionBuilder.Type} 
Error: smart_filter is not compatible with smart_label") + if collectionBuilder.parts_collection and "smart_url" in methods: + raise Failed(f"{collectionBuilder.Type} Error: smart_url is not compatible with builder_level: {collectionBuilder.builder_level}") - if collectionBuilder.parts_collection and "smart_url" in methods: - raise Failed(f"{collectionBuilder.Type} Error: smart_url is not compatible with builder_level: {collectionBuilder.builder_level}") + collectionBuilder.smart = collectionBuilder.smart_url or collectionBuilder.smart_label_collection - collectionBuilder.smart = collectionBuilder.smart_url or collectionBuilder.smart_label_collection + def _validate_custom_order(self, collectionBuilder, methods, data, logger): + test_sort = None + if "collection_order" in methods and not collectionBuilder.playlist and collectionBuilder.build_collection: + if data[methods["collection_order"]] is None: + raise Failed(f"{collectionBuilder.Type} Warning: collection_order attribute is blank") + else: + test_sort = data[methods["collection_order"]] + elif "collection_order" not in methods and not collectionBuilder.playlist and not collectionBuilder.blank_collection and collectionBuilder.build_collection and collectionBuilder.library.default_collection_order and not collectionBuilder.smart: + test_sort = collectionBuilder.library.default_collection_order + logger.info("") + logger.warning(f"{collectionBuilder.Type} Warning: collection_order not found using library default_collection_order: {test_sort}") + collectionBuilder.custom_sort = "custom" if collectionBuilder.playlist else None + if test_sort: + if collectionBuilder.smart: + raise Failed(f"{collectionBuilder.Type} Error: collection_order does not work with Smart Collections") + logger.debug("") + logger.debug("Validating Method: collection_order") + logger.debug(f"Value: {test_sort}") + if test_sort in plex.collection_order_options + ["custom.asc", "custom.desc"]: + collectionBuilder.details["collection_order"] = test_sort.split(".")[0] + if test_sort.startswith("custom") and collectionBuilder.build_collection: + collectionBuilder.custom_sort = test_sort + else: + sort_type = collectionBuilder.builder_level + if sort_type == "item": + if collectionBuilder.library.is_show: + sort_type = "show" + elif collectionBuilder.library.is_music: + sort_type = "artist" + else: + sort_type = "movie" + _, _, sorts = plex.sort_types[sort_type] + if not isinstance(test_sort, list): + test_sort = [test_sort] + collectionBuilder.custom_sort = [] + for ts in test_sort: + if ts not in sorts: + raise Failed(f"{collectionBuilder.Type} Error: collection_order: {ts} is invalid. 
Options: {', '.join(sorts)}") + collectionBuilder.custom_sort.append(ts) + if test_sort not in plex.collection_order_options + ["custom.asc", "custom.desc"] and not collectionBuilder.custom_sort: + raise Failed(f"{collectionBuilder.Type} Error: {test_sort} collection_order invalid\n\trelease (Order Collection by release dates)\n\talpha (Order Collection Alphabetically)\n\tcustom.asc/custom.desc (Custom Order Collection)\n\tOther sorting options can be found at https://github.com/Kometa-Team/Kometa/wiki/Smart-Builders#sort-options") - test_sort = None - if "collection_order" in methods and not collectionBuilder.playlist and collectionBuilder.build_collection: - if collectionBuilder.data[methods["collection_order"]] is None: - raise Failed(f"{collectionBuilder.Type} Warning: collection_order attribute is blank") - else: - test_sort = collectionBuilder.data[methods["collection_order"]] - elif "collection_order" not in methods and not collectionBuilder.playlist and not collectionBuilder.blank_collection and collectionBuilder.build_collection and collectionBuilder.library.default_collection_order and not collectionBuilder.smart: - test_sort = collectionBuilder.library.default_collection_order - logger.info("") - logger.warning(f"{collectionBuilder.Type} Warning: collection_order not found using library default_collection_order: {test_sort}") - collectionBuilder.custom_sort = "custom" if collectionBuilder.playlist else None - if test_sort: if collectionBuilder.smart: - raise Failed(f"{collectionBuilder.Type} Error: collection_order does not work with Smart Collections") - logger.debug("") - logger.debug("Validating Method: collection_order") - logger.debug(f"Value: {test_sort}") - if test_sort in plex.collection_order_options + ["custom.asc", "custom.desc"]: - collectionBuilder.details["collection_order"] = test_sort.split(".")[0] - if test_sort.startswith("custom") and collectionBuilder.build_collection: - collectionBuilder.custom_sort = test_sort - else: - sort_type = collectionBuilder.builder_level - if sort_type == "item": - if collectionBuilder.library.is_show: - sort_type = "show" - elif collectionBuilder.library.is_music: - sort_type = "artist" - else: - sort_type = "movie" - _, _, sorts = plex.sort_types[sort_type] - if not isinstance(test_sort, list): - test_sort = [test_sort] - collectionBuilder.custom_sort = [] - for ts in test_sort: - if ts not in sorts: - raise Failed(f"{collectionBuilder.Type} Error: collection_order: {ts} is invalid. 
Options: {', '.join(sorts)}") - collectionBuilder.custom_sort.append(ts) - if test_sort not in plex.collection_order_options + ["custom.asc", "custom.desc"] and not collectionBuilder.custom_sort: - raise Failed(f"{collectionBuilder.Type} Error: {test_sort} collection_order invalid\n\trelease (Order Collection by release dates)\n\talpha (Order Collection Alphabetically)\n\tcustom.asc/custom.desc (Custom Order Collection)\n\tOther sorting options can be found at https://github.com/Kometa-Team/Kometa/wiki/Smart-Builders#sort-options") - - if collectionBuilder.smart: - collectionBuilder.custom_sort = None - \ No newline at end of file + collectionBuilder.custom_sort = None From 432922751d352fd88a307aa0ddb68d997e99e832 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 09:41:13 -0400 Subject: [PATCH 13/24] refactor: use underscore naming in attribtue setter --- modules/builder/__init__.py | 2 +- modules/builder/_attribute_setter.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 0643cd02c..7b8e875af 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -475,7 +475,7 @@ def apply_vars(input_str, var_set, var_key, var_limit): BuilderMethodValidator().validate_methods(self, methods, logger) attributeSetter = BuilderAttributeSetter() - attributeSetter.setAttributes(self, methods, logger) + attributeSetter.set_attributes(self, methods, logger) if "append_label" in methods and not self.playlist and not self.overlay: logger.debug("") diff --git a/modules/builder/_attribute_setter.py b/modules/builder/_attribute_setter.py index a85f90df4..a4ccfc47d 100644 --- a/modules/builder/_attribute_setter.py +++ b/modules/builder/_attribute_setter.py @@ -7,7 +7,7 @@ from modules.builder._config import * class BuilderAttributeSetter: - def setAttributes(self, collectionBuilder, methods, logger): + def set_attributes(self, collectionBuilder, methods, logger): self.collectionBuilder = collectionBuilder self.Type = collectionBuilder.Type self.logger = logger @@ -22,15 +22,15 @@ def setAttributes(self, collectionBuilder, methods, logger): logger.debug(f"Validating Method: {method_key}") logger.debug(f"Value: {method_data}") try: - self._setAttribute(method_name, method_data, method_final, methods, method_mod) + self._set_attribute(method_name, method_data, method_final, methods, method_mod) except Failed as e: if collectionBuilder.validate_builders: raise else: logger.error(e) - def _setAttribute(self, method_name, method_data, method_final, methods, method_mod): - self._validateAttributes(method_name, method_data, method_final) + def _set_attribute(self, method_name, method_data, method_final, methods, method_mod): + self._validate_attributes(method_name, method_data, method_final) if method_name in summary_details: self._summary(method_name, method_data) @@ -77,7 +77,7 @@ def _setAttribute(self, method_name, method_data, method_final, methods, method_ else: raise Failed(f"{self.Type} Error: {method_final} attribute not supported") - def _validateAttributes(self, method_name, method_data, method_final): + def _validate_attributes(self, method_name, method_data, method_final): if method_data is None and method_name in all_builders + plex.searches and method_final not in none_builders: raise Failed(f"{self.Type} Error: {method_final} attribute is blank") elif method_data is None and method_final not in none_details: From 3a08d7111108be7cdd54c1f6cb6abd9f2aa60672 Mon Sep 17 00:00:00 2001 
From: Minwei Xu Date: Tue, 21 May 2024 09:53:54 -0400 Subject: [PATCH 14/24] refactor: validators --- modules/builder/_validate_methods.py | 135 +++++++++++++++------------ 1 file changed, 76 insertions(+), 59 deletions(-) diff --git a/modules/builder/_validate_methods.py b/modules/builder/_validate_methods.py index b73b4d4d5..4d6ef0c13 100644 --- a/modules/builder/_validate_methods.py +++ b/modules/builder/_validate_methods.py @@ -7,7 +7,26 @@ class BuilderMethodValidator: def validate_methods(self, collectionBuilder, methods, logger): data = collectionBuilder.data - + validators = [ + self._validate_ignore_blank_results, + self._validate_smart_label, + self._validate_delete_not_scheduled, + self._validate_schedule, + self._validate_delete_collections_named, + self._validate_collectionless, + self._validate_builders, + self._validate_run_again, + self._validate_build_collection, + self._validate_blank_collection, + self._validateSyncMode, + self._validate_tmdb, + self._validate_smart_url, + self._validate_custom_order, + ] + for validator in validators: + validator(collectionBuilder, methods, data, logger) + + def _validate_ignore_blank_results(self, collectionBuilder, methods, data, logger): collectionBuilder.ignore_blank_results = False if "ignore_blank_results" in methods and not collectionBuilder.playlist: logger.debug("") @@ -15,64 +34,6 @@ def validate_methods(self, collectionBuilder, methods, logger): logger.debug(f"Value: {data[methods['ignore_blank_results']]}") collectionBuilder.ignore_blank_results = util.parse(collectionBuilder.Type, "ignore_blank_results", data, datatype="bool", methods=methods, default=False) - self._validate_smart_label(collectionBuilder, methods, data, logger) - - if "delete_not_scheduled" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: delete_not_scheduled") - logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") - collectionBuilder.details["delete_not_scheduled"] = util.parse(collectionBuilder.Type, "delete_not_scheduled", data, datatype="bool", methods=methods, default=False) - - self._validate_schedule(collectionBuilder, methods, data, logger) - - if "delete_collections_named" in methods and not collectionBuilder.overlay and not collectionBuilder.playlist: - logger.debug("") - logger.debug("Validating Method: delete_collections_named") - logger.debug(f"Value: {data[methods['delete_collections_named']]}") - for del_col in util.parse(collectionBuilder.Type, "delete_collections_named", data, datatype="strlist", methods=methods): - try: - del_obj = collectionBuilder.library.get_collection(del_col, force_search=True) - collectionBuilder.library.delete(del_obj) - logger.info(f"Collection: {del_obj.title} deleted") - except Failed as e: - if str(e).startswith("Plex Error: Failed to delete"): - logger.error(e) - - collectionBuilder.collectionless = "plex_collectionless" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay - - collectionBuilder.validate_builders = True - if "validate_builders" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: validate_builders") - logger.debug(f"Value: {data[methods['validate_builders']]}") - collectionBuilder.validate_builders = util.parse(collectionBuilder.Type, "validate_builders", data, datatype="bool", methods=methods, default=True) - - collectionBuilder.run_again = False - if "run_again" in methods and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: 
run_again") - logger.debug(f"Value: {data[methods['run_again']]}") - collectionBuilder.run_again = util.parse(collectionBuilder.Type, "run_again", data, datatype="bool", methods=methods, default=False) - - collectionBuilder.build_collection = False if collectionBuilder.overlay else True - if "build_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: build_collection") - logger.debug(f"Value: {data[methods['build_collection']]}") - collectionBuilder.build_collection = util.parse(collectionBuilder.Type, "build_collection", data, datatype="bool", methods=methods, default=True) - - collectionBuilder.blank_collection = False - if "blank_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: - logger.debug("") - logger.debug("Validating Method: blank_collection") - logger.debug(f"Value: {data[methods['blank_collection']]}") - collectionBuilder.blank_collection = util.parse(collectionBuilder.Type, "blank_collection", data, datatype="bool", methods=methods, default=False) - - self._validateSyncMode(collectionBuilder, methods, data, logger) - self._validate_tmdb(collectionBuilder, methods, data, logger) - self._validate_smart_url(collectionBuilder, methods, data, logger) - self._validate_custom_order(collectionBuilder, methods, data, logger) - def _validate_smart_label(self, collectionBuilder, methods, data, logger): collectionBuilder.smart_filter_details = "" collectionBuilder.smart_label_url = None @@ -105,6 +66,13 @@ def _validate_smart_label(self, collectionBuilder, methods, data, logger): else: raise Failed(str(e)) + def _validate_delete_not_scheduled(self, collectionBuilder, methods, data, logger): + if "delete_not_scheduled" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: delete_not_scheduled") + logger.debug(f"Value: {data[methods['delete_not_scheduled']]}") + collectionBuilder.details["delete_not_scheduled"] = util.parse(collectionBuilder.Type, "delete_not_scheduled", data, datatype="bool", methods=methods, default=False) + def _validate_schedule(self, collectionBuilder, methods, data, logger): if "schedule" in methods and not collectionBuilder.config.requested_collections and not collectionBuilder.overlay: logger.debug("") @@ -135,6 +103,55 @@ def _validate_schedule(self, collectionBuilder, methods, data, logger): suffix = f" and could not be found to delete" raise NotScheduled(f"{err}\n\n{collectionBuilder.Type} {collectionBuilder.name} not scheduled to run{suffix}") + def _validate_delete_collections_named(self, collectionBuilder, methods, data, logger): + if "delete_collections_named" in methods and not collectionBuilder.overlay and not collectionBuilder.playlist: + logger.debug("") + logger.debug("Validating Method: delete_collections_named") + logger.debug(f"Value: {data[methods['delete_collections_named']]}") + for del_col in util.parse(collectionBuilder.Type, "delete_collections_named", data, datatype="strlist", methods=methods): + try: + del_obj = collectionBuilder.library.get_collection(del_col, force_search=True) + collectionBuilder.library.delete(del_obj) + logger.info(f"Collection: {del_obj.title} deleted") + except Failed as e: + if str(e).startswith("Plex Error: Failed to delete"): + logger.error(e) + + def _validate_collectionless(self, collectionBuilder, methods, data, logger): + collectionBuilder.collectionless = "plex_collectionless" in methods and not collectionBuilder.playlist and not 
collectionBuilder.overlay + + def _validate_builders(self, collectionBuilder, methods, data, logger): + collectionBuilder.validate_builders = True + if "validate_builders" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: validate_builders") + logger.debug(f"Value: {data[methods['validate_builders']]}") + collectionBuilder.validate_builders = util.parse(collectionBuilder.Type, "validate_builders", data, datatype="bool", methods=methods, default=True) + + def _validate_run_again(self, collectionBuilder, methods, data, logger): + collectionBuilder.run_again = False + if "run_again" in methods and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: run_again") + logger.debug(f"Value: {data[methods['run_again']]}") + collectionBuilder.run_again = util.parse(collectionBuilder.Type, "run_again", data, datatype="bool", methods=methods, default=False) + + def _validate_build_collection(self, collectionBuilder, methods, data, logger): + collectionBuilder.build_collection = False if collectionBuilder.overlay else True + if "build_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: build_collection") + logger.debug(f"Value: {data[methods['build_collection']]}") + collectionBuilder.build_collection = util.parse(collectionBuilder.Type, "build_collection", data, datatype="bool", methods=methods, default=True) + + def _validate_blank_collection(self, collectionBuilder, methods, data, logger): + collectionBuilder.blank_collection = False + if "blank_collection" in methods and not collectionBuilder.playlist and not collectionBuilder.overlay: + logger.debug("") + logger.debug("Validating Method: blank_collection") + logger.debug(f"Value: {data[methods['blank_collection']]}") + collectionBuilder.blank_collection = util.parse(collectionBuilder.Type, "blank_collection", data, datatype="bool", methods=methods, default=False) + def _validateSyncMode(self, collectionBuilder, methods, data, logger): collectionBuilder.sync = collectionBuilder.library.sync_mode == "sync" and collectionBuilder.type != "overlay" if "sync_mode" in methods and not collectionBuilder.overlay: From 59c81ec5c63d37523c3c5a913affa72764cc0a89 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Tue, 21 May 2024 20:48:26 -0400 Subject: [PATCH 15/24] fix: _missing_filters --- modules/builder/__init__.py | 254 +-------------------------- modules/builder/_missing_filters.py | 263 ++++++++++++++++++++++++++++ 2 files changed, 269 insertions(+), 248 deletions(-) create mode 100644 modules/builder/_missing_filters.py diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 7b8e875af..693815d68 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -12,6 +12,7 @@ from ._config import * from ._attribute_setter import BuilderAttributeSetter from ._validate_methods import BuilderMethodValidator +from ._missing_filters import MissingFiltersUtil logger = util.logger @@ -559,6 +560,7 @@ def apply_vars(input_str, var_set, var_key, var_limit): logger.info("") logger.info("Validation Successful") + self.missing_filters_util = MissingFiltersUtil(self, logger) def gather_ids(self, method, value): expired = None @@ -1265,263 +1267,19 @@ def sync_collection(self): logger.info(f"{amount_removed} {self.builder_level.capitalize()}{'s' if amount_removed == 1 else ''} Removed") return amount_removed - def check_tmdb_filters(self, tmdb_item, filters_in, 
is_movie): - for filter_method, filter_data in filters_in: - filter_attr, modifier, filter_final = self.library.split(filter_method) - if self.config.TMDb.item_filter(tmdb_item, filter_attr, modifier, filter_final, filter_data, is_movie, self.current_time) is False: - return False - return True - - def check_tvdb_filters(self, tvdb_item, filters_in): - for filter_method, filter_data in filters_in: - filter_attr, modifier, filter_final = self.library.split(filter_method) - if self.config.TVDb.item_filter(tvdb_item, filter_attr, modifier, filter_final, filter_data) is False: - return False - return True - - def check_imdb_filters(self, imdb_info, filters_in): - for filter_method, filter_data in filters_in: - filter_attr, modifier, filter_final = self.library.split(filter_method) - if self.config.IMDb.item_filter(imdb_info, filter_attr, modifier, filter_final, filter_data) is False: - return False - return True - - def check_missing_filters(self, item_id, is_movie, tmdb_item=None, check_released=False): - imdb_info = None - if self.has_tmdb_filters or self.has_imdb_filters or check_released: - try: - if tmdb_item is None: - if is_movie: - tmdb_item = self.config.TMDb.get_movie(item_id, ignore_cache=True) - else: - tmdb_item = self.config.TMDb.get_show(self.config.Convert.tvdb_to_tmdb(item_id, fail=True), ignore_cache=True) - except Failed: - return False - if self.has_imdb_filters and tmdb_item and tmdb_item.imdb_id: - try: - imdb_info = self.config.IMDb.keywords(tmdb_item.imdb_id, self.language) - except Failed as e: - logger.error(e) - return False - if check_released: - date_to_check = tmdb_item.release_date if is_movie else tmdb_item.first_air_date - if not date_to_check or date_to_check > self.current_time: - return False - final_return = True - if self.has_tmdb_filters or self.has_imdb_filters: - final_return = False - for filter_list in self.filters: - tmdb_f = [] - imdb_f = [] - for k, v in filter_list: - if k.split(".")[0] in tmdb_filters: - tmdb_f.append((k, v)) - elif k.split(".")[0] in imdb_filters: - imdb_f.append((k, v)) - or_result = True - if tmdb_f: - if not tmdb_item or self.check_tmdb_filters(tmdb_item, tmdb_f, is_movie) is False: - or_result = False - if imdb_f: - if not imdb_info and self.check_imdb_filters(imdb_info, imdb_f) is False: - or_result = False - if or_result: - final_return = True - return final_return - def check_filters(self, item, display): - final_return = True - if self.filters and not self.details["only_filter_missing"]: - logger.ghost(f"Filtering {display} {item.title}") - item = self.library.reload(item) - final_return = False - tmdb_item = None - tvdb_item = None - imdb_info = None - for filter_list in self.filters: - tmdb_f = [] - tvdb_f = [] - imdb_f = [] - plex_f = [] - for k, v in filter_list: - if k.split(".")[0] in tmdb_filters: - tmdb_f.append((k, v)) - elif k.split(".")[0] in tvdb_filters: - tvdb_f.append((k, v)) - elif k.split(".")[0] in imdb_filters: - imdb_f.append((k, v)) - else: - plex_f.append((k, v)) - or_result = True - if tmdb_f: - if not tmdb_item and isinstance(item, (Movie, Show)): - if item.ratingKey not in self.library.movie_rating_key_map and item.ratingKey not in self.library.show_rating_key_map: - logger.warning(f"Filter Error: No {'TMDb' if self.library.is_movie else 'TVDb'} ID found for {item.title}") - or_result = False - else: - try: - if item.ratingKey in self.library.movie_rating_key_map: - tmdb_item = self.config.TMDb.get_movie(self.library.movie_rating_key_map[item.ratingKey], ignore_cache=True) - else: - tmdb_item = 
self.config.TMDb.get_show(self.config.Convert.tvdb_to_tmdb(self.library.show_rating_key_map[item.ratingKey], fail=True), ignore_cache=True) - except Failed as e: - logger.error(e) - or_result = False - if not tmdb_item or self.check_tmdb_filters(tmdb_item, tmdb_f, item.ratingKey in self.library.movie_rating_key_map) is False: - or_result = False - if tvdb_f: - if not tvdb_item and isinstance(item, Show): - if item.ratingKey not in self.library.show_rating_key_map: - logger.warning(f"Filter Error: No TVDb ID found for {item.title}") - or_result = False - else: - try: - tvdb_item = self.config.TVDb.get_tvdb_obj(self.library.show_rating_key_map[item.ratingKey]) - except Failed as e: - logger.error(e) - or_result = False - if not tvdb_item or self.check_tvdb_filters(tvdb_item, tvdb_f) is False: - or_result = False - if imdb_f: - if not imdb_info and isinstance(item, (Movie, Show)): - if item.ratingKey not in self.library.imdb_rating_key_map: - logger.warning(f"Filter Error: No IMDb ID found for {item.title}") - or_result = False - else: - try: - imdb_info = self.config.IMDb.keywords(self.library.imdb_rating_key_map[item.ratingKey], self.language) - except Failed as e: - logger.error(e) - or_result = False - if not imdb_info or self.check_imdb_filters(imdb_info, imdb_f) is False: - or_result = False - if plex_f and self.library.check_filters(item, plex_f, self.current_time) is False: - or_result = False - if or_result: - final_return = True - return final_return + return self.missing_filters_util.check_filters(item, display) def display_filters(self): if self.filters: for filter_list in self.filters: logger.info("") for filter_key, filter_value in filter_list: - logger.info(f"Collection Filter {filter_key}: {filter_value}") + logger.info(f"Collection Filter {filter_key}: {filter_value}") def run_missing(self): - added_to_radarr = 0 - added_to_sonarr = 0 - if len(self.missing_movies) > 0: - if self.details["show_missing"] is True: - logger.info("") - logger.separator(f"Missing Movies from Library: {self.library.name}", space=False, border=False) - logger.info("") - missing_movies_with_names = [] - filtered_movies_with_names = [] - for missing_id in self.missing_movies: - try: - movie = self.config.TMDb.get_movie(missing_id) - except Failed as e: - logger.error(e) - continue - current_title = f"{movie.title} ({movie.release_date.year})" if movie.release_date else movie.title - if self.check_missing_filters(missing_id, True, tmdb_item=movie, check_released=self.details["missing_only_released"]): - missing_movies_with_names.append((current_title, missing_id)) - if self.details["show_missing"] is True: - logger.info(f"{self.name} {self.Type} | ? 
| {current_title} (TMDb: {missing_id})") - else: - filtered_movies_with_names.append((current_title, missing_id)) - if self.details["show_filtered"] is True and self.details["show_missing"] is True: - logger.info(f"{self.name} {self.Type} | X | {current_title} (TMDb: {missing_id})") - logger.info("") - logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing") - if len(missing_movies_with_names) > 0: - if self.do_report: - self.library.add_missing(self.name, missing_movies_with_names, True) - if self.run_again or (self.library.Radarr and (self.radarr_details["add_missing"] or "item_radarr_tag" in self.item_details)): - missing_tmdb_ids = [missing_id for title, missing_id in missing_movies_with_names] - if self.library.Radarr: - if self.radarr_details["add_missing"]: - try: - added = self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_details) - self.added_to_radarr.extend([{"title": movie.title, "id": movie.tmdbId} for movie in added]) - added_to_radarr += len(added) - except Failed as e: - logger.error(e) - except ArrException as e: - logger.stacktrace() - logger.error(f"Arr Error: {e}") - if "item_radarr_tag" in self.item_details: - try: - self.library.Radarr.edit_tags(missing_tmdb_ids, self.item_details["item_radarr_tag"], self.item_details["apply_tags"]) - except Failed as e: - logger.error(e) - except ArrException as e: - logger.stacktrace() - logger.error(f"Arr Error: {e}") - if self.run_again: - self.run_again_movies.extend(missing_tmdb_ids) - if len(filtered_movies_with_names) > 0 and self.do_report: - self.library.add_filtered(self.name, filtered_movies_with_names, True) - if len(self.missing_shows) > 0 and self.library.is_show: - if self.details["show_missing"] is True: - logger.info("") - logger.separator(f"Missing Shows from Library: {self.name}", space=False, border=False) - logger.info("") - missing_shows_with_names = [] - filtered_shows_with_names = [] - for missing_id in self.missing_shows: - try: - title = self.config.TVDb.get_tvdb_obj(missing_id).title - except Failed as e: - logger.error(e) - continue - if self.check_missing_filters(missing_id, False, check_released=self.details["missing_only_released"]): - missing_shows_with_names.append((title, missing_id)) - if self.details["show_missing"] is True: - logger.info(f"{self.name} {self.Type} | ? 
| {title} (TVDb: {missing_id})") - else: - filtered_shows_with_names.append((title, missing_id)) - if self.details["show_filtered"] is True and self.details["show_missing"] is True: - logger.info(f"{self.name} {self.Type} | X | {title} (TVDb: {missing_id})") - logger.info("") - logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing") - if len(missing_shows_with_names) > 0: - if self.do_report: - self.library.add_missing(self.name, missing_shows_with_names, False) - if self.run_again or (self.library.Sonarr and (self.sonarr_details["add_missing"] or "item_sonarr_tag" in self.item_details)): - missing_tvdb_ids = [missing_id for title, missing_id in missing_shows_with_names] - if self.library.Sonarr: - if self.sonarr_details["add_missing"]: - try: - added = self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_details) - self.added_to_sonarr.extend([{"title": show.title, "id": show.tvdbId} for show in added]) - added_to_sonarr += len(added) - except Failed as e: - logger.error(e) - except ArrException as e: - logger.stacktrace() - logger.error(f"Arr Error: {e}") - if "item_sonarr_tag" in self.item_details: - try: - self.library.Sonarr.edit_tags(missing_tvdb_ids, self.item_details["item_sonarr_tag"], self.item_details["apply_tags"]) - except Failed as e: - logger.error(e) - except ArrException as e: - logger.stacktrace() - logger.error(f"Arr Error: {e}") - if self.run_again: - self.run_again_shows.extend(missing_tvdb_ids) - if len(filtered_shows_with_names) > 0 and self.do_report: - self.library.add_filtered(self.name, filtered_shows_with_names, False) - if len(self.missing_parts) > 0 and self.library.is_show: - if self.details["show_missing"] is True: - for missing in self.missing_parts: - logger.info(f"{self.name} {self.Type} | ? 
| {missing}") - if self.do_report: - self.library.add_missing(self.name, self.missing_parts, False) - return added_to_radarr, added_to_sonarr - + return self.missing_filters_util.run_missing() + def load_collection_items(self): if self.build_collection and self.obj: self.items = self.library.get_collection_items(self.obj, self.smart_label_collection) diff --git a/modules/builder/_missing_filters.py b/modules/builder/_missing_filters.py new file mode 100644 index 000000000..c206aeaf7 --- /dev/null +++ b/modules/builder/_missing_filters.py @@ -0,0 +1,263 @@ +from arrapi import ArrException +from modules.util import Failed +from plexapi.video import Movie, Show +from ._config import tmdb_filters, tvdb_filters, imdb_filters + +class MissingFiltersUtil: + def __init__(self, collectionBuilder, logger): + self.logger = logger + self.collectionBuilder = collectionBuilder + + def run_missing(self): + logger = self.logger + added_to_radarr = 0 + added_to_sonarr = 0 + if len(self.collectionBuilder.missing_movies) > 0: + if self.collectionBuilder.details["show_missing"] is True: + logger.info("") + logger.separator(f"Missing Movies from Library: {self.collectionBuilder.library.name}", space=False, border=False) + logger.info("") + missing_movies_with_names = [] + filtered_movies_with_names = [] + for missing_id in self.collectionBuilder.missing_movies: + try: + movie = self.collectionBuilder.config.TMDb.get_movie(missing_id) + except Failed as e: + logger.error(e) + continue + current_title = f"{movie.title} ({movie.release_date.year})" if movie.release_date else movie.title + if self._check_missing_filters(missing_id, True, tmdb_item=movie, check_released=self.collectionBuilder.details["missing_only_released"]): + missing_movies_with_names.append((current_title, missing_id)) + if self.collectionBuilder.details["show_missing"] is True: + logger.info(f"{self.collectionBuilder.name} {self.collectionBuilder.Type} | ? 
| {current_title} (TMDb: {missing_id})") + else: + filtered_movies_with_names.append((current_title, missing_id)) + if self.collectionBuilder.details["show_filtered"] is True and self.collectionBuilder.details["show_missing"] is True: + logger.info(f"{self.collectionBuilder.name} {self.collectionBuilder.Type} | X | {current_title} (TMDb: {missing_id})") + logger.info("") + logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing") + if len(missing_movies_with_names) > 0: + if self.collectionBuilder.do_report: + self.collectionBuilder.library.add_missing(self.collectionBuilder.name, missing_movies_with_names, True) + if self.collectionBuilder.run_again or (self.collectionBuilder.library.Radarr and (self.collectionBuilder.radarr_details["add_missing"] or "item_radarr_tag" in self.collectionBuilder.item_details)): + missing_tmdb_ids = [missing_id for title, missing_id in missing_movies_with_names] + if self.collectionBuilder.library.Radarr: + if self.collectionBuilder.radarr_details["add_missing"]: + try: + added = self.collectionBuilder.library.Radarr.add_tmdb(missing_tmdb_ids, **self.collectionBuilder.radarr_details) + self.collectionBuilder.added_to_radarr.extend([{"title": movie.title, "id": movie.tmdbId} for movie in added]) + added_to_radarr += len(added) + except Failed as e: + logger.error(e) + except ArrException as e: + logger.stacktrace() + logger.error(f"Arr Error: {e}") + if "item_radarr_tag" in self.collectionBuilder.item_details: + try: + self.collectionBuilder.library.Radarr.edit_tags(missing_tmdb_ids, self.collectionBuilder.item_details["item_radarr_tag"], self.collectionBuilder.item_details["apply_tags"]) + except Failed as e: + logger.error(e) + except ArrException as e: + logger.stacktrace() + logger.error(f"Arr Error: {e}") + if self.collectionBuilder.run_again: + self.collectionBuilder.run_again_movies.extend(missing_tmdb_ids) + if len(filtered_movies_with_names) > 0 and self.collectionBuilder.do_report: + self.collectionBuilder.library.add_filtered(self.collectionBuilder.name, filtered_movies_with_names, True) + if len(self.collectionBuilder.missing_shows) > 0 and self.collectionBuilder.library.is_show: + if self.collectionBuilder.details["show_missing"] is True: + logger.info("") + logger.separator(f"Missing Shows from Library: {self.collectionBuilder.name}", space=False, border=False) + logger.info("") + missing_shows_with_names = [] + filtered_shows_with_names = [] + for missing_id in self.collectionBuilder.missing_shows: + try: + title = self.collectionBuilder.config.TVDb.get_tvdb_obj(missing_id).title + except Failed as e: + logger.error(e) + continue + if self._check_missing_filters(missing_id, False, check_released=self.collectionBuilder.details["missing_only_released"]): + missing_shows_with_names.append((title, missing_id)) + if self.collectionBuilder.details["show_missing"] is True: + logger.info(f"{self.collectionBuilder.name} {self.collectionBuilder.Type} | ? 
| {title} (TVDb: {missing_id})") + else: + filtered_shows_with_names.append((title, missing_id)) + if self.collectionBuilder.details["show_filtered"] is True and self.collectionBuilder.details["show_missing"] is True: + logger.info(f"{self.collectionBuilder.name} {self.collectionBuilder.Type} | X | {title} (TVDb: {missing_id})") + logger.info("") + logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing") + if len(missing_shows_with_names) > 0: + if self.collectionBuilder.do_report: + self.collectionBuilder.library.add_missing(self.collectionBuilder.name, missing_shows_with_names, False) + if self.collectionBuilder.run_again or (self.collectionBuilder.library.Sonarr and (self.collectionBuilder.sonarr_details["add_missing"] or "item_sonarr_tag" in self.collectionBuilder.item_details)): + missing_tvdb_ids = [missing_id for title, missing_id in missing_shows_with_names] + if self.collectionBuilder.library.Sonarr: + if self.collectionBuilder.sonarr_details["add_missing"]: + try: + added = self.collectionBuilder.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.collectionBuilder.sonarr_details) + self.collectionBuilder.added_to_sonarr.extend([{"title": show.title, "id": show.tvdbId} for show in added]) + added_to_sonarr += len(added) + except Failed as e: + logger.error(e) + except ArrException as e: + logger.stacktrace() + logger.error(f"Arr Error: {e}") + if "item_sonarr_tag" in self.collectionBuilder.item_details: + try: + self.collectionBuilder.library.Sonarr.edit_tags(missing_tvdb_ids, self.collectionBuilder.item_details["item_sonarr_tag"], self.collectionBuilder.item_details["apply_tags"]) + except Failed as e: + logger.error(e) + except ArrException as e: + logger.stacktrace() + logger.error(f"Arr Error: {e}") + if self.collectionBuilder.run_again: + self.collectionBuilder.run_again_shows.extend(missing_tvdb_ids) + if len(filtered_shows_with_names) > 0 and self.collectionBuilder.do_report: + self.collectionBuilder.library.add_filtered(self.collectionBuilder.name, filtered_shows_with_names, False) + if len(self.collectionBuilder.missing_parts) > 0 and self.collectionBuilder.library.is_show: + if self.collectionBuilder.details["show_missing"] is True: + for missing in self.collectionBuilder.missing_parts: + logger.info(f"{self.collectionBuilder.name} {self.collectionBuilder.Type} | ? 
| {missing}") + if self.collectionBuilder.do_report: + self.collectionBuilder.library.add_missing(self.collectionBuilder.name, self.collectionBuilder.missing_parts, False) + return added_to_radarr, added_to_sonarr + + def check_filters(self, item, display): + logger = self.logger + final_return = True + if self.collectionBuilder.filters and not self.collectionBuilder.details["only_filter_missing"]: + logger.ghost(f"Filtering {display} {item.title}") + item = self.collectionBuilder.library.reload(item) + final_return = False + tmdb_item = None + tvdb_item = None + imdb_info = None + for filter_list in self.collectionBuilder.filters: + tmdb_f = [] + tvdb_f = [] + imdb_f = [] + plex_f = [] + for k, v in filter_list: + if k.split(".")[0] in tmdb_filters: + tmdb_f.append((k, v)) + elif k.split(".")[0] in tvdb_filters: + tvdb_f.append((k, v)) + elif k.split(".")[0] in imdb_filters: + imdb_f.append((k, v)) + else: + plex_f.append((k, v)) + or_result = True + if tmdb_f: + if not tmdb_item and isinstance(item, (Movie, Show)): + if item.ratingKey not in self.collectionBuilder.library.movie_rating_key_map and item.ratingKey not in self.collectionBuilder.library.show_rating_key_map: + logger.warning(f"Filter Error: No {'TMDb' if self.collectionBuilder.library.is_movie else 'TVDb'} ID found for {item.title}") + or_result = False + else: + try: + if item.ratingKey in self.collectionBuilder.library.movie_rating_key_map: + tmdb_item = self.collectionBuilder.config.TMDb.get_movie(self.collectionBuilder.library.movie_rating_key_map[item.ratingKey], ignore_cache=True) + else: + tmdb_item = self.collectionBuilder.config.TMDb.get_show(self.collectionBuilder.config.Convert.tvdb_to_tmdb(self.collectionBuilder.library.show_rating_key_map[item.ratingKey], fail=True), ignore_cache=True) + except Failed as e: + logger.error(e) + or_result = False + if not tmdb_item or self.collectionBuilder.check_tmdb_filters(tmdb_item, tmdb_f, item.ratingKey in self.collectionBuilder.library.movie_rating_key_map) is False: + or_result = False + if tvdb_f: + if not tvdb_item and isinstance(item, Show): + if item.ratingKey not in self.collectionBuilder.library.show_rating_key_map: + logger.warning(f"Filter Error: No TVDb ID found for {item.title}") + or_result = False + else: + try: + tvdb_item = self.collectionBuilder.config.TVDb.get_tvdb_obj(self.collectionBuilder.library.show_rating_key_map[item.ratingKey]) + except Failed as e: + logger.error(e) + or_result = False + if not tvdb_item or self.collectionBuilder.check_tvdb_filters(tvdb_item, tvdb_f) is False: + or_result = False + if imdb_f: + if not imdb_info and isinstance(item, (Movie, Show)): + if item.ratingKey not in self.collectionBuilder.library.imdb_rating_key_map: + logger.warning(f"Filter Error: No IMDb ID found for {item.title}") + or_result = False + else: + try: + imdb_info = self.collectionBuilder.config.IMDb.keywords(self.collectionBuilder.library.imdb_rating_key_map[item.ratingKey], self.collectionBuilder.language) + except Failed as e: + logger.error(e) + or_result = False + if not imdb_info or self.collectionBuilder.check_imdb_filters(imdb_info, imdb_f) is False: + or_result = False + if plex_f and self.collectionBuilder.library.check_filters(item, plex_f, self.collectionBuilder.current_time) is False: + or_result = False + if or_result: + final_return = True + return final_return + + + def _check_missing_filters(self, item_id, is_movie, tmdb_item=None, check_released=False): + logger = self.logger + imdb_info = None + if self.collectionBuilder.has_tmdb_filters or 
self.collectionBuilder.has_imdb_filters or check_released: + try: + if tmdb_item is None: + if is_movie: + tmdb_item = self.collectionBuilder.config.TMDb.get_movie(item_id, ignore_cache=True) + else: + tmdb_item = self.collectionBuilder.config.TMDb.get_show(self.collectionBuilder.config.Convert.tvdb_to_tmdb(item_id, fail=True), ignore_cache=True) + except Failed: + return False + if self.collectionBuilder.has_imdb_filters and tmdb_item and tmdb_item.imdb_id: + try: + imdb_info = self.collectionBuilder.config.IMDb.keywords(tmdb_item.imdb_id, self.collectionBuilder.language) + except Failed as e: + logger.error(e) + return False + if check_released: + date_to_check = tmdb_item.release_date if is_movie else tmdb_item.first_air_date + if not date_to_check or date_to_check > self.collectionBuilder.current_time: + return False + final_return = True + if self.collectionBuilder.has_tmdb_filters or self.collectionBuilder.has_imdb_filters: + final_return = False + for filter_list in self.collectionBuilder.filters: + tmdb_f = [] + imdb_f = [] + for k, v in filter_list: + if k.split(".")[0] in tmdb_filters: + tmdb_f.append((k, v)) + elif k.split(".")[0] in imdb_filters: + imdb_f.append((k, v)) + or_result = True + if tmdb_f: + if not tmdb_item or self.collectionBuilder.check_tmdb_filters(tmdb_item, tmdb_f, is_movie) is False: + or_result = False + if imdb_f: + if not imdb_info and self.collectionBuilder.check_imdb_filters(imdb_info, imdb_f) is False: + or_result = False + if or_result: + final_return = True + return final_return + + def _check_tmdb_filters(self, tmdb_item, filters_in, is_movie): + for filter_method, filter_data in filters_in: + filter_attr, modifier, filter_final = self.collectionBuilder.library.split(filter_method) + if self.collectionBuilder.config.TMDb.item_filter(tmdb_item, filter_attr, modifier, filter_final, filter_data, is_movie, self.collectionBuilder.current_time) is False: + return False + return True + + def _check_tvdb_filters(self, tvdb_item, filters_in): + for filter_method, filter_data in filters_in: + filter_attr, modifier, filter_final = self.collectionBuilder.library.split(filter_method) + if self.collectionBuilder.config.TVDb.item_filter(tvdb_item, filter_attr, modifier, filter_final, filter_data) is False: + return False + return True + + def _check_imdb_filters(self, imdb_info, filters_in): + for filter_method, filter_data in filters_in: + filter_attr, modifier, filter_final = self.collectionBuilder.library.split(filter_method) + if self.collectionBuilder.config.IMDb.item_filter(imdb_info, filter_attr, modifier, filter_final, filter_data) is False: + return False + return True \ No newline at end of file From 5c534858cbe841283df2643c98aac667e333afbd Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Wed, 22 May 2024 03:12:38 -0400 Subject: [PATCH 16/24] refactor: rename Overlay to OverlayConfig and moved to builder package --- modules/builder/__init__.py | 6 ++--- .../{overlay.py => builder/overlay_config.py} | 6 ++--- modules/overlays.py | 24 +++++++++---------- 3 files changed, 18 insertions(+), 18 deletions(-) rename modules/{overlay.py => builder/overlay_config.py} (99%) diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 693815d68..07b60616b 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -3,7 +3,7 @@ from datetime import datetime from modules import plex, tmdb, util from modules.util import Failed, FilterFailed, NotScheduled, Deleted -from modules.overlay import Overlay +from .overlay_config import 
OverlayConfig from modules.poster import KometaImage from plexapi.audio import Artist, Album, Track from plexapi.exceptions import NotFound @@ -16,7 +16,7 @@ logger = util.logger -__all__ = ["CollectionBuilder"] +__all__ = ["CollectionBuilder", "OverlayConfig"] class CollectionBuilder: def __init__(self, config, metadata, name, data, library=None, overlay=None, extra=None): @@ -342,7 +342,7 @@ def apply_vars(input_str, var_set, var_key, var_limit): suppress = util.get_list(data[methods["suppress_overlays"]]) else: logger.error(f"Overlay Error: suppress_overlays attribute is blank") - self.overlay = Overlay(config, library, metadata, str(self.mapping_name), overlay_data, suppress, self.builder_level) + self.overlay = OverlayConfig(config, library, metadata, str(self.mapping_name), overlay_data, suppress, self.builder_level) self.sync_to_users = None self.exclude_users = None diff --git a/modules/overlay.py b/modules/builder/overlay_config.py similarity index 99% rename from modules/overlay.py rename to modules/builder/overlay_config.py index 0d149eda8..4b2e9703c 100644 --- a/modules/overlay.py +++ b/modules/builder/overlay_config.py @@ -68,7 +68,7 @@ def get_canvas_size(item): else: return portrait_dim -class Overlay: +class OverlayConfig: def __init__(self, config, library, overlay_file, original_mapping_name, overlay_data, suppress, level): self.config = config self.library = library @@ -191,7 +191,7 @@ def get_and_save_image(image_url): temp_path = temp_path[16:] if not temp_path.endswith(".png"): temp_path = f"{temp_path}.png" - images_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "defaults", "overlays", "images") + images_path = os.path.join(os.path.dirname(os.curdir), "defaults", "overlays", "images") if not os.path.exists(os.path.abspath(os.path.join(images_path, temp_path))): raise Failed(f"Overlay Error: Overlay Image not found at: {os.path.abspath(os.path.join(images_path, temp_path))}") self.path = os.path.abspath(os.path.join(images_path, temp_path)) @@ -239,7 +239,7 @@ def get_and_save_image(image_url): raise Failed(f"Overlay Error: failed to parse overlay text name: {self.name}") self.name = f"text({match.group(1)})" text = f"{match.group(1)}" - code_base = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + code_base = os.path.dirname(os.curdir) font_base = os.path.join(code_base, "fonts") self.font_name = os.path.join(font_base, "Roboto-Medium.ttf") if "font_size" in self.data: diff --git a/modules/overlays.py b/modules/overlays.py index 5b0be0214..fbbd12383 100644 --- a/modules/overlays.py +++ b/modules/overlays.py @@ -1,7 +1,7 @@ import os, re from datetime import datetime -from modules import plex, util, overlay -from modules.builder import CollectionBuilder +from modules import plex, util +from modules.builder import CollectionBuilder, overlay_config from modules.util import Failed, FilterFailed, NotScheduled, LimitReached from num2words import num2words from plexapi.exceptions import BadRequest @@ -136,11 +136,11 @@ def trakt_ratings(): real_value = getattr(item, actual) if cache_value is None or real_value is None: continue - if cache_key in overlay.float_vars: + if cache_key in overlay_config.float_vars: cache_value = float(cache_value) - if cache_key in overlay.int_vars: + if cache_key in overlay_config.int_vars: cache_value = int(cache_value) - if cache_key in overlay.date_vars: + if cache_key in overlay_config.date_vars: real_value = real_value.strftime("%Y-%m-%d") if real_value != cache_value: overlay_change = f"Special 
Text Changed from {cache_value} to {real_value}" @@ -207,7 +207,7 @@ def trakt_ratings(): logger.trace(" Overlay Reason: New image detected") elif not self.library.reapply_overlays and overlay_change: logger.trace(f" Overlay Reason: Overlay changed {overlay_change}") - canvas_width, canvas_height = overlay.get_canvas_size(item) + canvas_width, canvas_height = overlay_config.get_canvas_size(item) with Image.open(poster.location if poster else has_original) as new_poster: exif_tags = new_poster.getexif() exif_tags[0x04bc] = "overlay" @@ -218,14 +218,14 @@ def trakt_ratings(): def get_text(text_overlay): full_text = text_overlay.name[5:-1] - for format_var in overlay.vars_by_type[text_overlay.level]: + for format_var in overlay_config.vars_by_type[text_overlay.level]: if f"<<{format_var}" in full_text and format_var == "originally_available[": mod = re.search("<>", full_text).group(1) format_var = "originally_available" - elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay.double_mods)): + elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay_config.double_mods)): mod = format_var[-2:] format_var = format_var[:-2] - elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay.single_mods)): + elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay_config.single_mods)): mod = format_var[-1] format_var = format_var[:-1] elif f"<<{format_var}>>" in full_text: @@ -250,7 +250,7 @@ def get_text(text_overlay): actual_value = current elif mod == "L" and current < actual_value: actual_value = current - elif format_var in overlay.rating_sources: + elif format_var in overlay_config.rating_sources: found_rating = None try: item_to_id = item.show() if isinstance(item, (Season, Episode)) else item @@ -395,7 +395,7 @@ def get_text(text_overlay): if format_var == "versions": actual_value = len(actual_value) if self.config.Cache: - cache_store = actual_value.strftime("%Y-%m-%d") if format_var in overlay.date_vars else actual_value + cache_store = actual_value.strftime("%Y-%m-%d") if format_var in overlay_config.date_vars else actual_value self.config.Cache.update_overlay_special_text(item.ratingKey, format_var, cache_store) sub_value = None if format_var == "originally_available": @@ -434,7 +434,7 @@ def get_text(text_overlay): final_value = str(actual_value).lower() elif mod == "P": final_value = str(actual_value).title() - elif format_var in overlay.rating_sources: + elif format_var in overlay_config.rating_sources: final_value = f"{float(actual_value):.1f}" else: final_value = actual_value From e025e2217e01f6111fa9bfd64c0db98d90293ee1 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Wed, 22 May 2024 18:45:36 -0400 Subject: [PATCH 17/24] fix: add back missing filters check --- modules/builder/_attribute_setter.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/builder/_attribute_setter.py b/modules/builder/_attribute_setter.py index a4ccfc47d..1c4492cef 100644 --- a/modules/builder/_attribute_setter.py +++ b/modules/builder/_attribute_setter.py @@ -74,6 +74,8 @@ def _set_attribute(self, method_name, method_data, method_final, methods, method self._tvdb(method_name, method_data) elif method_name in mdblist.builders: self._mdblist(method_name, method_data) + elif method_name == "filters": + self._filters(method_name, method_data) else: raise Failed(f"{self.Type} Error: {method_final} attribute not supported") From b643df8c1f8313bbaae7c30d588b97e7e4711363 Mon Sep 17 00:00:00 
2001 From: Minwei Xu Date: Thu, 23 May 2024 02:25:09 -0400 Subject: [PATCH 18/24] fix: _check_tmdb_filters callsites in _missing_filters --- modules/builder/_missing_filters.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/builder/_missing_filters.py b/modules/builder/_missing_filters.py index c206aeaf7..57d79cf09 100644 --- a/modules/builder/_missing_filters.py +++ b/modules/builder/_missing_filters.py @@ -175,7 +175,7 @@ def check_filters(self, item, display): except Failed as e: logger.error(e) or_result = False - if not tvdb_item or self.collectionBuilder.check_tvdb_filters(tvdb_item, tvdb_f) is False: + if not tvdb_item or self._check_tvdb_filters(tvdb_item, tvdb_f) is False: or_result = False if imdb_f: if not imdb_info and isinstance(item, (Movie, Show)): @@ -232,10 +232,10 @@ def _check_missing_filters(self, item_id, is_movie, tmdb_item=None, check_releas imdb_f.append((k, v)) or_result = True if tmdb_f: - if not tmdb_item or self.collectionBuilder.check_tmdb_filters(tmdb_item, tmdb_f, is_movie) is False: + if not tmdb_item or self._check_tmdb_filters(tmdb_item, tmdb_f, is_movie) is False: or_result = False if imdb_f: - if not imdb_info and self.collectionBuilder.check_imdb_filters(imdb_info, imdb_f) is False: + if not imdb_info and self._check_imdb_filters(imdb_info, imdb_f) is False: or_result = False if or_result: final_return = True From d15d480ae36ec3c7ef8e7284ddab9a29071d671d Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Thu, 23 May 2024 02:35:37 -0400 Subject: [PATCH 19/24] fix: _check_tmdb_filters --- modules/builder/_missing_filters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/builder/_missing_filters.py b/modules/builder/_missing_filters.py index 57d79cf09..d8c1ec386 100644 --- a/modules/builder/_missing_filters.py +++ b/modules/builder/_missing_filters.py @@ -162,7 +162,7 @@ def check_filters(self, item, display): except Failed as e: logger.error(e) or_result = False - if not tmdb_item or self.collectionBuilder.check_tmdb_filters(tmdb_item, tmdb_f, item.ratingKey in self.collectionBuilder.library.movie_rating_key_map) is False: + if not tmdb_item or self._check_tmdb_filters(tmdb_item, tmdb_f, item.ratingKey in self.collectionBuilder.library.movie_rating_key_map) is False: or_result = False if tvdb_f: if not tvdb_item and isinstance(item, Show): From d70a0f6c5ac2e5704f61f51ebc8835a28b269e1c Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Thu, 23 May 2024 19:54:16 -0400 Subject: [PATCH 20/24] refactor: remove unused member variable from overlay_config --- modules/builder/overlay_config.py | 132 +++++++++++++++--------------- 1 file changed, 64 insertions(+), 68 deletions(-) diff --git a/modules/builder/overlay_config.py b/modules/builder/overlay_config.py index 4b2e9703c..70c6a85a0 100644 --- a/modules/builder/overlay_config.py +++ b/modules/builder/overlay_config.py @@ -71,10 +71,6 @@ def get_canvas_size(item): class OverlayConfig: def __init__(self, config, library, overlay_file, original_mapping_name, overlay_data, suppress, level): self.config = config - self.library = library - self.overlay_file = overlay_file - self.original_mapping_name = original_mapping_name - self.data = overlay_data self.level = level self.keys = [] self.updated = False @@ -100,54 +96,54 @@ def __init__(self, config, library, overlay_file, original_mapping_name, overlay logger.debug("") logger.debug("Validating Method: overlay") - logger.debug(f"Value: {self.data}") - if not isinstance(self.data, dict): - self.data = {"name": 
str(self.data)} - logger.warning(f"Overlay Warning: No overlay attribute using mapping name {self.data} as the overlay name") - if "name" not in self.data or not self.data["name"]: + logger.debug(f"Value: {overlay_data}") + if not isinstance(overlay_data, dict): + overlay_data = {"name": str(overlay_data)} + logger.warning(f"Overlay Warning: No overlay attribute using mapping name {overlay_data} as the overlay name") + if "name" not in overlay_data or not overlay_data["name"]: raise Failed(f"Overlay Error: overlay must have the name attribute") - self.name = str(self.data["name"]) + self.name = str(overlay_data["name"]) - self.prefix = f"Overlay File ({self.overlay_file.file_num}) " + self.prefix = f"Overlay File ({overlay_file.file_num}) " - self.mapping_name = f"{self.prefix}{self.original_mapping_name}" + self.mapping_name = f"{self.prefix}{original_mapping_name}" self.suppress = [f"{self.prefix}{s}" for s in suppress] - if "group" in self.data and self.data["group"]: - self.group = str(self.data["group"]) - if "queue" in self.data and self.data["queue"]: - self.queue_name = str(self.data["queue"]) - if self.queue_name not in self.overlay_file.queue_names: + if "group" in overlay_data and overlay_data["group"]: + self.group = str(overlay_data["group"]) + if "queue" in overlay_data and overlay_data["queue"]: + self.queue_name = str(overlay_data["queue"]) + if self.queue_name not in overlay_file.queue_names: raise Failed(f"Overlay Error: queue: {self.queue_name} not found") - self.queue = self.overlay_file.queue_names[self.queue_name] - if "weight" in self.data: - self.weight = util.parse("Overlay", "weight", self.data["weight"], datatype="int", parent="overlay", minimum=0) - if "group" in self.data and (self.weight is None or not self.group): + self.queue = overlay_file.queue_names[self.queue_name] + if "weight" in overlay_data: + self.weight = util.parse("Overlay", "weight", overlay_data["weight"], datatype="int", parent="overlay", minimum=0) + if "group" in overlay_data and (self.weight is None or not self.group): raise Failed(f"Overlay Error: overlay attribute's group requires the weight attribute") - elif "queue" in self.data and (self.weight is None or not self.queue_name): + elif "queue" in overlay_data and (self.weight is None or not self.queue_name): raise Failed(f"Overlay Error: overlay attribute's queue requires the weight attribute") elif self.group and self.queue_name: raise Failed(f"Overlay Error: overlay attribute's group and queue cannot be used together") - self.horizontal_offset, self.horizontal_align, self.vertical_offset, self.vertical_align = util.parse_cords(self.data, "overlay") + self.horizontal_offset, self.horizontal_align, self.vertical_offset, self.vertical_align = util.parse_cords(overlay_data, "overlay") if (self.horizontal_offset is None and self.vertical_offset is not None) or (self.vertical_offset is None and self.horizontal_offset is not None): raise Failed(f"Overlay Error: overlay attribute's horizontal_offset and vertical_offset must be used together") def color(attr): - if attr in self.data and self.data[attr]: + if attr in overlay_data and overlay_data[attr]: try: - return ImageColor.getcolor(self.data[attr], "RGBA") + return ImageColor.getcolor(overlay_data[attr], "RGBA") except ValueError: - raise Failed(f"Overlay Error: overlay {attr}: {self.data[attr]} invalid") + raise Failed(f"Overlay Error: overlay {attr}: {overlay_data[attr]} invalid") self.back_color = color("back_color") - self.back_radius = util.parse("Overlay", "back_radius", 
self.data["back_radius"], datatype="int", parent="overlay") if "back_radius" in self.data and self.data["back_radius"] else None - self.back_line_width = util.parse("Overlay", "back_line_width", self.data["back_line_width"], datatype="int", parent="overlay") if "back_line_width" in self.data and self.data["back_line_width"] else None + self.back_radius = util.parse("Overlay", "back_radius", overlay_data["back_radius"], datatype="int", parent="overlay") if "back_radius" in overlay_data and overlay_data["back_radius"] else None + self.back_line_width = util.parse("Overlay", "back_line_width", overlay_data["back_line_width"], datatype="int", parent="overlay") if "back_line_width" in overlay_data and overlay_data["back_line_width"] else None self.back_line_color = color("back_line_color") - self.back_padding = util.parse("Overlay", "back_padding", self.data["back_padding"], datatype="int", parent="overlay", minimum=0, default=0) if "back_padding" in self.data else 0 - self.back_align = util.parse("Overlay", "back_align", self.data["back_align"], parent="overlay", default="center", options=["left", "right", "center", "top", "bottom"]) if "back_align" in self.data else "center" + self.back_padding = util.parse("Overlay", "back_padding", overlay_data["back_padding"], datatype="int", parent="overlay", minimum=0, default=0) if "back_padding" in overlay_data else 0 + self.back_align = util.parse("Overlay", "back_align", overlay_data["back_align"], parent="overlay", default="center", options=["left", "right", "center", "top", "bottom"]) if "back_align" in overlay_data else "center" self.back_box = None - back_width = util.parse("Overlay", "back_width", self.data["back_width"], datatype="int", parent="overlay", minimum=0) if "back_width" in self.data else -1 - back_height = util.parse("Overlay", "back_height", self.data["back_height"], datatype="int", parent="overlay", minimum=0) if "back_height" in self.data else -1 + back_width = util.parse("Overlay", "back_width", overlay_data["back_width"], datatype="int", parent="overlay", minimum=0) if "back_width" in overlay_data else -1 + back_height = util.parse("Overlay", "back_height", overlay_data["back_height"], datatype="int", parent="overlay", minimum=0) if "back_height" in overlay_data else -1 if self.name == "backdrop": self.back_box = (back_width, back_height) elif self.back_align != "center" and back_width < 0: @@ -180,13 +176,13 @@ def get_and_save_image(image_url): return image_path if not self.name.startswith(("blur", "backdrop")): - if ("default" in self.data and self.data["default"]) or ("pmm" in self.data and self.data["pmm"]) or ("git" in self.data and self.data["git"] and self.data["git"].startswith("PMM/")): - if "default" in self.data and self.data["default"]: - temp_path = self.data["default"] - elif "pmm" in self.data and self.data["pmm"]: - temp_path = self.data["pmm"] + if ("default" in overlay_data and overlay_data["default"]) or ("pmm" in overlay_data and overlay_data["pmm"]) or ("git" in overlay_data and overlay_data["git"] and overlay_data["git"].startswith("PMM/")): + if "default" in overlay_data and overlay_data["default"]: + temp_path = overlay_data["default"] + elif "pmm" in overlay_data and overlay_data["pmm"]: + temp_path = overlay_data["pmm"] else: - temp_path = self.data["git"][4:] + temp_path = overlay_data["git"][4:] if temp_path.startswith("overlays/images/"): temp_path = temp_path[16:] if not temp_path.endswith(".png"): @@ -195,14 +191,14 @@ def get_and_save_image(image_url): if not 
os.path.exists(os.path.abspath(os.path.join(images_path, temp_path))): raise Failed(f"Overlay Error: Overlay Image not found at: {os.path.abspath(os.path.join(images_path, temp_path))}") self.path = os.path.abspath(os.path.join(images_path, temp_path)) - elif "file" in self.data and self.data["file"]: - self.path = self.data["file"] - elif "git" in self.data and self.data["git"]: - self.path = get_and_save_image(f"{self.config.GitHub.configs_url}{self.data['git']}.png") - elif "repo" in self.data and self.data["repo"]: - self.path = get_and_save_image(f"{self.config.custom_repo}{self.data['repo']}.png") - elif "url" in self.data and self.data["url"]: - self.path = get_and_save_image(self.data["url"]) + elif "file" in overlay_data and overlay_data["file"]: + self.path = overlay_data["file"] + elif "git" in overlay_data and overlay_data["git"]: + self.path = get_and_save_image(f"{self.config.GitHub.configs_url}{overlay_data['git']}.png") + elif "repo" in overlay_data and overlay_data["repo"]: + self.path = get_and_save_image(f"{self.config.custom_repo}{overlay_data['repo']}.png") + elif "url" in overlay_data and overlay_data["url"]: + self.path = get_and_save_image(overlay_data["url"]) if "|" in self.name: raise Failed(f"Overlay Error: Overlay Name: {self.name} cannot contain '|'") @@ -221,17 +217,17 @@ def get_and_save_image(image_url): if self.path: if not os.path.exists(self.path): raise Failed(f"Overlay Error: Text Overlay Addon Image not found at: {self.path}") - self.addon_offset = util.parse("Overlay", "addon_offset", self.data["addon_offset"], datatype="int", parent="overlay") if "addon_offset" in self.data else 0 - self.addon_position = util.parse("Overlay", "addon_position", self.data["addon_position"], parent="overlay", options=["left", "right", "top", "bottom"]) if "addon_position" in self.data else "left" + self.addon_offset = util.parse("Overlay", "addon_offset", overlay_data["addon_offset"], datatype="int", parent="overlay") if "addon_offset" in overlay_data else 0 + self.addon_position = util.parse("Overlay", "addon_position", overlay_data["addon_position"], parent="overlay", options=["left", "right", "top", "bottom"]) if "addon_position" in overlay_data else "left" image_compare = None if self.config.Cache: - _, image_compare, _ = self.config.Cache.query_image_map(self.mapping_name, f"{self.library.image_table_name}_overlays") + _, image_compare, _ = self.config.Cache.query_image_map(self.mapping_name, f"{library.image_table_name}_overlays") overlay_size = os.stat(self.path).st_size self.updated = not image_compare or str(overlay_size) != str(image_compare) try: self.image = Image.open(self.path).convert("RGBA") if self.config.Cache: - self.config.Cache.update_image_map(self.mapping_name, f"{self.library.image_table_name}_overlays", self.name, overlay_size) + self.config.Cache.update_image_map(self.mapping_name, f"{library.image_table_name}_overlays", self.name, overlay_size) except OSError: raise Failed(f"Overlay Error: overlay image {self.path} failed to load") match = re.search("\\(([^)]+)\\)", self.name) @@ -242,10 +238,10 @@ def get_and_save_image(image_url): code_base = os.path.dirname(os.curdir) font_base = os.path.join(code_base, "fonts") self.font_name = os.path.join(font_base, "Roboto-Medium.ttf") - if "font_size" in self.data: - self.font_size = util.parse("Overlay", "font_size", self.data["font_size"], datatype="int", parent="overlay", default=self.font_size) - if "font" in self.data and self.data["font"]: - font = str(self.data["font"]) + if "font_size" in 
overlay_data: + self.font_size = util.parse("Overlay", "font_size", overlay_data["font_size"], datatype="int", parent="overlay", default=self.font_size) + if "font" in overlay_data and overlay_data["font"]: + font = str(overlay_data["font"]) if not os.path.exists(font) and os.path.exists(os.path.join(code_base, font)): font = os.path.join(code_base, font) if not os.path.exists(font): @@ -257,27 +253,27 @@ def get_and_save_image(image_url): font = os.path.join(font_base, font) self.font_name = font self.font = ImageFont.truetype(self.font_name, self.font_size) - if "font_style" in self.data and self.data["font_style"]: + if "font_style" in overlay_data and overlay_data["font_style"]: try: variation_names = [n.decode("utf-8") for n in self.font.get_variation_names()] - if self.data["font_style"] in variation_names: - self.font.set_variation_by_name(self.data["font_style"]) + if overlay_data["font_style"] in variation_names: + self.font.set_variation_by_name(overlay_data["font_style"]) else: - raise Failed(f"Overlay Error: Font Style {self.data['font_style']} not found. Options: {','.join(variation_names)}") + raise Failed(f"Overlay Error: Font Style {overlay_data['font_style']} not found. Options: {','.join(variation_names)}") except OSError: logger.warning(f"Overlay Warning: font: {self.font} does not have variations") - if "font_color" in self.data and self.data["font_color"]: + if "font_color" in overlay_data and overlay_data["font_color"]: try: - self.font_color = ImageColor.getcolor(self.data["font_color"], "RGBA") + self.font_color = ImageColor.getcolor(overlay_data["font_color"], "RGBA") except ValueError: - raise Failed(f"Overlay Error: overlay font_color: {self.data['font_color']} invalid") - if "stroke_width" in self.data: - self.stroke_width = util.parse("Overlay", "stroke_width", self.data["stroke_width"], datatype="int", parent="overlay", default=self.stroke_width) - if "stroke_color" in self.data and self.data["stroke_color"]: + raise Failed(f"Overlay Error: overlay font_color: {overlay_data['font_color']} invalid") + if "stroke_width" in overlay_data: + self.stroke_width = util.parse("Overlay", "stroke_width", overlay_data["stroke_width"], datatype="int", parent="overlay", default=self.stroke_width) + if "stroke_color" in overlay_data and overlay_data["stroke_color"]: try: - self.stroke_color = ImageColor.getcolor(self.data["stroke_color"], "RGBA") + self.stroke_color = ImageColor.getcolor(overlay_data["stroke_color"], "RGBA") except ValueError: - raise Failed(f"Overlay Error: overlay stroke_color: {self.data['stroke_color']} invalid") + raise Failed(f"Overlay Error: overlay stroke_color: {overlay_data['stroke_color']} invalid") if text in old_special_text: text_mod = text[-1] if text[-1] in ["0", "%", "#"] else None text = text if text_mod is None else text[:-1] @@ -309,7 +305,7 @@ def get_and_save_image(image_url): raise Failed(f"Overlay Error: Overlay Image not found at: {self.path}") image_compare = None if self.config.Cache: - _, image_compare, _ = self.config.Cache.query_image_map(self.mapping_name, f"{self.library.image_table_name}_overlays") + _, image_compare, _ = self.config.Cache.query_image_map(self.mapping_name, f"{library.image_table_name}_overlays") overlay_size = os.stat(self.path).st_size self.updated = not image_compare or str(overlay_size) != str(image_compare) try: @@ -317,7 +313,7 @@ def get_and_save_image(image_url): if self.has_coordinates(): self.backdrop_box = self.image.size if self.config.Cache: - self.config.Cache.update_image_map(self.mapping_name, 
f"{self.library.image_table_name}_overlays", self.mapping_name, overlay_size) + self.config.Cache.update_image_map(self.mapping_name, f"{library.image_table_name}_overlays", self.mapping_name, overlay_size) except OSError: raise Failed(f"Overlay Error: overlay image {self.path} failed to load") From 095204440e0a3961ea14b64a47c697b5292bf9ca Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Thu, 23 May 2024 20:21:33 -0400 Subject: [PATCH 21/24] refactor: refactor consts in overlay_config --- modules/builder/overlay_config.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/modules/builder/overlay_config.py b/modules/builder/overlay_config.py index 70c6a85a0..699c8c1ff 100644 --- a/modules/builder/overlay_config.py +++ b/modules/builder/overlay_config.py @@ -49,16 +49,11 @@ var_mods[mod] = ["", "U", "L", "P"] for mod in ["season_number", "episode_number", "episode_count", "versions"]: var_mods[mod] = ["", "W", "WU", "WL", "0", "00"] -single_mods = list(set([m for a, ms in var_mods.items() for m in ms if len(m) == 1])) -double_mods = list(set([m for a, ms in var_mods.items() for m in ms if len(m) == 2])) -vars_by_type = { - "movie": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "movie" in check], - "show": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "show" in check], - "season": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "season" in check], - "episode": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "episode" in check], - "artist": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "artist" in check], - "album": [f"{item}{m}" for check, sub in types_for_var.items() for item in sub for m in var_mods[item] if "album" in check], -} +single_mods = list(set([mod for mods in var_mods.values() for mod in mods if len(mod) == 1])) +double_mods = list(set([mod for mods in var_mods.values() for mod in mods if len(mod) == 2])) +vars_by_type = {} +for key in ["movie", "show", "season", "episode", "artist", "album"]: + vars_by_type[key] = [f"{item}{mod}" for type, var in types_for_var.items() for item in var for mod in var_mods[item] if key in type] def get_canvas_size(item): if isinstance(item, Episode): From 489dd5ed0d1ddcc48785e58977c8f02009ec7247 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Thu, 23 May 2024 20:36:09 -0400 Subject: [PATCH 22/24] refactor: rename consts and move them to appropriate file --- modules/builder/overlay_config.py | 64 ++++++------------------------- modules/overlays.py | 62 +++++++++++++++++++++++++----- 2 files changed, 64 insertions(+), 62 deletions(-) diff --git a/modules/builder/overlay_config.py b/modules/builder/overlay_config.py index 699c8c1ff..afab5447b 100644 --- a/modules/builder/overlay_config.py +++ b/modules/builder/overlay_config.py @@ -8,60 +8,18 @@ logger = util.logger -portrait_dim = (1000, 1500) -landscape_dim = (1920, 1080) -square_dim = (1000, 1000) -old_special_text = [f"{a}{s}" for a in ["audience_rating", "critic_rating", "user_rating"] for s in ["", "0", "%", "#"]] -rating_sources = [ - "tmdb_rating", "imdb_rating", "trakt_user_rating", "omdb_rating", "mdb_rating", "mdb_average_rating", - "mdb_imdb_rating", "mdb_metacritic_rating", "mdb_metacriticuser_rating", "mdb_trakt_rating", "mdb_tomatoes_rating", - "mdb_tomatoesaudience_rating", "mdb_tmdb_rating", 
"mdb_letterboxd_rating", "mdb_myanimelist_rating", - "anidb_rating", "anidb_average_rating", "anidb_score_rating", "mal_rating" -] -float_vars = ["audience_rating", "critic_rating", "user_rating"] + rating_sources -int_vars = ["runtime", "season_number", "episode_number", "episode_count", "versions"] -date_vars = ["originally_available"] -types_for_var = { - "movie_show_season_episode_artist_album": ["runtime", "user_rating", "title"], - "movie_show_episode_album": ["critic_rating", "originally_available"], - "movie_show_season_episode": ["tmdb_rating"], - "movie_show_episode": ["audience_rating", "content_rating", "tmdb_rating", "imdb_rating"], - "movie_show": [ - "original_title", "trakt_user_rating", "omdb_rating", "mdb_rating", "mdb_average_rating", "mdb_imdb_rating", - "mdb_metacritic_rating", "mdb_metacriticuser_rating", "mdb_trakt_rating", "mdb_tomatoes_rating", - "mdb_tomatoesaudience_rating", "mdb_tmdb_rating", "mdb_letterboxd_rating", "mdb_myanimelist_rating", - "anidb_rating", "anidb_average_rating", "anidb_score_rating", "mal_rating" - ], - "movie_episode": ["versions", "bitrate"], - "season_episode": ["show_title", "season_number"], - "show_season": ["episode_count"], - "movie": ["edition"], - "episode": ["season_title", "episode_number"] -} -var_mods = { - "bitrate": ["", "H", "L"], - "originally_available": ["", "["], - "runtime": ["", "H", "M"], -} -for mod in float_vars: - var_mods[mod] = ["", "%", "#", "/"] -for mod in ["title", "content_rating", "original_title", "edition", "show_title", "season_title"]: - var_mods[mod] = ["", "U", "L", "P"] -for mod in ["season_number", "episode_number", "episode_count", "versions"]: - var_mods[mod] = ["", "W", "WU", "WL", "0", "00"] -single_mods = list(set([mod for mods in var_mods.values() for mod in mods if len(mod) == 1])) -double_mods = list(set([mod for mods in var_mods.values() for mod in mods if len(mod) == 2])) -vars_by_type = {} -for key in ["movie", "show", "season", "episode", "artist", "album"]: - vars_by_type[key] = [f"{item}{mod}" for type, var in types_for_var.items() for item in var for mod in var_mods[item] if key in type] +_PORTRAIT_DIM = (1000, 1500) +_LANDSCAPE_DIM = (1920, 1080) +_SQUARE_DIM = (1000, 1000) +_OLD_SPECIAL_TEXT = [f"{a}{s}" for a in ["audience_rating", "critic_rating", "user_rating"] for s in ["", "0", "%", "#"]] def get_canvas_size(item): if isinstance(item, Episode): - return landscape_dim + return _LANDSCAPE_DIM elif isinstance(item, Album): - return square_dim + return _SQUARE_DIM else: - return portrait_dim + return _PORTRAIT_DIM class OverlayConfig: def __init__(self, config, library, overlay_file, original_mapping_name, overlay_data, suppress, level): @@ -269,7 +227,7 @@ def get_and_save_image(image_url): self.stroke_color = ImageColor.getcolor(overlay_data["stroke_color"], "RGBA") except ValueError: raise Failed(f"Overlay Error: overlay stroke_color: {overlay_data['stroke_color']} invalid") - if text in old_special_text: + if text in _OLD_SPECIAL_TEXT: text_mod = text[-1] if text[-1] in ["0", "%", "#"] else None text = text if text_mod is None else text[:-1] if text_mod is None: @@ -436,9 +394,9 @@ def get_cord(value, image_value, over_value, align): def get_canvas(self, item): if isinstance(item, Episode): - canvas_size = landscape_dim + canvas_size = _LANDSCAPE_DIM elif isinstance(item, Album): - canvas_size = square_dim + canvas_size = _SQUARE_DIM else: - canvas_size = portrait_dim + canvas_size = _PORTRAIT_DIM return self.get_backdrop(canvas_size, box=self.backdrop_box, 
text=self.backdrop_text) diff --git a/modules/overlays.py b/modules/overlays.py index fbbd12383..73c247762 100644 --- a/modules/overlays.py +++ b/modules/overlays.py @@ -10,6 +10,50 @@ logger = util.logger +_RATING_SOURCES = [ + "tmdb_rating", "imdb_rating", "trakt_user_rating", "omdb_rating", "mdb_rating", "mdb_average_rating", + "mdb_imdb_rating", "mdb_metacritic_rating", "mdb_metacriticuser_rating", "mdb_trakt_rating", "mdb_tomatoes_rating", + "mdb_tomatoesaudience_rating", "mdb_tmdb_rating", "mdb_letterboxd_rating", "mdb_myanimelist_rating", + "anidb_rating", "anidb_average_rating", "anidb_score_rating", "mal_rating" +] +_FLOAT_VARS = ["audience_rating", "critic_rating", "user_rating"] + _RATING_SOURCES +_INT_VARS = ["runtime", "season_number", "episode_number", "episode_count", "versions"] +_DATE_VARS = ["originally_available"] +_TYPES_FOR_VAR = { + "movie_show_season_episode_artist_album": ["runtime", "user_rating", "title"], + "movie_show_episode_album": ["critic_rating", "originally_available"], + "movie_show_season_episode": ["tmdb_rating"], + "movie_show_episode": ["audience_rating", "content_rating", "tmdb_rating", "imdb_rating"], + "movie_show": [ + "original_title", "trakt_user_rating", "omdb_rating", "mdb_rating", "mdb_average_rating", "mdb_imdb_rating", + "mdb_metacritic_rating", "mdb_metacriticuser_rating", "mdb_trakt_rating", "mdb_tomatoes_rating", + "mdb_tomatoesaudience_rating", "mdb_tmdb_rating", "mdb_letterboxd_rating", "mdb_myanimelist_rating", + "anidb_rating", "anidb_average_rating", "anidb_score_rating", "mal_rating" + ], + "movie_episode": ["versions", "bitrate"], + "season_episode": ["show_title", "season_number"], + "show_season": ["episode_count"], + "movie": ["edition"], + "episode": ["season_title", "episode_number"] +} +_VAR_MODS = { + "bitrate": ["", "H", "L"], + "originally_available": ["", "["], + "runtime": ["", "H", "M"], +} +for mod in _FLOAT_VARS: + _VAR_MODS[mod] = ["", "%", "#", "/"] +for mod in ["title", "content_rating", "original_title", "edition", "show_title", "season_title"]: + _VAR_MODS[mod] = ["", "U", "L", "P"] +for mod in ["season_number", "episode_number", "episode_count", "versions"]: + _VAR_MODS[mod] = ["", "W", "WU", "WL", "0", "00"] + +_SINGLE_MODS = list(set([mod for mods in _VAR_MODS.values() for mod in mods if len(mod) == 1])) +_DOUBLE_MODS = list(set([mod for mods in _VAR_MODS.values() for mod in mods if len(mod) == 2])) +_VARS_BY_TYPE = {} +for key in ["movie", "show", "season", "episode", "artist", "album"]: + _VARS_BY_TYPE[key] = [f"{item}{mod}" for type, var in _TYPES_FOR_VAR.items() for item in var for mod in _VAR_MODS[item] if key in type] + class Overlays: def __init__(self, config, library): self.config = config @@ -136,11 +180,11 @@ def trakt_ratings(): real_value = getattr(item, actual) if cache_value is None or real_value is None: continue - if cache_key in overlay_config.float_vars: + if cache_key in _FLOAT_VARS: cache_value = float(cache_value) - if cache_key in overlay_config.int_vars: + if cache_key in _INT_VARS: cache_value = int(cache_value) - if cache_key in overlay_config.date_vars: + if cache_key in _DATE_VARS: real_value = real_value.strftime("%Y-%m-%d") if real_value != cache_value: overlay_change = f"Special Text Changed from {cache_value} to {real_value}" @@ -218,14 +262,14 @@ def trakt_ratings(): def get_text(text_overlay): full_text = text_overlay.name[5:-1] - for format_var in overlay_config.vars_by_type[text_overlay.level]: + for format_var in _VARS_BY_TYPE[text_overlay.level]: if f"<<{format_var}" in 
full_text and format_var == "originally_available[": mod = re.search("<>", full_text).group(1) format_var = "originally_available" - elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay_config.double_mods)): + elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in _DOUBLE_MODS)): mod = format_var[-2:] format_var = format_var[:-2] - elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in overlay_config.single_mods)): + elif f"<<{format_var}>>" in full_text and format_var.endswith(tuple(m for m in _SINGLE_MODS)): mod = format_var[-1] format_var = format_var[:-1] elif f"<<{format_var}>>" in full_text: @@ -250,7 +294,7 @@ def get_text(text_overlay): actual_value = current elif mod == "L" and current < actual_value: actual_value = current - elif format_var in overlay_config.rating_sources: + elif format_var in _RATING_SOURCES: found_rating = None try: item_to_id = item.show() if isinstance(item, (Season, Episode)) else item @@ -395,7 +439,7 @@ def get_text(text_overlay): if format_var == "versions": actual_value = len(actual_value) if self.config.Cache: - cache_store = actual_value.strftime("%Y-%m-%d") if format_var in overlay_config.date_vars else actual_value + cache_store = actual_value.strftime("%Y-%m-%d") if format_var in _DATE_VARS else actual_value self.config.Cache.update_overlay_special_text(item.ratingKey, format_var, cache_store) sub_value = None if format_var == "originally_available": @@ -434,7 +478,7 @@ def get_text(text_overlay): final_value = str(actual_value).lower() elif mod == "P": final_value = str(actual_value).title() - elif format_var in overlay_config.rating_sources: + elif format_var in _RATING_SOURCES: final_value = f"{float(actual_value):.1f}" else: final_value = actual_value From 9afda44d5901437201dff03bad770556a9a5b963 Mon Sep 17 00:00:00 2001 From: Minwei Xu Date: Thu, 23 May 2024 21:41:31 -0400 Subject: [PATCH 23/24] refactor: rename display_filters to log_filters as a more accurate method name --- kometa.py | 4 ++-- modules/builder/__init__.py | 2 +- modules/overlays.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/kometa.py b/kometa.py index f6bb293a7..b2e8d7d7d 100644 --- a/kometa.py +++ b/kometa.py @@ -819,7 +819,7 @@ def run_collection(config, library, metadata, requested_collections): else: raise Failed(e) - builder.display_filters() + builder.log_filters() if len(builder.found_items) > 0 and len(builder.found_items) + builder.beginning_count >= builder.minimum and builder.build_collection: items_added, items_unchanged = builder.add_to_collection() @@ -1013,7 +1013,7 @@ def run_playlists(config): else: ids = builder.gather_ids(method, value) - builder.display_filters() + builder.log_filters() builder.filter_and_save_items(ids) if len(builder.found_items) > 0 and len(builder.found_items) + builder.beginning_count >= builder.minimum: diff --git a/modules/builder/__init__.py b/modules/builder/__init__.py index 07b60616b..4d5cf246b 100644 --- a/modules/builder/__init__.py +++ b/modules/builder/__init__.py @@ -1270,7 +1270,7 @@ def sync_collection(self): def check_filters(self, item, display): return self.missing_filters_util.check_filters(item, display) - def display_filters(self): + def log_filters(self): if self.filters: for filter_list in self.filters: logger.info("") diff --git a/modules/overlays.py b/modules/overlays.py index 73c247762..3ae5bf9d5 100644 --- a/modules/overlays.py +++ b/modules/overlays.py @@ -597,7 +597,7 @@ def compile_overlays(self): 
prop_name = builder.overlay.mapping_name properties[prop_name] = builder.overlay - builder.display_filters() + builder.log_filters() for method, value in builder.builders: logger.debug("") From 721d316b93c34de14b0398e75d8d3da91c928a0f Mon Sep 17 00:00:00 2001 From: Chaz Larson Date: Fri, 27 Sep 2024 14:38:31 -0500 Subject: [PATCH 24/24] Update kometa.py to trigger a build --- kometa.py | 1 + 1 file changed, 1 insertion(+) diff --git a/kometa.py b/kometa.py index b2e8d7d7d..e042afdc2 100644 --- a/kometa.py +++ b/kometa.py @@ -1162,3 +1162,4 @@ def run_playlists(config): time.sleep(60) except KeyboardInterrupt: logger.separator("Exiting Kometa") +
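
For reference, patches 21 and 22 above rebuild the per-media-type variable list (vars_by_type, later _VARS_BY_TYPE) from two lookup tables: compound keys such as "movie_show" declare which variables apply, and var_mods declares the modifier suffixes each variable accepts. The sketch below is illustrative only; it uses a small hand-made subset of those tables rather than the real constants, but it shows how the nested comprehension expands and why a plain substring test ("movie" in "movie_show") is enough to route each variable to the right media types without listing each one separately.

    # Illustrative sketch only: a reduced, hypothetical subset of the
    # _TYPES_FOR_VAR / _VAR_MODS tables introduced in patch 22 (the real
    # constants live in modules/overlays.py after that patch).
    types_for_var = {
        "movie_show": ["original_title"],
        "movie_episode": ["bitrate"],
        "episode": ["episode_number"],
    }
    # Modifier suffixes accepted by each variable (subset; "" means the
    # unmodified variable itself).
    var_mods = {
        "original_title": ["", "U", "L"],
        "bitrate": ["", "H", "L"],
        "episode_number": ["", "W", "0"],
    }

    # Expand every variable with every modifier it accepts, grouped by media
    # type. A variable is included for a media type when that type's name
    # appears in the compound key, e.g. "movie" in "movie_show".
    vars_by_type = {}
    for media_type in ["movie", "show", "episode"]:
        vars_by_type[media_type] = [
            f"{item}{mod}"
            for type_key, items in types_for_var.items()
            for item in items
            for mod in var_mods[item]
            if media_type in type_key
        ]

    print(vars_by_type["movie"])
    # ['original_title', 'original_titleU', 'original_titleL',
    #  'bitrate', 'bitrateH', 'bitrateL']
    print(vars_by_type["show"])
    # ['original_title', 'original_titleU', 'original_titleL']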