Merge pull request #279 from luigi311/env_file

Add support for an env file, set via ENV_FILE
Luigi311 2025-07-11 10:24:40 -06:00 committed by GitHub
commit 98266de678
9 changed files with 402 additions and 286 deletions
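In short: instead of loading a global .env with load_dotenv, the entry point now parses the file named by ENV_FILE into a plain dict and hands that dict to every server connection. A minimal sketch of the new behaviour, using the names that appear in the diff below (ENV_FILE still defaults to .env):

import os
from dotenv import dotenv_values

# e.g. ENV_FILE="test/ci_plex.env" uv run main.py
env = dotenv_values(os.environ.get("ENV_FILE", ".env"))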

View File

@ -3,11 +3,8 @@
## Do not mark any shows/movies as played and instead just output to the log if they would have been marked.
DRYRUN = "True"
## Additional logging information
DEBUG = "False"
## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"
DEBUG_LEVEL = "INFO"
## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "False"
@ -16,7 +13,7 @@ RUN_ONLY_ONCE = "False"
SLEEP_DURATION = "3600"
## Log file where all output will be written to
LOGFILE = "log.log"
LOG_FILE = "log.log"
## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"
@ -24,26 +21,24 @@ MARK_FILE = "mark.log"
## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300
## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using same files on multiple servers
GENERATE_GUIDS = "True"
## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"
## Max threads for processing
MAX_THREADS = 2
MAX_THREADS = 1
## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files, you can enable only GENERATE_LOCATIONS
## If media servers are using different files, you can enable only GENERATE_GUIDS
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
#USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }
USER_MAPPING = { "Username": "User", "Second User": "User Dos" }
## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
#LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
@ -52,7 +47,7 @@ MAX_THREADS = 2
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"
#WHITELIST_USERS = ""
# Plex
@ -96,7 +91,7 @@ EMBY_BASEURL = "http://localhost:8097"
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
EMBY_TOKEN = "SuperSecretToken"
# Syncing Options
@ -113,4 +108,4 @@ SYNC_FROM_JELLYFIN_TO_EMBY = "True"
SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"
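For reference, the USER_MAPPING and LIBRARY_MAPPING values above are parsed as JSON by main.py further down in this diff (user mappings are lower-cased first); a minimal sketch using the sample values shown here:

import json

user_mapping = json.loads('{ "Username": "User", "Second User": "User Dos" }'.lower())
library_mapping = json.loads('{ "Shows": "TV Shows", "Movie": "Movies" }')
# user_mapping    -> {"username": "user", "second user": "user dos"}
# library_mapping -> {"Shows": "TV Shows", "Movie": "Movies"}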

View File

@ -70,40 +70,35 @@ jobs:
- name: "Test Plex"
run: |
mv test/ci_plex.env .env
uv run main.py
ENV_FILE="test/ci_plex.env" uv run main.py
uv run test/validate_ci_marklog.py --plex
rm mark.log
- name: "Test Jellyfin"
run: |
mv test/ci_jellyfin.env .env
uv run main.py
ENV_FILE="test/ci_jellyfin.env" uv run main.py
uv run test/validate_ci_marklog.py --jellyfin
rm mark.log
- name: "Test Emby"
run: |
mv test/ci_emby.env .env
uv run main.py
ENV_FILE="test/ci_emby.env" uv run main.py
uv run test/validate_ci_marklog.py --emby
rm mark.log
- name: "Test Guids"
run: |
mv test/ci_guids.env .env
uv run main.py
ENV_FILE="test/ci_guids.env" uv run main.py
uv run test/validate_ci_marklog.py --guids
rm mark.log
- name: "Test Locations"
run: |
mv test/ci_locations.env .env
uv run main.py
ENV_FILE="test/ci_locations.env" uv run main.py
uv run test/validate_ci_marklog.py --locations
rm mark.log
@ -111,11 +106,10 @@ jobs:
- name: "Test writing to the servers"
run: |
# Test writing to the servers
mv test/ci_write.env .env
uv run main.py
ENV_FILE="test/ci_write.env" uv run main.py
# Test again to test if it can handle existing data
uv run main.py
ENV_FILE="test/ci_write.env" uv run main.py
uv run test/validate_ci_marklog.py --write
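Each CI scenario now runs against its own env file instead of being renamed to .env between steps. A hypothetical Python equivalent of those steps (not part of the change; uv, main.py, and the file paths are taken from the workflow above):

import os
import subprocess

for env_file in ("test/ci_plex.env", "test/ci_jellyfin.env", "test/ci_emby.env"):
    # Each run reads only the file named by ENV_FILE; nothing is moved or overwritten.
    subprocess.run(["uv", "run", "main.py"], env={**os.environ, "ENV_FILE": env_file}, check=True)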

View File

@ -1,18 +1,17 @@
import os
from typing import Literal
from dotenv import load_dotenv
from loguru import logger
from src.functions import str_to_bool
from src.functions import str_to_bool, get_env_value
from src.plex import Plex
from src.jellyfin import Jellyfin
from src.emby import Emby
load_dotenv(override=True)
def jellyfin_emby_server_connection(
server_baseurl: str, server_token: str, server_type: Literal["jellyfin", "emby"]
env,
server_baseurl: str,
server_token: str,
server_type: Literal["jellyfin", "emby"],
) -> list[Jellyfin | Emby]:
servers: list[Jellyfin | Emby] = []
server: Jellyfin | Emby
@ -31,11 +30,13 @@ def jellyfin_emby_server_connection(
base_url = base_url[:-1]
if server_type == "jellyfin":
server = Jellyfin(base_url=base_url, token=server_tokens[i].strip())
server = Jellyfin(
env=env, base_url=base_url, token=server_tokens[i].strip()
)
servers.append(server)
elif server_type == "emby":
server = Emby(base_url=base_url, token=server_tokens[i].strip())
server = Emby(env=env, base_url=base_url, token=server_tokens[i].strip())
servers.append(server)
else:
raise Exception("Unknown server type")
@ -45,15 +46,15 @@ def jellyfin_emby_server_connection(
return servers
def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]:
servers: list[Plex | Jellyfin | Emby] = []
plex_baseurl_str: str | None = os.getenv("PLEX_BASEURL", None)
plex_token_str: str | None = os.getenv("PLEX_TOKEN", None)
plex_username_str: str | None = os.getenv("PLEX_USERNAME", None)
plex_password_str: str | None = os.getenv("PLEX_PASSWORD", None)
plex_servername_str: str | None = os.getenv("PLEX_SERVERNAME", None)
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
plex_baseurl_str: str | None = get_env_value(env, "PLEX_BASEURL", None)
plex_token_str: str | None = get_env_value(env, "PLEX_TOKEN", None)
plex_username_str: str | None = get_env_value(env, "PLEX_USERNAME", None)
plex_password_str: str | None = get_env_value(env, "PLEX_PASSWORD", None)
plex_servername_str: str | None = get_env_value(env, "PLEX_SERVERNAME", None)
ssl_bypass = str_to_bool(get_env_value(env, "SSL_BYPASS", "False"))
if plex_baseurl_str and plex_token_str:
plex_baseurl = plex_baseurl_str.split(",")
@ -66,6 +67,7 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
for i, url in enumerate(plex_baseurl):
server = Plex(
env,
base_url=url.strip(),
token=plex_token[i].strip(),
user_name=None,
@ -92,6 +94,7 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
for i, username in enumerate(plex_username):
server = Plex(
env,
base_url=None,
token=None,
user_name=username.strip(),
@ -103,20 +106,20 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
logger.debug(f"Plex Server {i} info: {server.info()}")
servers.append(server)
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
jellyfin_baseurl = get_env_value(env, "JELLYFIN_BASEURL", None)
jellyfin_token = get_env_value(env, "JELLYFIN_TOKEN", None)
if jellyfin_baseurl and jellyfin_token:
servers.extend(
jellyfin_emby_server_connection(
jellyfin_baseurl, jellyfin_token, "jellyfin"
env, jellyfin_baseurl, jellyfin_token, "jellyfin"
)
)
emby_baseurl = os.getenv("EMBY_BASEURL", None)
emby_token = os.getenv("EMBY_TOKEN", None)
emby_baseurl = get_env_value(env, "EMBY_BASEURL", None)
emby_token = get_env_value(env, "EMBY_TOKEN", None)
if emby_baseurl and emby_token:
servers.extend(
jellyfin_emby_server_connection(emby_baseurl, emby_token, "emby")
jellyfin_emby_server_connection(env, emby_baseurl, emby_token, "emby")
)
return servers
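connection.py no longer reads os.getenv directly; every lookup goes through get_env_value against the dict it is handed. A minimal calling sketch, assuming env was built with dotenv_values as in main.py below:

from dotenv import dotenv_values
from src.connection import generate_server_connections

env = dotenv_values(".env")  # or whichever file ENV_FILE points at
servers = generate_server_connections(env)  # Plex/Jellyfin/Emby instances built from this dict only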

View File

@ -4,7 +4,7 @@ from loguru import logger
class Emby(JellyfinEmby):
def __init__(self, base_url: str, token: str) -> None:
def __init__(self, env, base_url: str, token: str) -> None:
authorization = (
"Emby , "
'Client="JellyPlex-Watched", '
@ -19,7 +19,7 @@ class Emby(JellyfinEmby):
}
super().__init__(
server_type="Emby", base_url=base_url, token=token, headers=headers
env, server_type="Emby", base_url=base_url, token=token, headers=headers
)
def is_partial_update_supported(self, server_version: Version) -> bool:

View File

@ -5,8 +5,6 @@ from dotenv import load_dotenv
load_dotenv(override=True)
mark_file = os.getenv("MARK_FILE", os.getenv("MARKFILE", "mark.log"))
def log_marked(
server_type: str,
@ -16,6 +14,7 @@ def log_marked(
movie_show: str,
episode: str | None = None,
duration: float | None = None,
mark_file: str = "mark.log",
) -> None:
output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"
@ -29,9 +28,18 @@ def log_marked(
file.write(output + "\n")
def get_env_value(env, key: str, default: Any = None):
if env and key in env:
return env[key]
elif os.getenv(key):
return os.getenv(key)
else:
return default
# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: str) -> bool:
def str_to_bool(value: str | None) -> bool:
if not value:
return False
return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
@ -73,13 +81,13 @@ def future_thread_executor(
args: list[tuple[Callable[..., Any], ...]],
threads: int | None = None,
override_threads: bool = False,
max_threads: int | None = None,
) -> list[Any]:
results: list[Any] = []
# Determine the number of workers, defaulting to 1 if os.cpu_count() returns None
max_threads_env: int = int(os.getenv("MAX_THREADS", 32))
cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None
workers: int = min(max_threads_env, cpu_threads * 2)
workers: int = min(max_threads, cpu_threads * 2) if max_threads else cpu_threads * 2
# Adjust workers based on threads parameter and override_threads flag
if threads is not None:
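Two small but load-bearing changes here: get_env_value gives the parsed env file precedence over the process environment, and future_thread_executor now takes an explicit max_threads argument instead of reading MAX_THREADS itself. A precedence sketch (assumes SLEEP_DURATION is not set in your shell):

import os
from src.functions import get_env_value, str_to_bool

os.environ["DRYRUN"] = "True"
env = {"DRYRUN": "False"}

str_to_bool(get_env_value(env, "DRYRUN", "False"))    # False: the env-file dict wins
str_to_bool(get_env_value(None, "DRYRUN", "False"))   # True: falls back to os.environ
get_env_value(env, "SLEEP_DURATION", "3600")          # "3600": default when unset anywhere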

View File

@ -4,7 +4,7 @@ from loguru import logger
class Jellyfin(JellyfinEmby):
def __init__(self, base_url: str, token: str) -> None:
def __init__(self, env, base_url: str, token: str) -> None:
authorization = (
"MediaBrowser , "
'Client="JellyPlex-Watched", '
@ -19,7 +19,7 @@ class Jellyfin(JellyfinEmby):
}
super().__init__(
server_type="Jellyfin", base_url=base_url, token=token, headers=headers
env, server_type="Jellyfin", base_url=base_url, token=token, headers=headers
)
def is_partial_update_supported(self, server_version: Version) -> bool:
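Both the Jellyfin and Emby wrappers now take the env dict as their first argument and forward it to the shared JellyfinEmby base class, where the per-instance settings are read. A construction sketch with placeholder URLs and tokens (a reachable server is needed for the connection to succeed):

from dotenv import dotenv_values
from src.jellyfin import Jellyfin
from src.emby import Emby

env = dotenv_values(".env")
jellyfin = Jellyfin(env=env, base_url="http://localhost:8096", token="SuperSecretToken")
emby = Emby(env=env, base_url="http://localhost:8097", token="SuperSecretToken")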

View File

@ -5,7 +5,6 @@ import traceback
import os
from math import floor
from typing import Any, Literal
from dotenv import load_dotenv
from packaging.version import parse, Version
from loguru import logger
@ -13,6 +12,7 @@ from src.functions import (
search_mapping,
log_marked,
str_to_bool,
get_env_value,
)
from src.watched import (
LibraryData,
@ -24,14 +24,12 @@ from src.watched import (
check_same_identifiers,
)
load_dotenv(override=True)
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
def extract_identifiers_from_item(
server_type: str, item: dict[str, Any]
server_type: str,
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool,
) -> MediaIdentifiers:
title = item.get("Name")
id = None
@ -72,9 +70,16 @@ def extract_identifiers_from_item(
)
def get_mediaitem(server_type: str, item: dict[str, Any]) -> MediaItem:
def get_mediaitem(
server_type: str,
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool,
) -> MediaItem:
return MediaItem(
identifiers=extract_identifiers_from_item(server_type, item),
identifiers=extract_identifiers_from_item(
server_type, item, generate_guids, generate_locations
),
status=WatchedStatus(
completed=item.get("UserData", {}).get("Played"),
time=floor(
@ -87,18 +92,21 @@ def get_mediaitem(server_type: str, item: dict[str, Any]) -> MediaItem:
class JellyfinEmby:
def __init__(
self,
env,
server_type: Literal["Jellyfin", "Emby"],
base_url: str,
token: str,
headers: dict[str, str],
) -> None:
self.env = env
if server_type not in ["Jellyfin", "Emby"]:
raise Exception(f"Server type {server_type} not supported")
self.server_type: str = server_type
self.base_url: str = base_url
self.token: str = token
self.headers: dict[str, str] = headers
self.timeout: int = int(os.getenv("REQUEST_TIMEOUT", 300))
self.timeout: int = int(get_env_value(self.env, "REQUEST_TIMEOUT", 300))
if not self.base_url:
raise Exception(f"{self.server_type} base_url not set")
@ -113,6 +121,12 @@ class JellyfinEmby:
self.update_partial: bool = self.is_partial_update_supported(
self.server_version
)
self.generate_guids: bool = str_to_bool(
get_env_value(self.env, "GENERATE_GUIDS", "True")
)
self.generate_locations: bool = str_to_bool(
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
)
def query(
self,
@ -327,7 +341,14 @@ class JellyfinEmby:
movie["UserData"].get("Played")
or movie["UserData"].get("PlaybackPositionTicks", 0) > 600000000
):
watched.movies.append(get_mediaitem(self.server_type, movie))
watched.movies.append(
get_mediaitem(
self.server_type,
movie,
self.generate_guids,
self.generate_locations,
)
)
# TV Shows
if library_type == "tvshows":
@ -394,7 +415,12 @@ class JellyfinEmby:
> 600000000
):
episode_mediaitem.append(
get_mediaitem(self.server_type, episode)
get_mediaitem(
self.server_type,
episode,
self.generate_guids,
self.generate_locations,
)
)
if episode_mediaitem:
@ -506,7 +532,10 @@ class JellyfinEmby:
for jellyfin_video in jellyfin_search.get("Items", []):
jelly_identifiers = extract_identifiers_from_item(
self.server_type, jellyfin_video
self.server_type,
jellyfin_video,
self.generate_guids,
self.generate_locations,
)
# Check each stored movie for a match.
for stored_movie in library_data.movies:
@ -529,6 +558,9 @@ class JellyfinEmby:
user_name,
library_name,
jellyfin_video.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
elif self.update_partial:
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}"
@ -552,6 +584,9 @@ class JellyfinEmby:
library_name,
jellyfin_video.get("Name"),
duration=floor(stored_movie.status.time / 60_000),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
logger.trace(
@ -576,7 +611,10 @@ class JellyfinEmby:
for jellyfin_show in jellyfin_shows:
jellyfin_show_identifiers = extract_identifiers_from_item(
self.server_type, jellyfin_show
self.server_type,
jellyfin_show,
self.generate_guids,
self.generate_locations,
)
# Try to find a matching series in your stored library.
for stored_series in library_data.series:
@ -606,7 +644,10 @@ class JellyfinEmby:
for jellyfin_episode in jellyfin_episodes.get("Items", []):
jellyfin_episode_identifiers = (
extract_identifiers_from_item(
self.server_type, jellyfin_episode
self.server_type,
jellyfin_episode,
self.generate_guids,
self.generate_locations,
)
)
for stored_ep in stored_series.episodes:
@ -636,6 +677,9 @@ class JellyfinEmby:
library_name,
jellyfin_episode.get("SeriesName"),
jellyfin_episode.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
elif self.update_partial:
msg = (
@ -667,6 +711,9 @@ class JellyfinEmby:
duration=floor(
stored_ep.status.time / 60_000
),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
logger.trace(
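Note also how MARK_FILE is handled now: log_marked takes the target file as a parameter, and every call site above resolves it from the instance's env dict rather than from a value read at import time. An illustrative call with placeholder names:

from src.functions import get_env_value, log_marked

env = {"MARK_FILE": "mark.log"}
log_marked(
    "Jellyfin",
    "ExampleServer",   # placeholder server, user, and library names
    "testuser1",
    "TV Shows",
    "Example Show",
    episode="Example Episode",
    mark_file=get_env_value(env, "MARK_FILE", "mark.log"),
)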

View File

@ -2,7 +2,7 @@ import os
import traceback
import json
import sys
from dotenv import load_dotenv
from dotenv import dotenv_values
from time import sleep, perf_counter
from loguru import logger
@ -13,6 +13,7 @@ from src.library import setup_libraries
from src.functions import (
parse_string_to_list,
str_to_bool,
get_env_value,
)
from src.users import setup_users
from src.watched import (
@ -21,53 +22,59 @@ from src.watched import (
from src.black_white import setup_black_white_lists
from src.connection import generate_server_connections
load_dotenv(override=True)
log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log"))
level = os.getenv("DEBUG_LEVEL", "INFO").upper()
def configure_logger() -> None:
def configure_logger(log_file: str = "log.log", debug_level: str = "INFO") -> None:
# Remove default logger to configure our own
logger.remove()
# Choose log level based on environment
# If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO.
if level not in ["INFO", "DEBUG", "TRACE"]:
if debug_level not in ["INFO", "DEBUG", "TRACE"]:
logger.add(sys.stdout)
raise Exception("Invalid DEBUG_LEVEL, please choose between INFO, DEBUG, TRACE")
raise Exception(
f"Invalid DEBUG_LEVEL {debug_level}, please choose between INFO, DEBUG, TRACE"
)
# Add a sink for file logging and the console.
logger.add(log_file, level=level, mode="w")
logger.add(sys.stdout, level=level)
logger.add(log_file, level=debug_level, mode="w")
logger.add(sys.stdout, level=debug_level)
def should_sync_server(
env,
server_1: Plex | Jellyfin | Emby,
server_2: Plex | Jellyfin | Emby,
) -> bool:
sync_from_plex_to_jellyfin = str_to_bool(
os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
get_env_value(env, "SYNC_FROM_PLEX_TO_JELLYFIN", "True")
)
sync_from_plex_to_plex = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_PLEX", "True")
)
sync_from_plex_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_EMBY", "True")
)
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
sync_from_plex_to_emby = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_EMBY", "True"))
sync_from_jelly_to_plex = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_PLEX", "True")
)
sync_from_jelly_to_jellyfin = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
)
sync_from_jelly_to_emby = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_EMBY", "True")
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_EMBY", "True")
)
sync_from_emby_to_plex = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_PLEX", "True"))
sync_from_emby_to_jellyfin = str_to_bool(
os.getenv("SYNC_FROM_EMBY_TO_JELLYFIN", "True")
sync_from_emby_to_plex = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_PLEX", "True")
)
sync_from_emby_to_jellyfin = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_JELLYFIN", "True")
)
sync_from_emby_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_EMBY", "True")
)
sync_from_emby_to_emby = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_EMBY", "True"))
if isinstance(server_1, Plex):
if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin:
@ -111,17 +118,17 @@ def should_sync_server(
return True
def main_loop() -> None:
dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
def main_loop(env) -> None:
dryrun = str_to_bool(get_env_value(env, "DRYRUN", "False"))
logger.info(f"Dryrun: {dryrun}")
user_mapping_env = os.getenv("USER_MAPPING", None)
user_mapping_env = get_env_value(env, "USER_MAPPING", None)
user_mapping = None
if user_mapping_env:
user_mapping = json.loads(user_mapping_env.lower())
logger.info(f"User Mapping: {user_mapping}")
library_mapping_env = os.getenv("LIBRARY_MAPPING", None)
library_mapping_env = get_env_value(env, "LIBRARY_MAPPING", None)
library_mapping = None
if library_mapping_env:
library_mapping = json.loads(library_mapping_env)
@ -129,16 +136,20 @@ def main_loop() -> None:
# Create (black/white)lists
logger.info("Creating (black/white)lists")
blacklist_library = parse_string_to_list(os.getenv("BLACKLIST_LIBRARY", None))
whitelist_library = parse_string_to_list(os.getenv("WHITELIST_LIBRARY", None))
blacklist_library = parse_string_to_list(
get_env_value(env, "BLACKLIST_LIBRARY", None)
)
whitelist_library = parse_string_to_list(
get_env_value(env, "WHITELIST_LIBRARY", None)
)
blacklist_library_type = parse_string_to_list(
os.getenv("BLACKLIST_LIBRARY_TYPE", None)
get_env_value(env, "BLACKLIST_LIBRARY_TYPE", None)
)
whitelist_library_type = parse_string_to_list(
os.getenv("WHITELIST_LIBRARY_TYPE", None)
get_env_value(env, "WHITELIST_LIBRARY_TYPE", None)
)
blacklist_users = parse_string_to_list(os.getenv("BLACKLIST_USERS", None))
whitelist_users = parse_string_to_list(os.getenv("WHITELIST_USERS", None))
blacklist_users = parse_string_to_list(get_env_value(env, "BLACKLIST_USERS", None))
whitelist_users = parse_string_to_list(get_env_value(env, "WHITELIST_USERS", None))
(
blacklist_library,
@ -160,7 +171,7 @@ def main_loop() -> None:
# Create server connections
logger.info("Creating server connections")
servers = generate_server_connections()
servers = generate_server_connections(env)
for server_1 in servers:
# If server is the final server in the list, then we are done with the loop
@ -170,9 +181,9 @@ def main_loop() -> None:
# Start server_2 at the next server in the list
for server_2 in servers[servers.index(server_1) + 1 :]:
# Check if server 1 and server 2 are going to be synced in either direction, skip if not
if not should_sync_server(server_1, server_2) and not should_sync_server(
server_2, server_1
):
if not should_sync_server(
env, server_1, server_2
) and not should_sync_server(env, server_2, server_1):
continue
logger.info(f"Server 1: {type(server_1)}: {server_1.info()}")
@ -223,7 +234,7 @@ def main_loop() -> None:
f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
)
if should_sync_server(server_2, server_1):
if should_sync_server(env, server_2, server_1):
logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")
server_1.update_watched(
server_2_watched_filtered,
@ -232,7 +243,7 @@ def main_loop() -> None:
dryrun,
)
if should_sync_server(server_1, server_2):
if should_sync_server(env, server_1, server_2):
logger.info(f"Syncing {server_1.info()} -> {server_2.info()}")
server_2.update_watched(
server_1_watched_filtered,
@ -244,15 +255,24 @@ def main_loop() -> None:
@logger.catch
def main() -> None:
run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
# Get environment variables
env_file = get_env_value(None, "ENV_FILE", ".env")
env = dotenv_values(env_file)
run_only_once = str_to_bool(get_env_value(env, "RUN_ONLY_ONCE", "False"))
sleep_duration = float(get_env_value(env, "SLEEP_DURATION", "3600"))
log_file = get_env_value(env, "LOG_FILE", "log.log")
debug_level = get_env_value(env, "DEBUG_LEVEL", "INFO")
if debug_level:
debug_level = debug_level.upper()
times: list[float] = []
while True:
try:
start = perf_counter()
# Reconfigure the logger on each loop so the logs are rotated on each run
configure_logger()
main_loop()
configure_logger(log_file, debug_level)
main_loop(env)
end = perf_counter()
times.append(end - start)
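Pulling the main() changes together: ENV_FILE is the only value read straight from the process environment, everything else comes out of the parsed dict, and the logger is reconfigured from explicit parameters on every loop. A condensed sketch of the code above (not a drop-in replacement):

from dotenv import dotenv_values
from src.functions import get_env_value, str_to_bool

env = dotenv_values(get_env_value(None, "ENV_FILE", ".env"))

run_only_once = str_to_bool(get_env_value(env, "RUN_ONLY_ONCE", "False"))
sleep_duration = float(get_env_value(env, "SLEEP_DURATION", "3600"))
log_file = get_env_value(env, "LOG_FILE", "log.log")
# condensed: the real code checks for None before calling .upper()
debug_level = (get_env_value(env, "DEBUG_LEVEL", "INFO") or "INFO").upper()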

View File

@ -1,6 +1,4 @@
import os
import requests
from dotenv import load_dotenv
from loguru import logger
from urllib3.poolmanager import PoolManager
@ -17,6 +15,7 @@ from src.functions import (
search_mapping,
log_marked,
str_to_bool,
get_env_value,
)
from src.watched import (
LibraryData,
@ -28,11 +27,6 @@ from src.watched import (
check_same_identifiers,
)
load_dotenv(override=True)
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
class HostNameIgnoringAdapter(RequestsHTTPAdapter):
@ -48,7 +42,9 @@ class HostNameIgnoringAdapter(RequestsHTTPAdapter):
)
def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
def extract_guids_from_item(
item: Movie | Show | Episode, generate_guids: bool
) -> dict[str, str]:
# If GENERATE_GUIDS is set to False, then return an empty dict
if not generate_guids:
return {}
@ -62,9 +58,12 @@ def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
return guids
def extract_identifiers_from_item(item: Movie | Show | Episode) -> MediaIdentifiers:
guids = extract_guids_from_item(item)
def extract_identifiers_from_item(
item: Movie | Show | Episode,
generate_guids: bool,
generate_locations: bool,
) -> MediaIdentifiers:
guids = extract_guids_from_item(item, generate_guids)
return MediaIdentifiers(
title=item.title,
locations=(
@ -78,165 +77,25 @@ def extract_identifiers_from_item(item: Movie | Show | Episode) -> MediaIdentifiers:
)
def get_mediaitem(item: Movie | Episode, completed: bool) -> MediaItem:
def get_mediaitem(
item: Movie | Episode,
completed: bool,
generate_guids: bool = True,
generate_locations: bool = True,
) -> MediaItem:
return MediaItem(
identifiers=extract_identifiers_from_item(item),
identifiers=extract_identifiers_from_item(
item, generate_guids, generate_locations
),
status=WatchedStatus(completed=completed, time=item.viewOffset),
)
def update_user_watched(
user: MyPlexAccount,
user_plex: PlexServer,
library_data: LibraryData,
library_name: str,
dryrun: bool,
) -> None:
# If there are no movies or shows to update, exit early.
if not library_data.series and not library_data.movies:
return
logger.info(f"Plex: Updating watched for {user.title} in library {library_name}")
library_section = user_plex.library.section(library_name)
if not library_section:
logger.error(
f"Plex: Library {library_name} not found for {user.title}, skipping",
)
return
# Update movies.
if library_data.movies:
# Search for Plex movies that are currently marked as unwatched.
for plex_movie in library_section.search(unwatched=True):
plex_identifiers = extract_identifiers_from_item(plex_movie)
# Check each stored movie for a match.
for stored_movie in library_data.movies:
if check_same_identifiers(plex_identifiers, stored_movie.identifiers):
# If the stored movie is marked as watched (or has enough progress),
# update the Plex movie accordingly.
if stored_movie.status.completed:
msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
None,
None,
)
else:
msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.updateTimeline(stored_movie.status.time)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
duration=stored_movie.status.time,
)
# Once matched, no need to check further.
break
# Update TV Shows (series/episodes).
if library_data.series:
# For each Plex show in the library section:
plex_shows = library_section.search(unwatched=True)
for plex_show in plex_shows:
# Extract identifiers from the Plex show.
plex_show_identifiers = extract_identifiers_from_item(plex_show)
# Try to find a matching series in your stored library.
for stored_series in library_data.series:
if check_same_identifiers(
plex_show_identifiers, stored_series.identifiers
):
logger.trace(f"Found matching show for '{plex_show.title}'")
# Now update episodes.
# Get the list of Plex episodes for this show.
plex_episodes = plex_show.episodes()
for plex_episode in plex_episodes:
plex_episode_identifiers = extract_identifiers_from_item(
plex_episode
)
for stored_ep in stored_series.episodes:
if check_same_identifiers(
plex_episode_identifiers, stored_ep.identifiers
):
if stored_ep.status.completed:
msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
)
else:
msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.updateTimeline(
stored_ep.status.time
)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
stored_ep.status.time,
)
break # Found a matching episode.
break # Found a matching show.
# The Plex class accepts base_url, token, username, and password, all defaulting to None
class Plex:
def __init__(
self,
env,
base_url: str | None = None,
token: str | None = None,
user_name: str | None = None,
@ -245,6 +104,8 @@ class Plex:
ssl_bypass: bool = False,
session: requests.Session | None = None,
) -> None:
self.env = env
self.server_type: str = "Plex"
self.ssl_bypass: bool = ssl_bypass
if ssl_bypass:
@ -261,6 +122,12 @@ class Plex:
self.admin_user: MyPlexAccount = self.plex.myPlexAccount()
self.users: list[MyPlexUser | MyPlexAccount] = self.get_users()
self.generate_guids: bool = str_to_bool(
get_env_value(self.env, "GENERATE_GUIDS", "True")
)
self.generate_locations: bool = str_to_bool(
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
)
def login(
self,
@ -346,7 +213,14 @@ class Plex:
unwatched=False
) + library_videos.search(inProgress=True):
if video.isWatched or video.viewOffset >= 60000:
watched.movies.append(get_mediaitem(video, video.isWatched))
watched.movies.append(
get_mediaitem(
video,
video.isWatched,
self.generate_guids,
self.generate_locations,
)
)
elif library.type == "show":
# Keep track of processed shows to reduce duplicate shows
@ -357,7 +231,7 @@ class Plex:
if show.key in processed_shows:
continue
processed_shows.append(show.key)
show_guids = extract_guids_from_item(show)
show_guids = extract_guids_from_item(show, self.generate_guids)
episode_mediaitem = []
# Fetch watched or partially watched episodes
@ -365,7 +239,12 @@ class Plex:
viewOffset__gte=60_000
):
episode_mediaitem.append(
get_mediaitem(episode, episode.isWatched)
get_mediaitem(
episode,
episode.isWatched,
self.generate_guids,
self.generate_locations,
)
)
if episode_mediaitem:
@ -380,7 +259,7 @@ class Plex:
for location in show.locations
]
)
if generate_locations
if self.generate_locations
else tuple()
),
imdb_id=show_guids.get("imdb"),
@ -442,6 +321,176 @@ class Plex:
logger.error(f"Plex: Failed to get watched, Error: {e}")
return {}
def update_user_watched(
self,
user: MyPlexAccount,
user_plex: PlexServer,
library_data: LibraryData,
library_name: str,
dryrun: bool,
) -> None:
# If there are no movies or shows to update, exit early.
if not library_data.series and not library_data.movies:
return
logger.info(
f"Plex: Updating watched for {user.title} in library {library_name}"
)
library_section = user_plex.library.section(library_name)
if not library_section:
logger.error(
f"Plex: Library {library_name} not found for {user.title}, skipping",
)
return
# Update movies.
if library_data.movies:
# Search for Plex movies that are currently marked as unwatched.
for plex_movie in library_section.search(unwatched=True):
plex_identifiers = extract_identifiers_from_item(
plex_movie, self.generate_guids, self.generate_locations
)
# Check each stored movie for a match.
for stored_movie in library_data.movies:
if check_same_identifiers(
plex_identifiers, stored_movie.identifiers
):
# If the stored movie is marked as watched (or has enough progress),
# update the Plex movie accordingly.
if stored_movie.status.completed:
msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
None,
None,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.updateTimeline(stored_movie.status.time)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
duration=stored_movie.status.time,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
# Once matched, no need to check further.
break
# Update TV Shows (series/episodes).
if library_data.series:
# For each Plex show in the library section:
plex_shows = library_section.search(unwatched=True)
for plex_show in plex_shows:
# Extract identifiers from the Plex show.
plex_show_identifiers = extract_identifiers_from_item(
plex_show, self.generate_guids, self.generate_locations
)
# Try to find a matching series in your stored library.
for stored_series in library_data.series:
if check_same_identifiers(
plex_show_identifiers, stored_series.identifiers
):
logger.trace(f"Found matching show for '{plex_show.title}'")
# Now update episodes.
# Get the list of Plex episodes for this show.
plex_episodes = plex_show.episodes()
for plex_episode in plex_episodes:
plex_episode_identifiers = extract_identifiers_from_item(
plex_episode,
self.generate_guids,
self.generate_locations,
)
for stored_ep in stored_series.episodes:
if check_same_identifiers(
plex_episode_identifiers, stored_ep.identifiers
):
if stored_ep.status.completed:
msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.updateTimeline(
stored_ep.status.time
)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
stored_ep.status.time,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
break # Found a matching episode.
break # Found a matching show.
def update_watched(
self,
watched_list: dict[str, UserData],
@ -525,7 +574,7 @@ class Plex:
continue
try:
update_user_watched(
self.update_user_watched(
user,
user_plex,
library_data,
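src/plex.py follows the same pattern as the Jellyfin/Emby side: the constructor takes env first, generate_guids/generate_locations become instance attributes, and update_user_watched moves from a module-level function to a method so it can use them via self. A construction sketch with placeholder values (a reachable Plex server is required for the login to succeed):

from dotenv import dotenv_values
from src.plex import Plex

env = dotenv_values(".env")
plex = Plex(env, base_url="http://localhost:32400", token="SuperSecretToken")
print(plex.generate_guids, plex.generate_locations)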