From c5895004cc8e091b3e0e668a563e262b8e696fb9 Mon Sep 17 00:00:00 2001 From: olived Date: Mon, 21 Jun 2021 20:31:45 +0200 Subject: [PATCH 1/9] [backend] Enhance sonarr job error handling --- server/api/v1/endpoints/requests.py | 4 +--- server/jobs/sonarr.py | 2 ++ server/services/sonarr.py | 9 +++++++-- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/server/api/v1/endpoints/requests.py b/server/api/v1/endpoints/requests.py index 0fad3998..44ebf43e 100644 --- a/server/api/v1/endpoints/requests.py +++ b/server/api/v1/endpoints/requests.py @@ -394,7 +394,7 @@ async def update_series_request( if request is None: raise HTTPException(status.HTTP_404_NOT_FOUND, "The request was not found.") if request.status != RequestStatus.pending: - raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot update a non pending request.") + raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot update a non-pending request.") if request_update.status == RequestStatus.approved: if request_update.provider_id is not None: selected_provider = await media_provider_repo.find_by( @@ -455,7 +455,5 @@ async def delete_series_request( and not check_permissions(current_user.roles, [UserRole.manage_requests]) ): raise HTTPException(status.HTTP_404_NOT_FOUND, "This request does not exist.") - if request.status != RequestStatus.pending and request.requesting_user_id == current_user.id: - raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot delete a non pending request.") await media_request_repo.remove(request) return {"detail": "Request deleted."} diff --git a/server/jobs/sonarr.py b/server/jobs/sonarr.py index b2b7d689..2fede6fe 100644 --- a/server/jobs/sonarr.py +++ b/server/jobs/sonarr.py @@ -21,6 +21,8 @@ async def sonarr_sync(): if series_lookup is None: continue series = await sonarr.get_series(setting, series_lookup.id) + if series is None: + continue req_seasons_available = 0 for req_season in request.seasons: if not req_season.episodes: diff --git a/server/services/sonarr.py 
b/server/services/sonarr.py index a1cc7456..a30bdbb4 100644 --- a/server/services/sonarr.py +++ b/server/services/sonarr.py @@ -133,7 +133,7 @@ async def lookup( return SonarrSeries.parse_obj(lookup_result[0]) -async def get_series(setting: SonarrSetting, series_id: int) -> SonarrSeries: +async def get_series(setting: SonarrSetting, series_id: int) -> Optional[SonarrSeries]: url = make_url( api_key=setting.api_key, host=setting.host, @@ -142,7 +142,10 @@ async def get_series(setting: SonarrSetting, series_id: int) -> SonarrSeries: version=setting.version, resource_path=f"/series/{series_id}", ) - resp = await HttpClient.request("GET", url) + try: + resp = await HttpClient.request("GET", url) + except HTTPException: + return None return SonarrSeries.parse_obj(resp) @@ -232,6 +235,8 @@ async def send_request(request: SeriesRequest): await asyncio.sleep(2) else: series = await get_series(setting, series.id) + if series is None: + return # request seasons is empty so we are requesting all the series if not request.seasons: for season in series.seasons: From 72f896261db9acacebbd44b25a0dd6da936b9ddd Mon Sep 17 00:00:00 2001 From: BogetC Date: Mon, 12 Jul 2021 21:53:55 +0200 Subject: [PATCH 2/9] fix: profile update access --- .../components/user-dropdown/UserDropdown.tsx | 2 +- .../logged-in-app/pages/user-profile/Profile.tsx | 5 +---- client/src/router/routes.ts | 3 +-- client/src/shared/hooks/useUser.ts | 16 ++++++++++++++-- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/client/src/logged-in-app/navbar/components/user-dropdown/UserDropdown.tsx b/client/src/logged-in-app/navbar/components/user-dropdown/UserDropdown.tsx index 4fee99ea..6c2efc2f 100644 --- a/client/src/logged-in-app/navbar/components/user-dropdown/UserDropdown.tsx +++ b/client/src/logged-in-app/navbar/components/user-dropdown/UserDropdown.tsx @@ -74,7 +74,7 @@ const UserDropdown = ({ return ( - history.push(routes.PROFILE.url(""))}> + history.push(routes.PROFILE.url())}> diff --git 
a/client/src/logged-in-app/pages/user-profile/Profile.tsx b/client/src/logged-in-app/pages/user-profile/Profile.tsx index 439d4f59..9f10c993 100644 --- a/client/src/logged-in-app/pages/user-profile/Profile.tsx +++ b/client/src/logged-in-app/pages/user-profile/Profile.tsx @@ -118,10 +118,7 @@ const Profile = () => { - {user && - checkRole(user.roles, [Roles.ADMIN, Roles.MANAGE_USERS], true) && ( - - )} + ); diff --git a/client/src/router/routes.ts b/client/src/router/routes.ts index ae24effb..3e3e2444 100644 --- a/client/src/router/routes.ts +++ b/client/src/router/routes.ts @@ -42,8 +42,7 @@ const routes = { /** USERS **/ USERS: { url: "/users", component: UsersSettings }, PROFILE: { - url: (id: number | string) => - "/profile" + (id.toString().length > 0 ? `/${id}` : ""), + url: (id?: number | string) => `/profile${id ? `/${id}` : ""}`, component: Profile, }, diff --git a/client/src/shared/hooks/useUser.ts b/client/src/shared/hooks/useUser.ts index 747ded75..e2b0777d 100644 --- a/client/src/shared/hooks/useUser.ts +++ b/client/src/shared/hooks/useUser.ts @@ -3,6 +3,10 @@ import { DefaultAsyncCall, IAsyncCall } from "../models/IAsyncCall"; import { IUser } from "../models/IUser"; import { useSession } from "../contexts/SessionContext"; import { useUserService } from "../toRefactor/useUserService"; +import { checkRole } from "../../utils/roles"; +import { Roles } from "../enums/Roles"; +import { useHistory } from "react-router-dom"; +import { routes } from "../../router/routes"; export const useUser = (id?: string) => { const [currentUser, setCurrentUser] = useState>( @@ -12,16 +16,24 @@ export const useUser = (id?: string) => { session: { user }, } = useSession(); const { getUserById } = useUserService(); + const history = useHistory(); useEffect(() => { - if (id && user && id !== user.id.toString(10)) { + if ( + id && + user && + id !== user.id.toString(10) && + checkRole(user.roles, [Roles.MANAGE_USERS, Roles.ADMIN], true) + ) { getUserById(parseInt(id, 
10)).then((res) => { if (res.status === 200) { setCurrentUser(res); } }); - } else if (user) { + } else if (user && id === undefined) { setCurrentUser({ isLoading: false, data: user, status: 200 }); + } else { + history.push(routes.PROFILE.url()); } // eslint-disable-next-line react-hooks/exhaustive-deps From 78d4eaed40493e5c1acb50366132ea4f928db2b9 Mon Sep 17 00:00:00 2001 From: olived Date: Mon, 12 Jul 2021 22:38:07 +0200 Subject: [PATCH 3/9] Revert "[backend] Fix access logs" This reverts commit 6bbe4701 --- server/core/config.py | 4 ---- server/core/logger.py | 19 ++++++++++++++----- server/main.py | 4 +--- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/server/core/config.py b/server/core/config.py index 33a903c5..2e1a31b7 100644 --- a/server/core/config.py +++ b/server/core/config.py @@ -12,8 +12,6 @@ ) from tzlocal import get_localzone -from server.core.logger import Logger - class Config(BaseSettings): @@ -93,8 +91,6 @@ def update(self, **config_kwargs): for field_k, field_v in config_kwargs.items(): if field_k in self.__fields__ and field_v is not None: setattr(self, field_k, field_v) - if field_k == "log_level": - Logger.make_logger(self.logs_folder / self.logs_filename, self.log_level) self.write_file() get_config.cache_clear() diff --git a/server/core/logger.py b/server/core/logger.py index b8178d24..17e18d47 100644 --- a/server/core/logger.py +++ b/server/core/logger.py @@ -6,6 +6,8 @@ from loguru import logger +from server.core.config import get_config + class InterceptHandler(logging.Handler): loglevel_mapping = { @@ -31,6 +33,12 @@ def emit(self, record): logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage()) +class LogLevelFilter: + def __call__(self, record): + levelno = logger.level(get_config().log_level).no + return record["level"].no >= levelno + + class Formatter: fmt = "{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | {name}{extra[padding]} | {message}\n{exception}" padding = 0 @@ -45,11 +53,10 @@ 
def format(cls, record): class Logger: @classmethod - def make_logger(cls, log_path: Path, log_level: str): - logger.remove() + def make_logger(cls): log = cls.customize_logging( - filepath=log_path, - level=log_level, + get_config().logs_folder / get_config().logs_filename, + level=get_config().log_level, rotation="1 day", retention="1 week", ) @@ -64,7 +71,8 @@ def customize_logging(cls, filepath: Path, level: str, rotation: str, retention: backtrace=True, diagnose=True, colorize=True, - level=level.upper(), + level=0, + filter=LogLevelFilter(), format=Formatter.format, ) logger.add( @@ -76,6 +84,7 @@ def customize_logging(cls, filepath: Path, level: str, rotation: str, retention: diagnose=False, colorize=False, level=level.upper(), + filter=LogLevelFilter(), serialize=True, format="{message}", ) diff --git a/server/main.py b/server/main.py index 464e8c27..03777209 100644 --- a/server/main.py +++ b/server/main.py @@ -26,9 +26,7 @@ def setup_app() -> FastAPI: allow_methods=["*"], allow_headers=["*"], ) - Logger.make_logger( - get_config().logs_folder / get_config().logs_filename, get_config().log_level - ) + application.logger = Logger.make_logger() from server import jobs # noqa From 993206911d1787161125775ab26110816bb7c00e Mon Sep 17 00:00:00 2001 From: olived Date: Mon, 12 Jul 2021 22:38:32 +0200 Subject: [PATCH 4/9] [backend] Try using uvicorn instead of gunicorn --- Dockerfile | 9 +++++++-- server/alembic.ini | 5 +++++ server/api/v1/endpoints/system.py | 7 +++---- server/database/init_db.py | 4 ++-- server/schemas/system.py | 4 ++-- 5 files changed, 19 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index ab6ca345..dc095667 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ COPY /client ./client RUN cd client && yarn install && yarn cache clean && yarn build --production -FROM jerolico/cheddarr-base-image +FROM python:3.9.5-slim WORKDIR /app # Copy front build @@ -18,7 +18,7 @@ COPY --from=FRONT_STAGE /app/client/build ./client/build 
COPY /pyproject.toml /poetry.lock* /app/ # Install Poetry and backend dependencies -RUN pip install poetry && \ +RUN pip install poetry alembic && \ poetry config virtualenvs.create false && \ poetry install --no-root --no-dev @@ -26,4 +26,9 @@ RUN pip install poetry && \ COPY /server ./server COPY cheddarr.py . +# Run migration +RUN cd server && alembic upgrade head + EXPOSE 9090 +ENTRYPOINT ["python", "cheddarr.py", "run"] + diff --git a/server/alembic.ini b/server/alembic.ini index cd7b8ace..023a4130 100644 --- a/server/alembic.ini +++ b/server/alembic.ini @@ -7,6 +7,11 @@ script_location = ./database/migrations # template used to generate migration files # file_template = %%(rev)s_%%(slug)s +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = .. + + # timezone to use when rendering the date # within the migration file as well as the filename. # string value is passed to dateutil.tz.gettz() diff --git a/server/api/v1/endpoints/system.py b/server/api/v1/endpoints/system.py index ee8b10ff..022dd489 100644 --- a/server/api/v1/endpoints/system.py +++ b/server/api/v1/endpoints/system.py @@ -1,6 +1,6 @@ import json import math -from typing import Dict, List, Literal, Optional +from typing import List, Literal from fastapi import APIRouter, Body, Depends, HTTPException, status @@ -72,14 +72,13 @@ def get_jobs(): ) def modify_job( job_id: str, - action: Literal["run", "pause", "resume"] = Body(...), - params: Optional[Dict] = Body(None), + action: Literal["run", "pause", "resume"] = Body(..., embed=True), ): job = scheduler.get_job(job_id) if job is None: raise HTTPException(status.HTTP_404_NOT_FOUND, "This job does not exist.") if action == "run": - scheduler.add_job(job.func, replace_existing=True, kwargs=params) + scheduler.add_job(job.func, replace_existing=True) elif action == "pause": job.pause() elif action == "resume": diff --git a/server/database/init_db.py b/server/database/init_db.py 
index af6608aa..dedbf057 100644 --- a/server/database/init_db.py +++ b/server/database/init_db.py @@ -4,8 +4,8 @@ def init_db(): - from server.database.session import DBSession + from server.database.session import EngineMaker - with DBSession.create_sync_engine().begin() as conn: + with EngineMaker.create_sync_engine().begin() as conn: Base.metadata.drop_all(bind=conn) Base.metadata.create_all(bind=conn) diff --git a/server/schemas/system.py b/server/schemas/system.py index 5ce6675a..b0a28531 100644 --- a/server/schemas/system.py +++ b/server/schemas/system.py @@ -7,8 +7,8 @@ class PublicConfig(BaseModel): - log_level: str - default_roles: int + log_level: Optional[str] + default_roles: Optional[int] class Log(BaseModel): From 35c5e011976272b2766849c33e44c5d92f6fd2e4 Mon Sep 17 00:00:00 2001 From: olived Date: Tue, 13 Jul 2021 21:28:58 +0200 Subject: [PATCH 5/9] [backend] Fix media table constraints + fix request deletion --- server/api/v1/endpoints/requests.py | 2 - .../migrations/versions/1b33a8f77eda_.py | 42 +++++++++++++++++++ server/models/media.py | 11 +++-- 3 files changed, 50 insertions(+), 5 deletions(-) create mode 100644 server/database/migrations/versions/1b33a8f77eda_.py diff --git a/server/api/v1/endpoints/requests.py b/server/api/v1/endpoints/requests.py index 44ebf43e..73f218d6 100644 --- a/server/api/v1/endpoints/requests.py +++ b/server/api/v1/endpoints/requests.py @@ -232,8 +232,6 @@ async def delete_movie_request( and not check_permissions(current_user.roles, [UserRole.manage_requests]) ): raise HTTPException(status.HTTP_404_NOT_FOUND, "This request does not exist.") - if request.status != RequestStatus.pending and request.requesting_user_id == current_user.id: - raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot delete a non pending request.") await media_request_repo.remove(request) return {"detail": "Request deleted."} diff --git a/server/database/migrations/versions/1b33a8f77eda_.py 
b/server/database/migrations/versions/1b33a8f77eda_.py new file mode 100644 index 00000000..223395bb --- /dev/null +++ b/server/database/migrations/versions/1b33a8f77eda_.py @@ -0,0 +1,42 @@ +"""empty message + +Revision ID: 1b33a8f77eda +Revises: 81c905106fb6 +Create Date: 2021-07-13 21:28:37.805508 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1b33a8f77eda' +down_revision = '81c905106fb6' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('media', schema=None) as batch_op: + batch_op.drop_index('ix_media_imdb_id') + batch_op.drop_index('ix_media_tmdb_id') + batch_op.drop_index('ix_media_tvdb_id') + batch_op.create_unique_constraint(batch_op.f('uq_media_imdb_id'), ['imdb_id', 'media_type']) + batch_op.create_unique_constraint(batch_op.f('uq_media_tmdb_id'), ['tmdb_id', 'media_type']) + batch_op.create_unique_constraint(batch_op.f('uq_media_tvdb_id'), ['tvdb_id', 'media_type']) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('media', schema=None) as batch_op: + batch_op.drop_constraint(batch_op.f('uq_media_tvdb_id'), type_='unique') + batch_op.drop_constraint(batch_op.f('uq_media_tmdb_id'), type_='unique') + batch_op.drop_constraint(batch_op.f('uq_media_imdb_id'), type_='unique') + batch_op.create_index('ix_media_tvdb_id', ['tvdb_id'], unique=False) + batch_op.create_index('ix_media_tmdb_id', ['tmdb_id'], unique=False) + batch_op.create_index('ix_media_imdb_id', ['imdb_id'], unique=False) + + # ### end Alembic commands ### diff --git a/server/models/media.py b/server/models/media.py index 6a42815d..abbf3f71 100644 --- a/server/models/media.py +++ b/server/models/media.py @@ -27,11 +27,16 @@ class SeriesType(str, Enum): class Media(Model): __repr_props__ = ("title", "media_type", "tmdb_id", "imdb_id", "tvdb_id") + __table_args__ = ( + UniqueConstraint("tmdb_id", "media_type"), + UniqueConstraint("imdb_id", "media_type"), + UniqueConstraint("tvdb_id", "media_type"), + ) id = Column(Integer, primary_key=True) - tmdb_id = Column(Integer, unique=True, index=True) - imdb_id = Column(String, unique=True, index=True) - tvdb_id = Column(Integer, unique=True, index=True) + tmdb_id = Column(Integer) + imdb_id = Column(String) + tvdb_id = Column(Integer) title = Column(String, nullable=False) media_type = Column(DBEnum(MediaType), nullable=False) From 8329e9c1765a7177556f28c5447329c70f532ddd Mon Sep 17 00:00:00 2001 From: olived Date: Tue, 13 Jul 2021 23:08:58 +0200 Subject: [PATCH 6/9] [backend] Move auto migration in code --- Dockerfile | 3 --- cheddarr.py | 7 ++++++- server/alembic.ini | 7 +------ 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index dc095667..598c7a45 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,9 +26,6 @@ RUN pip install poetry alembic && \ COPY /server ./server COPY cheddarr.py . 
-# Run migration -RUN cd server && alembic upgrade head - EXPOSE 9090 ENTRYPOINT ["python", "cheddarr.py", "run"] diff --git a/cheddarr.py b/cheddarr.py index 7237bb3b..9c06de08 100755 --- a/cheddarr.py +++ b/cheddarr.py @@ -1,6 +1,7 @@ #!/usr/bin/env python -import click +from pathlib import Path +import click """USAGE: python cheddarr.py [OPTIONS] COMMAND @@ -45,6 +46,10 @@ def run(ctx): import uvicorn debug = ctx.obj["DEBUG"] + if not debug: + import alembic.command, alembic.config + + alembic.command.upgrade(alembic.config.Config(Path.cwd() / "server/alembic.ini"), "head") uvicorn.run( "server.main:app", host="0.0.0.0", diff --git a/server/alembic.ini b/server/alembic.ini index 023a4130..956100a4 100644 --- a/server/alembic.ini +++ b/server/alembic.ini @@ -2,16 +2,11 @@ [alembic] # path to migration scripts -script_location = ./database/migrations +script_location = %(here)s/database/migrations # template used to generate migration files # file_template = %%(rev)s_%%(slug)s -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = .. - - # timezone to use when rendering the date # within the migration file as well as the filename. 
# string value is passed to dateutil.tz.gettz() From e07133cc37dc99bac62702b8dbdfbe6446d4c042 Mon Sep 17 00:00:00 2001 From: olived Date: Wed, 14 Jul 2021 00:17:16 +0200 Subject: [PATCH 7/9] [backend] Fix logs --- cheddarr.py | 5 +++-- server/core/logger.py | 3 +-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cheddarr.py b/cheddarr.py index 9c06de08..7307089d 100755 --- a/cheddarr.py +++ b/cheddarr.py @@ -47,9 +47,10 @@ def run(ctx): debug = ctx.obj["DEBUG"] if not debug: - import alembic.command, alembic.config + from alembic.command import upgrade + from alembic.config import Config - alembic.command.upgrade(alembic.config.Config(Path.cwd() / "server/alembic.ini"), "head") + upgrade(Config(Path.cwd() / "server/alembic.ini"), "head") uvicorn.run( "server.main:app", host="0.0.0.0", diff --git a/server/core/logger.py b/server/core/logger.py index 17e18d47..6919e379 100644 --- a/server/core/logger.py +++ b/server/core/logger.py @@ -88,8 +88,7 @@ def customize_logging(cls, filepath: Path, level: str, rotation: str, retention: serialize=True, format="{message}", ) - - logging.basicConfig(handlers=[InterceptHandler()], level=0) + logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) for _log in [ *logging.root.manager.loggerDict.keys(), From 033a6d9c076f15d092489cdfd941e6daf84bfbd6 Mon Sep 17 00:00:00 2001 From: olived Date: Wed, 14 Jul 2021 01:18:50 +0200 Subject: [PATCH 8/9] [backend] Update Radarr/Sonarr jobs --- server/jobs/radarr.py | 4 ++++ server/jobs/sonarr.py | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/server/jobs/radarr.py b/server/jobs/radarr.py index 79e4b24d..e2e12f00 100644 --- a/server/jobs/radarr.py +++ b/server/jobs/radarr.py @@ -22,6 +22,10 @@ async def sync_radarr(): ) if movie_lookup is None: continue + if movie_lookup.id is None: + request.status = RequestStatus.refused + await media_request_repo.save(request) + continue if movie_lookup.has_file: request.status = RequestStatus.available await 
media_request_repo.save(request) diff --git a/server/jobs/sonarr.py b/server/jobs/sonarr.py index 2fede6fe..a1671c08 100644 --- a/server/jobs/sonarr.py +++ b/server/jobs/sonarr.py @@ -20,6 +20,11 @@ async def sonarr_sync(): series_lookup = await sonarr.lookup(setting, tvdb_id=request.media.tvdb_id) if series_lookup is None: continue + if series_lookup.id is None: + request.status = RequestStatus.refused + await media_request_repo.save(request) + continue + series = await sonarr.get_series(setting, series_lookup.id) if series is None: continue From 90f59b98b6e1bd3c18463656460ff866e2c56413 Mon Sep 17 00:00:00 2001 From: olived Date: Fri, 16 Jul 2021 22:41:42 +0200 Subject: [PATCH 9/9] [backend] Remove tmdbsimple lib --- poetry.lock | 19 +----- pyproject.toml | 1 - server/core/http_client.py | 4 +- server/jobs/plex.py | 10 +-- server/services/tmdb.py | 127 +++++++++++++++++++++++++------------ 5 files changed, 94 insertions(+), 67 deletions(-) diff --git a/poetry.lock b/poetry.lock index 62d4438c..6ddbf235 100644 --- a/poetry.lock +++ b/poetry.lock @@ -847,17 +847,6 @@ python-versions = ">=3.6" [package.extras] full = ["aiofiles", "graphene", "itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] -[[package]] -name = "tmdbsimple" -version = "2.8.0" -description = "A Python wrapper for The Movie Database API v3" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -requests = "*" - [[package]] name = "toml" version = "0.10.2" @@ -927,8 +916,8 @@ dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"] [metadata] lock-version = "1.1" -python-versions = "^3.8" -content-hash = "e403d7f23a09f8c5dc5be4911a3eae71db1c96564ef2c9410d39c04680793974" +python-versions = "^3.9.5" +content-hash = "1476b7ce219c62f6adce0a4b4c5c034ca7b6fee425fdd33a6a732eefb7b74453" [metadata.files] aiofiles = [ @@ -1500,10 +1489,6 @@ starlette = [ {file = "starlette-0.14.2-py3-none-any.whl", hash = 
"sha256:3c8e48e52736b3161e34c9f0e8153b4f32ec5d8995a3ee1d59410d92f75162ed"}, {file = "starlette-0.14.2.tar.gz", hash = "sha256:7d49f4a27f8742262ef1470608c59ddbc66baf37c148e938c7038e6bc7a998aa"}, ] -tmdbsimple = [ - {file = "tmdbsimple-2.8.0-py3-none-any.whl", hash = "sha256:c882112396634bc8d42689fdf347e4a230f6897e5c423fd4d3ab1411dcb4b98f"}, - {file = "tmdbsimple-2.8.0.tar.gz", hash = "sha256:2f6f4b762e07e71e222b3200e3b83c4fce8ff61892402a60be925f79d49ab7fd"}, -] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, diff --git a/pyproject.toml b/pyproject.toml index 4dd9721b..9edf5b09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,6 @@ pydantic = "^1.8.2" PyJWT = {version = "^2.1.0", extras = ["crypto"]} python-multipart = "^0.0.5" SQLAlchemy = "^1.4.18" -tmdbsimple = "^2.8.0" uvicorn = "^0.14.0" diff --git a/server/core/http_client.py b/server/core/http_client.py index 044fe38b..9b558c83 100644 --- a/server/core/http_client.py +++ b/server/core/http_client.py @@ -26,8 +26,8 @@ async def request( method: str, url: str, *, - params: Optional[Mapping[str, str]] = None, - headers: Optional[Mapping[str, str]] = None, + params: Optional[Mapping[str, Any]] = None, + headers: Optional[Mapping[str, Any]] = None, data: Any = None, ) -> Any: client = cls.get_http_client() diff --git a/server/jobs/plex.py b/server/jobs/plex.py index 69cc0a37..5dff3a15 100644 --- a/server/jobs/plex.py +++ b/server/jobs/plex.py @@ -29,10 +29,6 @@ from server.repositories.settings import PlexSettingRepository from server.services import plex, tmdb -TMDB_REGEX = "tmdb|themoviedb" -IMDB_REGEX = "imdb" -TVDB_REGEX = "tvdb|thetvdb" - @scheduler.scheduled_job( "interval", @@ -342,6 +338,10 @@ async def process_plex_episode( async def find_guids(media: Union[PlexMovie, PlexSeries]) -> (int, str, 
int): + tmdb_regex = "tmdb|themoviedb" + imdb_regex = "imdb" + tvdb_regex = "tvdb|thetvdb" + try: guids = [media.guid] if hasattr(media, "guids") and media.guids is not None: @@ -362,7 +362,7 @@ def find_guid(regex): None, ) - tmdb_id, imdb_id, tvdb_id = find_guid(TMDB_REGEX), find_guid(IMDB_REGEX), find_guid(TVDB_REGEX) + tmdb_id, imdb_id, tvdb_id = find_guid(tmdb_regex), find_guid(imdb_regex), find_guid(tvdb_regex) try: if tmdb_id is None: diff --git a/server/services/tmdb.py b/server/services/tmdb.py index a36d52f1..73897dc1 100644 --- a/server/services/tmdb.py +++ b/server/services/tmdb.py @@ -1,10 +1,8 @@ import re from typing import List, Optional, Union -import tmdbsimple as tmdb -from asgiref.sync import sync_to_async - from server.core.config import get_config +from server.core.http_client import HttpClient from server.models.media import MediaType, SeriesType from server.schemas.media import ( EpisodeSchema, @@ -17,14 +15,19 @@ TmdbSeries, ) -tmdb.API_KEY = get_config().tmdb_api_key +TMDB_API_KEY = get_config().tmdb_api_key +TMDB_URL = "https://api.themoviedb.org/3" async def search_tmdb_media( term: str, page: int ) -> (List[Union[MovieSchema, SeriesSchema]], int, int): - search = await sync_to_async(tmdb.Search().multi)( - query=term, page=page, append_to_response="external_ids" + search = await HttpClient.request( + "GET", + f"{TMDB_URL}/search/multi", + params=dict( + api_key=TMDB_API_KEY, append_to_response="external_ids", query=term, page=page + ), ) results = [] for media in search["results"]: @@ -41,7 +44,13 @@ async def search_tmdb_media( async def search_tmdb_movies(term: str, page: int) -> (List[MovieSchema], int, int): - search = await sync_to_async(tmdb.Search().movie)(query=term, page=page) + search = await HttpClient.request( + "GET", + f"{TMDB_URL}/search/movie", + params=dict( + api_key=TMDB_API_KEY, append_to_response="external_ids", query=term, page=page + ), + ) results = [] for movie in search["results"]: parsed_movie = 
MovieSchema(**TmdbMovie.parse_obj(movie).dict()) @@ -50,7 +59,13 @@ async def search_tmdb_movies(term: str, page: int) -> (List[MovieSchema], int, i async def search_tmdb_series(term: str, page: int) -> (List[SeriesSchema], int, int): - search = await sync_to_async(tmdb.Search().tv)(query=term, page=page) + search = await HttpClient.request( + "GET", + f"{TMDB_URL}/search/tv", + params=dict( + api_key=TMDB_API_KEY, append_to_response="external_ids", query=term, page=page + ), + ) results = [] for series in search["results"]: parsed_media = SeriesSchema(**TmdbSeries.parse_obj(series).dict()) @@ -59,65 +74,77 @@ async def search_tmdb_series(term: str, page: int) -> (List[SeriesSchema], int, async def get_tmdb_movie(tmdb_id: int) -> Optional[MovieSchema]: - try: - movie = await sync_to_async(tmdb.Movies(tmdb_id).info)( - append_to_response="external_ids,credits,videos" - ) - except Exception: - return None + movie = await HttpClient.request( + "GET", + f"{TMDB_URL}/movie/{tmdb_id}", + params=dict(api_key=TMDB_API_KEY, append_to_response="external_ids,credits,videos"), + ) set_tmdb_movie_info(movie) return MovieSchema(**TmdbMovie.parse_obj(movie).dict()) async def get_tmdb_series(tmdb_id: int) -> Optional[SeriesSchema]: - try: - series = await sync_to_async(tmdb.TV(tmdb_id).info)( - append_to_response="external_ids,aggregate_credits,videos" - ) - except Exception: - return None + series = await HttpClient.request( + "GET", + f"{TMDB_URL}/tv/{tmdb_id}", + params=dict( + api_key=TMDB_API_KEY, append_to_response="external_ids,aggregate_credits,videos" + ), + ) set_tmdb_series_info(series) return SeriesSchema(**TmdbSeries.parse_obj(series).dict()) async def get_tmdb_season(tmdb_id: int, season_number: int) -> Optional[SeasonSchema]: - try: - season = await sync_to_async(tmdb.TV_Seasons(tmdb_id, season_number).info)( - append_to_response="external_ids,credits" - ) - except Exception: - return None + season = await HttpClient.request( + "GET", + 
f"{TMDB_URL}/tv/{tmdb_id}/season/{season_number}", + params=dict(api_key=TMDB_API_KEY, append_to_response="external_ids,credits"), + ) return SeasonSchema(**TmdbSeason.parse_obj(season).dict()) async def get_tmdb_episode( tmdb_id: int, season_number: int, episode_number: int ) -> Optional[EpisodeSchema]: - try: - episode = await sync_to_async( - tmdb.TV_Episodes(tmdb_id, season_number, episode_number).info - )(append_to_response="external_ids,credits") - except Exception: - return None + episode = await HttpClient.request( + "GET", + f"{TMDB_URL}/tv/{tmdb_id}/season/{season_number}/episode/{episode_number}", + params=dict(api_key=TMDB_API_KEY, append_to_response="external_ids,credits"), + ) return EpisodeSchema(**TmdbEpisode.parse_obj(episode).dict()) async def find_tmdb_id_from_external_id(imdb_id=None, tvdb_id=None) -> int: find = {} if not find and imdb_id is not None: - find = await sync_to_async(tmdb.Find(imdb_id).info)(external_source="imdb_id") + find = await HttpClient.request( + "GET", + f"{TMDB_URL}/find/{imdb_id}", + params=dict(api_key=TMDB_API_KEY, external_source="imdb_id"), + ) elif not find and tvdb_id is not None: - find = await sync_to_async(tmdb.Find(tvdb_id).info)(external_source="tvdb_id") + find = await HttpClient.request( + "GET", + f"{TMDB_URL}/find/{tvdb_id}", + params=dict(api_key=TMDB_API_KEY, external_source="tvdb_id"), + ) tmdb_media = next((m[0] for m in find.values() if m), {}) return tmdb_media.get("id") async def find_external_ids_from_tmdb_id(tmdb_id: int) -> dict: - return await sync_to_async(tmdb.TV(tmdb_id).external_ids)() + return await HttpClient.request( + "GET", + f"{TMDB_URL}/tv/{tmdb_id}/external_ids", + params=dict(api_key=TMDB_API_KEY), + ) async def get_tmdb_popular_movies(page: int = 1) -> (List[MovieSchema], int, int): - search = await sync_to_async(tmdb.Movies().popular)(page=page) + search = await HttpClient.request( + "GET", f"{TMDB_URL}/movie/popular", params=dict(api_key=TMDB_API_KEY, page=page) + ) results = []
for movie in search["results"]: @@ -128,7 +155,9 @@ async def get_tmdb_popular_movies(page: int = 1) -> (List[MovieSchema], int, int async def get_tmdb_upcoming_movies(page: int = 1) -> (List[MovieSchema], int, int): - search = await sync_to_async(tmdb.Movies().upcoming)(page=page) + search = await HttpClient.request( + "GET", f"{TMDB_URL}/movie/upcoming", params=dict(api_key=TMDB_API_KEY, page=page) + ) results = [] for movie in search["results"]: @@ -139,7 +168,9 @@ async def get_tmdb_upcoming_movies(page: int = 1) -> (List[MovieSchema], int, in async def get_tmdb_similar_movies(tmdb_id: int, page: int = 1) -> (List[MovieSchema], int, int): - search = await sync_to_async(tmdb.Movies(tmdb_id).similar_movies)(page=page) + search = await HttpClient.request( + "GET", f"{TMDB_URL}/movie/{tmdb_id}/similar", params=dict(api_key=TMDB_API_KEY, page=page) + ) results = [] for movie in search["results"]: @@ -152,7 +183,11 @@ async def get_tmdb_similar_movies(tmdb_id: int, page: int = 1) -> (List[MovieSch async def get_tmdb_recommended_movies( tmdb_id: int, page: int = 1 ) -> (List[MovieSchema], int, int): - search = await sync_to_async(tmdb.Movies(tmdb_id).recommendations)(page=page) + search = await HttpClient.request( + "GET", + f"{TMDB_URL}/movie/{tmdb_id}/recommendations", + params=dict(api_key=TMDB_API_KEY, page=page), + ) results = [] for movie in search["results"]: @@ -163,7 +198,9 @@ async def get_tmdb_recommended_movies( async def get_tmdb_popular_series(page: int = 1) -> (List[SeriesSchema], int, int): - search = await sync_to_async(tmdb.TV().popular)(page=page) + search = await HttpClient.request( + "GET", f"{TMDB_URL}/tv/popular", params=dict(api_key=TMDB_API_KEY, page=page) + ) results = [] for series in search["results"]: @@ -174,7 +211,9 @@ async def get_tmdb_popular_series(page: int = 1) -> (List[SeriesSchema], int, in async def get_tmdb_similar_series(tmdb_id: int, page: int = 1) -> (List[SeriesSchema], int, int): - search = await 
sync_to_async(tmdb.TV(tmdb_id).similar)(page=page) + search = await HttpClient.request( + "GET", f"{TMDB_URL}/tv/{tmdb_id}/similar", params=dict(api_key=TMDB_API_KEY, page=page) + ) results = [] for series in search["results"]: @@ -187,7 +226,11 @@ async def get_tmdb_similar_series(tmdb_id: int, page: int = 1) -> (List[SeriesSc async def get_tmdb_recommended_series( tmdb_id: int, page: int = 1 ) -> (List[SeriesSchema], int, int): - search = await sync_to_async(tmdb.TV(tmdb_id).recommendations)(page=page) + search = await HttpClient.request( + "GET", + f"{TMDB_URL}/tv/{tmdb_id}/recommendations", + params=dict(api_key=TMDB_API_KEY, page=page), + ) results = [] for series in search["results"]: