Merge pull request #120 from Jeroli-co/develop
Develop
Olived-fr committed Oct 4, 2021
2 parents 296ad3a + 90f59b9 commit 6eadffe
Showing 24 changed files with 215 additions and 112 deletions.
6 changes: 4 additions & 2 deletions Dockerfile
@@ -8,7 +8,7 @@ COPY /client ./client
RUN cd client && yarn install && yarn cache clean && yarn build --production


FROM jerolico/cheddarr-base-image
FROM python:3.9.5-slim
WORKDIR /app

# Copy front build
@@ -18,7 +18,7 @@ COPY --from=FRONT_STAGE /app/client/build ./client/build
COPY /pyproject.toml /poetry.lock* /app/

# Install Poetry and backend dependencies
RUN pip install poetry && \
RUN pip install poetry alembic && \
poetry config virtualenvs.create false && \
poetry install --no-root --no-dev

@@ -27,3 +27,5 @@ COPY /server ./server
COPY cheddarr.py .

EXPOSE 9090
ENTRYPOINT ["python", "cheddarr.py", "run"]

8 changes: 7 additions & 1 deletion cheddarr.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
import click
from pathlib import Path

import click

"""USAGE:
python cheddarr.py [OPTIONS] COMMAND
@@ -45,6 +46,11 @@ def run(ctx):
import uvicorn

debug = ctx.obj["DEBUG"]
if not debug:
from alembic.command import upgrade
from alembic.config import Config

upgrade(Config(Path.cwd() / "server/alembic.ini"), "head")
uvicorn.run(
"server.main:app",
host="0.0.0.0",
@@ -74,7 +74,7 @@ const UserDropdown = ({

return (
<Container isVisible={isVisible} ref={dropdownRef}>
<Item onClick={() => history.push(routes.PROFILE.url(""))}>
<Item onClick={() => history.push(routes.PROFILE.url())}>
<DropdownMenuItemIcon>
<Icon icon={faUserCircle} />
</DropdownMenuItemIcon>
5 changes: 1 addition & 4 deletions client/src/logged-in-app/pages/user-profile/Profile.tsx
@@ -118,10 +118,7 @@ const Profile = () => {
</div>
</SubContainer>
<PrimaryDivider />
{user &&
checkRole(user.roles, [Roles.ADMIN, Roles.MANAGE_USERS], true) && (
<UpdateProfile id={profileOwner.data.id} />
)}
<UpdateProfile id={profileOwner.data.id} />
</div>
</>
);
3 changes: 1 addition & 2 deletions client/src/router/routes.ts
@@ -42,8 +42,7 @@ const routes = {
/** USERS **/
USERS: { url: "/users", component: UsersSettings },
PROFILE: {
url: (id: number | string) =>
"/profile" + (id.toString().length > 0 ? `/${id}` : ""),
url: (id?: number | string) => `/profile${id ? `/${id}` : ""}`,
component: Profile,
},

16 changes: 14 additions & 2 deletions client/src/shared/hooks/useUser.ts
@@ -3,6 +3,10 @@ import { DefaultAsyncCall, IAsyncCall } from "../models/IAsyncCall";
import { IUser } from "../models/IUser";
import { useSession } from "../contexts/SessionContext";
import { useUserService } from "../toRefactor/useUserService";
import { checkRole } from "../../utils/roles";
import { Roles } from "../enums/Roles";
import { useHistory } from "react-router-dom";
import { routes } from "../../router/routes";

export const useUser = (id?: string) => {
const [currentUser, setCurrentUser] = useState<IAsyncCall<IUser | null>>(
@@ -12,16 +16,24 @@ export const useUser = (id?: string) => {
session: { user },
} = useSession();
const { getUserById } = useUserService();
const history = useHistory();

useEffect(() => {
if (id && user && id !== user.id.toString(10)) {
if (
id &&
user &&
id !== user.id.toString(10) &&
checkRole(user.roles, [Roles.MANAGE_USERS, Roles.ADMIN], true)
) {
getUserById(parseInt(id, 10)).then((res) => {
if (res.status === 200) {
setCurrentUser(res);
}
});
} else if (user) {
} else if (user && id === undefined) {
setCurrentUser({ isLoading: false, data: user, status: 200 });
} else {
history.push(routes.PROFILE.url());
}

// eslint-disable-next-line react-hooks/exhaustive-deps
19 changes: 2 additions & 17 deletions poetry.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion pyproject.toml
@@ -29,7 +29,6 @@ pydantic = "^1.8.2"
PyJWT = {version = "^2.1.0", extras = ["crypto"]}
python-multipart = "^0.0.5"
SQLAlchemy = "^1.4.18"
tmdbsimple = "^2.8.0"
uvicorn = "^0.14.0"


2 changes: 1 addition & 1 deletion server/alembic.ini
@@ -2,7 +2,7 @@

[alembic]
# path to migration scripts
script_location = ./database/migrations
script_location = %(here)s/database/migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
6 changes: 1 addition & 5 deletions server/api/v1/endpoints/requests.py
@@ -232,8 +232,6 @@ async def delete_movie_request(
and not check_permissions(current_user.roles, [UserRole.manage_requests])
):
raise HTTPException(status.HTTP_404_NOT_FOUND, "This request does not exist.")
if request.status != RequestStatus.pending and request.requesting_user_id == current_user.id:
raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot delete a non pending request.")
await media_request_repo.remove(request)
return {"detail": "Request deleted."}

@@ -394,7 +392,7 @@ async def update_series_request(
if request is None:
raise HTTPException(status.HTTP_404_NOT_FOUND, "The request was not found.")
if request.status != RequestStatus.pending:
raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot update a non pending request.")
raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot update a non-pending request.")
if request_update.status == RequestStatus.approved:
if request_update.provider_id is not None:
selected_provider = await media_provider_repo.find_by(
@@ -455,7 +453,5 @@ async def delete_series_request(
and not check_permissions(current_user.roles, [UserRole.manage_requests])
):
raise HTTPException(status.HTTP_404_NOT_FOUND, "This request does not exist.")
if request.status != RequestStatus.pending and request.requesting_user_id == current_user.id:
raise HTTPException(status.HTTP_403_FORBIDDEN, "Cannot delete a non pending request.")
await media_request_repo.remove(request)
return {"detail": "Request deleted."}
7 changes: 3 additions & 4 deletions server/api/v1/endpoints/system.py
@@ -1,6 +1,6 @@
import json
import math
from typing import Dict, List, Literal, Optional
from typing import List, Literal

from fastapi import APIRouter, Body, Depends, HTTPException, status

@@ -72,14 +72,13 @@ def get_jobs():
)
def modify_job(
job_id: str,
action: Literal["run", "pause", "resume"] = Body(...),
params: Optional[Dict] = Body(None),
action: Literal["run", "pause", "resume"] = Body(..., embed=True),
):
job = scheduler.get_job(job_id)
if job is None:
raise HTTPException(status.HTTP_404_NOT_FOUND, "This job does not exist.")
if action == "run":
scheduler.add_job(job.func, replace_existing=True, kwargs=params)
scheduler.add_job(job.func, replace_existing=True)
elif action == "pause":
job.pause()
elif action == "resume":
4 changes: 0 additions & 4 deletions server/core/config.py
@@ -12,8 +12,6 @@
)
from tzlocal import get_localzone

from server.core.logger import Logger


class Config(BaseSettings):

@@ -93,8 +91,6 @@ def update(self, **config_kwargs):
for field_k, field_v in config_kwargs.items():
if field_k in self.__fields__ and field_v is not None:
setattr(self, field_k, field_v)
if field_k == "log_level":
Logger.make_logger(self.logs_folder / self.logs_filename, self.log_level)
self.write_file()
get_config.cache_clear()

4 changes: 2 additions & 2 deletions server/core/http_client.py
@@ -26,8 +26,8 @@ async def request(
method: str,
url: str,
*,
params: Optional[Mapping[str, str]] = None,
headers: Optional[Mapping[str, str]] = None,
params: Optional[Mapping[str, Any]] = None,
headers: Optional[Mapping[str, Any]] = None,
data: Any = None,
) -> Any:
client = cls.get_http_client()
22 changes: 15 additions & 7 deletions server/core/logger.py
@@ -6,6 +6,8 @@

from loguru import logger

from server.core.config import get_config


class InterceptHandler(logging.Handler):
loglevel_mapping = {
@@ -31,6 +33,12 @@ def emit(self, record):
logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())


class LogLevelFilter:
def __call__(self, record):
levelno = logger.level(get_config().log_level).no
return record["level"].no >= levelno


class Formatter:
fmt = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>{extra[padding]} | <level>{message}</level>\n{exception}"
padding = 0
@@ -45,11 +53,10 @@ def format(cls, record):

class Logger:
@classmethod
def make_logger(cls, log_path: Path, log_level: str):
logger.remove()
def make_logger(cls):
log = cls.customize_logging(
filepath=log_path,
level=log_level,
get_config().logs_folder / get_config().logs_filename,
level=get_config().log_level,
rotation="1 day",
retention="1 week",
)
@@ -64,7 +71,8 @@ def customize_logging(cls, filepath: Path, level: str, rotation: str, retention:
backtrace=True,
diagnose=True,
colorize=True,
level=level.upper(),
level=0,
filter=LogLevelFilter(),
format=Formatter.format,
)
logger.add(
@@ -76,11 +84,11 @@ def customize_logging(cls, filepath: Path, level: str, rotation: str, retention:
diagnose=False,
colorize=False,
level=level.upper(),
filter=LogLevelFilter(),
serialize=True,
format="{message}",
)

logging.basicConfig(handlers=[InterceptHandler()], level=0)
logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)

for _log in [
*logging.root.manager.loggerDict.keys(),
4 changes: 2 additions & 2 deletions server/database/init_db.py
@@ -4,8 +4,8 @@


def init_db():
from server.database.session import DBSession
from server.database.session import EngineMaker

with DBSession.create_sync_engine().begin() as conn:
with EngineMaker.create_sync_engine().begin() as conn:
Base.metadata.drop_all(bind=conn)
Base.metadata.create_all(bind=conn)
42 changes: 42 additions & 0 deletions server/database/migrations/versions/1b33a8f77eda_.py
@@ -0,0 +1,42 @@
"""empty message
Revision ID: 1b33a8f77eda
Revises: 81c905106fb6
Create Date: 2021-07-13 21:28:37.805508
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '1b33a8f77eda'
down_revision = '81c905106fb6'
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('media', schema=None) as batch_op:
batch_op.drop_index('ix_media_imdb_id')
batch_op.drop_index('ix_media_tmdb_id')
batch_op.drop_index('ix_media_tvdb_id')
batch_op.create_unique_constraint(batch_op.f('uq_media_imdb_id'), ['imdb_id', 'media_type'])
batch_op.create_unique_constraint(batch_op.f('uq_media_tmdb_id'), ['tmdb_id', 'media_type'])
batch_op.create_unique_constraint(batch_op.f('uq_media_tvdb_id'), ['tvdb_id', 'media_type'])

# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('media', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('uq_media_tvdb_id'), type_='unique')
batch_op.drop_constraint(batch_op.f('uq_media_tmdb_id'), type_='unique')
batch_op.drop_constraint(batch_op.f('uq_media_imdb_id'), type_='unique')
batch_op.create_index('ix_media_tvdb_id', ['tvdb_id'], unique=False)
batch_op.create_index('ix_media_tmdb_id', ['tmdb_id'], unique=False)
batch_op.create_index('ix_media_imdb_id', ['imdb_id'], unique=False)

# ### end Alembic commands ###
10 changes: 5 additions & 5 deletions server/jobs/plex.py
@@ -29,10 +29,6 @@
from server.repositories.settings import PlexSettingRepository
from server.services import plex, tmdb

TMDB_REGEX = "tmdb|themoviedb"
IMDB_REGEX = "imdb"
TVDB_REGEX = "tvdb|thetvdb"


@scheduler.scheduled_job(
"interval",
@@ -342,6 +338,10 @@ async def process_plex_episode(


async def find_guids(media: Union[PlexMovie, PlexSeries]) -> (int, str, int):
tmdb_regex = "tmdb|themoviedb"
imdb_regex = "imdb"
tvdb_regex = "tvdb|thetvdb"

try:
guids = [media.guid]
if hasattr(media, "guids") and media.guids is not None:
@@ -362,7 +362,7 @@ def find_guid(regex):
None,
)

tmdb_id, imdb_id, tvdb_id = find_guid(TMDB_REGEX), find_guid(IMDB_REGEX), find_guid(TVDB_REGEX)
tmdb_id, imdb_id, tvdb_id = find_guid(tmdb_regex), find_guid(imdb_regex), find_guid(tvdb_regex)

try:
if tmdb_id is None:
