Jellyfin integration (#195)
* perf(backend): use exposed db

* docs: change position of important docs

* feat(backend): basic code for jellyfin integration

* build(backend): add nanoid deps

* feat(backend): generate nanoid application token

* feat(backend): add resolver to create jellyfin integration

* refactor(backend): use one method to delete all integrations

* chore(frontend): adapt to new gql schema

* refactor(backend): return all integrations from one resolver

* feat(frontend): display all integrations

* fix(backend): change text returned for integrations

* fix(frontend): sink integration not being created

* docs: add jellyfin to readme

* docs: add jellyfin integration guide

* fix(docs): make guides more informative

* build(backend): bump version

* feat(backend): add endpoint for jellyfin webhook

* docs: add instructions for the new plugin

* feat(backend): jellyfin payload structure for movies

* docs: remove useless info

* docs: add info about jellyfin shows

* build(backend): add deps for hashing user id

* feat(backend): use hashed id for user id

* feat(backend): extract slug from webhook

* feat(backend): default value for user sink integrations

* feat(backend): add handling for progress

* refactor(backend): use macro for decimals

* docs: add info about shows and change webhook url

* refactor(backend): move parsing logic to service

* feat(backend): commit jellyfin movie update

* feat(backend): send sink integration info about shows

* fix(backend): decimal for audiobookshelf progress

* refactor(backend): account for single progress update

* refactor(backend): extract fn for progress update

* feat(backend): calculate exact media progress

* fix(backend): start to handle finished media

* feat(backend): add config parameters for duplicate progress input

* refactor(backend): change memory db type

* feat(backend): handle integrations with more than 100% progress

* build(backend): add sha deps

* refactor(backend): change order of enums

* feat(backend): nest webhook routes

* refactor(backend): change order of fns

* try(backend): try hashing approach for progress update

* Revert "build(backend): add sha deps"

This reverts commit 29bca95.

* Revert "try(backend): try hashing approach for progress update"

This reverts commit e76ebaf.

* refactor(backend): remove auth db ref from gql schema

* style(backend): apply clippy lints

* build(backend): change TBR version

* refactor(backend): move struct to where it makes sense

* build(backend): add retainer deps

* docs(backend): better explanation of config param

* Revert "Revert "build(backend): add sha deps""

This reverts commit 9811825.

* feat(backend): add cache to service

* Revert "Revert "try(backend): try hashing approach for progress update""

This reverts commit 9c3cf13.

* fix(backend): change type of config param

* feat(backend): handle duplicate progress update cases

* feat(kodi): adapt to new resolver improvements

* Revert "Revert "Revert "build(backend): add sha deps"""

This reverts commit 8bd0bfe.

* build(kodi): bump version
IgnisDa committed Jul 19, 2023
1 parent 76fd5a4 commit 30de721
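
Several of the commits above ("jellyfin payload structure for movies", "extract slug from webhook", "calculate exact media progress") center on one conversion: the Jellyfin webhook plugin reports playback position and runtime in ticks (1 tick = 100 ns), and the backend turns those into a progress percentage. A minimal sketch of that conversion, assuming `serde_json` for parsing and payload field names modeled on the plugin's output — the backend's actual structs may differ:

```rust
use rust_decimal::Decimal;
use rust_decimal_macros::dec;
use serde::Deserialize;

// Assumed payload shape; Jellyfin reports times in ticks, 1 tick = 100 ns.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct JellyfinWebhookPayload {
    item_type: String, // "Movie" or "Episode"
    playback_position_ticks: Option<u64>,
    run_time_ticks: Option<u64>,
}

// "feat(backend): calculate exact media progress": ticks to a percentage.
fn progress_percent(payload: &JellyfinWebhookPayload) -> Option<Decimal> {
    let position = Decimal::from(payload.playback_position_ticks?);
    let runtime = Decimal::from(payload.run_time_ticks?);
    if runtime.is_zero() {
        return None; // avoid dividing by zero on malformed payloads
    }
    Some(position / runtime * dec!(100))
}

fn main() {
    let payload: JellyfinWebhookPayload = serde_json::from_str(
        r#"{"ItemType": "Movie", "PlaybackPositionTicks": 36000000000, "RunTimeTicks": 72000000000}"#,
    )
    .unwrap();
    // One hour into a two-hour movie.
    assert_eq!(progress_percent(&payload), Some(dec!(50)));
}
```

The show-related commits presumably extend this payload with series/season/episode fields so an "Episode" event can be matched to the right show; that part is not shown in the diffs below.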
Showing 37 changed files with 911 additions and 450 deletions.
43 changes: 42 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions README.md
@@ -54,9 +54,9 @@ special tool on your computer or phone that lets you keep track of all these dig
 ## 🚀 Features
 
 - [Supports](https://github.com/IgnisDa/ryot/discussions/4) tracking media
-  and fitness.
+  and fitness
 - ✅ Import data from Goodreads, MediaTracker, Trakt, Movary, StoryGraph
-- ✅ Integration with Kodi, Audiobookshelf
+- ✅ Integration with Jellyfin, Kodi, Audiobookshelf
 - ✅ Self-hosted
 - ✅ PWA enabled
 - ✅ Documented GraphQL API
5 changes: 4 additions & 1 deletion apps/backend/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ryot"
-version = "1.13.2"
+version = "1.14.0-beta.1"
 edition = "2021"
 repository = "https://github.com/IgnisDa/ryot"
 license = "GPL-V3"
@@ -29,14 +29,17 @@ dotenvy = "0.15.7"
 enum_meta = "0.6.0"
 futures = "0.3.28"
 graphql_client = "0.13.0"
+harsh = "0.2.2"
 http = "0.2.9"
 http-types = "2.12.0"
 isolang = { version = "2.3.0", features = ["list_languages"] }
 itertools = "0.10.5"
 markdown = "1.0.0-alpha.10"
 mime_guess = "2.0.4"
+nanoid = "0.4.0"
 quick-xml = { version = "0.28.2", features = ["serde", "serialize"] }
 regex = "1.8.1"
+retainer = "0.3.0"
 rust-embed = "6.6.1"
 rust_decimal = "1.29.1"
 rust_decimal_macros = "1.29.1"
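
Each of the three new dependencies maps to a commit in this PR: `nanoid` generates the application token, `harsh` hashes numeric user IDs with the configurable salt, and `retainer` backs the in-memory cache added to the service. A rough sketch of how they fit together, assuming a `tokio` runtime (which the backend already uses) — none of this is the backend's literal code:

```rust
use std::time::Duration;

use harsh::Harsh;
use nanoid::nanoid;
use retainer::Cache;

#[tokio::main]
async fn main() {
    // "feat(backend): generate nanoid application token"
    let application_token = nanoid!(); // 21 URL-safe characters by default
    println!("application token: {application_token}");

    // "feat(backend): use hashed id for user id": obfuscate the numeric id
    // using the `integration.hasher_salt` config value as the salt.
    let hasher = Harsh::builder().salt("ryot").build().unwrap();
    let hashed_user_id = hasher.encode(&[42]);
    assert_eq!(hasher.decode(&hashed_user_id).unwrap(), vec![42]);

    // "feat(backend): add cache to service": remember a value for a while,
    // then let it expire on its own.
    let cache: Cache<String, i32> = Cache::new();
    cache.insert("user:42".to_owned(), 1, Duration::from_secs(60)).await;
    assert_eq!(cache.get(&"user:42".to_owned()).await.map(|v| *v), Some(1));
}
```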
9 changes: 9 additions & 0 deletions apps/backend/src/config.rs
@@ -324,6 +324,9 @@ pub struct IntegrationConfig {
     /// every `n` hours.
     #[setting(default = 2)]
     pub pull_every: i32,
+    /// The salt used to hash user IDs.
+    #[setting(default = format!("{}", PROJECT_NAME))]
+    pub hasher_salt: String,
 }
 
 impl IsFeatureEnabled for FileStorageConfig {
@@ -386,6 +389,11 @@ pub struct ServerConfig {
     /// are running the server on `localhost`.
     /// [More information](https://github.com/IgnisDa/ryot/issues/23)
     pub insecure_cookie: bool,
+    /// The number of hours after which a media item can be marked as seen
+    /// again for a user. This prevents the same item from being marked as
+    /// started when it was already marked as seen in the last `n` hours.
+    #[setting(default = 2)]
+    pub progress_update_threshold: u64,
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone, Config)]
@@ -453,6 +461,7 @@ impl AppConfig {
         cl.file_storage.s3_access_key_id = gt();
         cl.file_storage.s3_secret_access_key = gt();
         cl.file_storage.s3_url = gt();
+        cl.integration.hasher_salt = gt();
         cl.movies.tmdb.access_token = gt();
         cl.podcasts.listennotes.api_token = gt();
         cl.shows.tmdb.access_token = gt();
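
The new `progress_update_threshold` works together with the cache: per the "handle duplicate progress update cases" commit, the service can fingerprint an incoming update and ignore an identical one seen within the window. A sketch of that mechanism — the PR's "add sha deps" commits suggest the real code uses a SHA-based digest, while this self-contained version uses the standard library's hasher:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::time::Duration;

use retainer::Cache;

// Fingerprint one progress update so an identical repeat can be recognized.
fn progress_update_key(user_id: i32, metadata_id: i32, progress: i32) -> String {
    let mut hasher = DefaultHasher::new();
    (user_id, metadata_id, progress).hash(&mut hasher);
    hasher.finish().to_string()
}

// Returns false when the same update was already seen inside the window.
async fn should_apply(
    cache: &Cache<String, ()>,
    threshold_hours: u64,
    user_id: i32,
    metadata_id: i32,
    progress: i32,
) -> bool {
    let key = progress_update_key(user_id, metadata_id, progress);
    if cache.get(&key).await.is_some() {
        return false;
    }
    cache
        .insert(key, (), Duration::from_secs(threshold_hours * 3600))
        .await;
    true
}

#[tokio::main]
async fn main() {
    let cache = Cache::new();
    assert!(should_apply(&cache, 2, 1, 10, 50).await);
    assert!(!should_apply(&cache, 2, 1, 10, 50).await); // duplicate within 2h
}
```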
4 changes: 3 additions & 1 deletion apps/backend/src/entities/user.rs
@@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::{
     migrator::UserLot,
-    users::{UserPreferences, UserYankIntegrations},
+    users::{UserPreferences, UserSinkIntegrations, UserYankIntegrations},
 };
 
 fn get_hasher() -> Argon2<'static> {
@@ -33,6 +33,8 @@ pub struct Model {
     pub preferences: UserPreferences,
     #[graphql(skip)]
     pub yank_integrations: Option<UserYankIntegrations>,
+    #[graphql(skip)]
+    pub sink_integrations: UserSinkIntegrations,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
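
The diff stores the new column as `UserSinkIntegrations` but never shows that type. Based on the commit messages ("basic code for jellyfin integration", "extract slug from webhook", "default value for user sink integrations"), a plausible sketch — every name below is an assumption — would be a serialized list that defaults to empty:

```rust
use serde::{Deserialize, Serialize};

// Hypothetical; the real definitions live in the backend's `users` module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UserSinkIntegrationSetting {
    Jellyfin { slug: String },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserSinkIntegration {
    pub id: usize,
    pub settings: UserSinkIntegrationSetting,
}

// "feat(backend): default value for user sink integrations": an empty list.
pub type UserSinkIntegrations = Vec<UserSinkIntegration>;
```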
5 changes: 2 additions & 3 deletions apps/backend/src/graphql.rs
@@ -5,7 +5,7 @@ use crate::{
     fitness::exercise::resolver::{ExerciseMutation, ExerciseQuery},
     importer::{ImporterMutation, ImporterQuery},
     miscellaneous::resolver::{MiscellaneousMutation, MiscellaneousQuery},
-    utils::{AppServices, MemoryAuthDb},
+    utils::AppServices,
 };
 
 #[derive(Debug, SimpleObject, Serialize, Deserialize)]
@@ -21,13 +21,12 @@ pub struct MutationRoot(MiscellaneousMutation, ImporterMutation, ExerciseMutation);
 
 pub type GraphqlSchema = Schema<QueryRoot, MutationRoot, EmptySubscription>;
 
-pub async fn get_schema(app_services: &AppServices, auth_db: MemoryAuthDb) -> GraphqlSchema {
+pub async fn get_schema(app_services: &AppServices) -> GraphqlSchema {
     Schema::build(
         QueryRoot::default(),
         MutationRoot::default(),
         EmptySubscription,
     )
-    .data(auth_db)
     .data(app_services.media_service.clone())
     .data(app_services.importer_service.clone())
     .data(app_services.exercise_service.clone())
5 changes: 2 additions & 3 deletions apps/backend/src/importer/goodreads.rs
@@ -1,7 +1,7 @@
 use async_graphql::Result;
 use chrono::{DateTime, Utc};
 use itertools::Itertools;
-use rust_decimal::{prelude::FromPrimitive, Decimal};
+use rust_decimal::Decimal;
 use rust_decimal_macros::dec;
 use serde::{Deserialize, Serialize};
 
@@ -79,8 +79,7 @@ pub async fn import(input: DeployGoodreadsImportInput) -> Result<ImportResult> {
             let rating: Decimal = d.user_rating.parse().unwrap();
             if rating != dec!(0) {
                 // DEV: Rates items out of 5
-                single_review.rating =
-                    Some(rating.saturating_mul(Decimal::from_u8(20).unwrap()))
+                single_review.rating = Some(rating.saturating_mul(dec!(20)))
             }
         };
         if single_review.review.is_some() || single_review.rating.is_some() {
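
The `Decimal::from_u8(20).unwrap()` → `dec!(20)` change repeated across the importers ("refactor(backend): use macro for decimals") swaps a runtime conversion plus `unwrap` for a constant checked at compile time. A minimal illustration:

```rust
use rust_decimal::Decimal;
use rust_decimal_macros::dec;

fn main() {
    // Goodreads rates items out of 5; multiplying by 20 scales to 100.
    let rating: Decimal = "4.5".parse().unwrap();
    let scaled = rating.saturating_mul(dec!(20)); // dec! is validated at compile time
    assert_eq!(scaled, dec!(90));
}
```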
7 changes: 3 additions & 4 deletions apps/backend/src/importer/media_tracker.rs
@@ -1,7 +1,8 @@
 // Responsible for importing from https://github.com/bonukai/MediaTracker.
 
 use async_graphql::Result;
-use rust_decimal::{prelude::FromPrimitive, Decimal};
+use rust_decimal::Decimal;
+use rust_decimal_macros::dec;
 use sea_orm::prelude::DateTimeUtc;
 use serde::{Deserialize, Serialize};
 use serde_with::{formats::Flexible, serde_as, TimestampMilliSeconds};
@@ -295,9 +296,7 @@ pub async fn import(input: DeployMediaTrackerImportInput) -> Result<ImportResult> {
                     ImportItemRating {
                         id: Some(r.id.to_string()),
                         review,
-                        rating: r
-                            .rating
-                            .map(|d| d.saturating_mul(Decimal::from_u8(20).unwrap())),
+                        rating: r.rating.map(|d| d.saturating_mul(dec!(20))),
                     }
                 })),
             seen_history: details
25 changes: 14 additions & 11 deletions apps/backend/src/importer/mod.rs
@@ -20,7 +20,8 @@ use crate::{
         AddMediaToCollection, CreateOrUpdateCollectionInput, MediaDetails, PostReviewInput,
         ProgressUpdateInput,
     },
-    utils::user_id_from_ctx,
+    traits::AuthProvider,
+    utils::MemoryDatabase,
 };
 
 mod goodreads;
@@ -170,11 +171,9 @@ impl ImporterQuery {
         &self,
         gql_ctx: &Context<'_>,
     ) -> Result<Vec<media_import_report::Model>> {
-        let user_id = user_id_from_ctx(gql_ctx).await?;
-        gql_ctx
-            .data_unchecked::<Arc<ImporterService>>()
-            .media_import_reports(user_id)
-            .await
+        let service = gql_ctx.data_unchecked::<Arc<ImporterService>>();
+        let user_id = service.user_id_from_ctx(gql_ctx).await?;
+        service.media_import_reports(user_id).await
     }
 }
 
@@ -189,11 +188,9 @@ impl ImporterMutation {
         gql_ctx: &Context<'_>,
         input: DeployImportJobInput,
     ) -> Result<String> {
-        let user_id = user_id_from_ctx(gql_ctx).await?;
-        gql_ctx
-            .data_unchecked::<Arc<ImporterService>>()
-            .deploy_import_job(user_id, input)
-            .await
+        let service = gql_ctx.data_unchecked::<Arc<ImporterService>>();
+        let user_id = service.user_id_from_ctx(gql_ctx).await?;
+        service.deploy_import_job(user_id, input).await
     }
 }
 
@@ -203,6 +200,12 @@ pub struct ImporterService {
     import_media: SqliteStorage<ImportMedia>,
 }
 
+impl AuthProvider for ImporterService {
+    fn get_auth_db(&self) -> &MemoryDatabase {
+        self.media_service.get_auth_db()
+    }
+}
+
 impl ImporterService {
     #[allow(clippy::too_many_arguments)]
     pub fn new(
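
The impl above only supplies `get_auth_db`; the `user_id_from_ctx` called by both resolvers is presumably a provided method on the `traits::AuthProvider` trait. A hedged sketch of what that trait could look like, reducing `MemoryDatabase` to a `retainer` cache from session token to user id and using the `async-trait` crate — all of this is assumed, not shown in the diff:

```rust
use std::sync::Arc;

use async_trait::async_trait;
use retainer::Cache;

// Assumed alias; the diff only imports `utils::MemoryDatabase`.
pub type MemoryDatabase = Arc<Cache<String, i32>>;

#[async_trait]
pub trait AuthProvider {
    fn get_auth_db(&self) -> &MemoryDatabase;

    // The real method reads the session token out of the GraphQL context
    // first; here the token is passed in directly to stay self-contained.
    async fn user_id_from_token(&self, token: String) -> Result<i32, String> {
        self.get_auth_db()
            .get(&token)
            .await
            .map(|guard| *guard)
            .ok_or_else(|| "no user found for this token".to_owned())
    }
}

struct DemoService {
    auth_db: MemoryDatabase,
}

impl AuthProvider for DemoService {
    fn get_auth_db(&self) -> &MemoryDatabase {
        &self.auth_db
    }
}

#[tokio::main]
async fn main() {
    let auth_db: MemoryDatabase = Arc::new(Cache::new());
    auth_db
        .insert("token".to_owned(), 42, std::time::Duration::from_secs(60))
        .await;
    let service = DemoService { auth_db };
    assert_eq!(service.user_id_from_token("token".to_owned()).await, Ok(42));
}
```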
9 changes: 3 additions & 6 deletions apps/backend/src/importer/movary.rs
@@ -1,7 +1,8 @@
 use async_graphql::Result;
 use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
 use csv::Reader;
-use rust_decimal::{prelude::FromPrimitive, Decimal};
+use rust_decimal::Decimal;
+use rust_decimal_macros::dec;
 use serde::{Deserialize, Serialize};
 
 use crate::{
@@ -64,11 +65,7 @@ pub async fn import(input: DeployMovaryImportInput) -> Result<ImportResult> {
             reviews: vec![ImportItemRating {
                 id: None,
                 // DEV: Rates items out of 10
-                rating: Some(
-                    record
-                        .user_rating
-                        .saturating_mul(Decimal::from_u16(10).unwrap()),
-                ),
+                rating: Some(record.user_rating.saturating_mul(dec!(10))),
                 review: None,
             }],
             collections: vec![],
5 changes: 3 additions & 2 deletions apps/backend/src/importer/story_graph.rs
@@ -3,7 +3,8 @@ use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
 use convert_case::{Case, Casing};
 use csv::Reader;
 use itertools::Itertools;
-use rust_decimal::{prelude::FromPrimitive, Decimal};
+use rust_decimal::Decimal;
+use rust_decimal_macros::dec;
 use serde::{Deserialize, Serialize};
 
 use crate::{
@@ -110,7 +111,7 @@ pub async fn import(
                 rating: record
                     .rating
                     // DEV: Rates items out of 10
-                    .map(|d| d.saturating_mul(Decimal::from_u8(10).unwrap())),
+                    .map(|d| d.saturating_mul(dec!(10))),
                 review: record.review.map(|r| ImportItemReview {
                     date: None,
                     spoiler: false,