diff --git a/src/api/update.rs b/src/api/update.rs
index e7cdb3c..b06b073 100644
--- a/src/api/update.rs
+++ b/src/api/update.rs
@@ -2,25 +2,31 @@
 use std::sync::Arc;
 
 use actix_web::{web, HttpResponse, Responder};
-use crate::db::AppDB;
+use crate::{db::AppDB, models::FeedSourceId};
 
 #[derive(serde::Serialize, serde::Deserialize)]
-pub struct Url {
-    url: String,
+pub struct UrlInput {
+    pub url: String,
+}
+
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct FeedEdit {
+    pub id: FeedSourceId,
+    pub new_url: UrlInput,
 }
 
 #[actix_web::post("/feed/add")]
-pub async fn add_feed_source(data: web::Data<Arc<AppDB>>, body: web::Json<Url>) -> impl Responder {
+pub async fn add_feed_source(data: web::Data<Arc<AppDB>>, body: web::Json<UrlInput>) -> impl Responder {
     let db = data.into_inner();
     // TODO(swarnim): validate before adding to db
-    _ = db.add_feed_source(body.into_inner().url).await;
+    _ = db.add_feed_source(body.into_inner().url, "rss".into()).await;
     HttpResponse::Ok()
 }
 
 #[actix_web::post("/feed/remove")]
 pub async fn remove_feed_source(
     data: web::Data<Arc<AppDB>>,
-    body: web::Json<Url>,
+    body: web::Json<UrlInput>,
 ) -> impl Responder {
     let db = data.into_inner();
     _ = db.remove_feed_source(body.into_inner().url).await;
@@ -28,7 +34,11 @@ pub async fn remove_feed_source(
 }
 
 #[actix_web::post("/feed/edit")]
-pub async fn edit_feed_source() -> impl Responder {
-    // TODO(swarnim): implement correctly
+pub async fn edit_feed_source(
+    data: web::Data<Arc<AppDB>>,
+    body: web::Json<FeedEdit>,
+) -> impl Responder {
+    let db = data.into_inner();
+    _ = db.edit_feed_source(body.into_inner()).await;
     HttpResponse::Ok()
 }
diff --git a/src/db.rs b/src/db.rs
index 82c2948..1ad54c4 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -2,7 +2,7 @@
 use error_stack::{Result, ResultExt};
 use futures_util::{stream::BoxStream, StreamExt, TryStreamExt};
 use sqlx::{sqlite::*, Acquire};
 
-use crate::errors::ApplicationError;
+use crate::{errors::ApplicationError, api::update::FeedEdit};
 
 pub struct AppDB(SqlitePool);
@@ -38,14 +38,69 @@ impl AppDB {
             .boxed()
     }
-    /// add url to urls table and update it in the feeds table
+    /// Edit the feed url specified by the associated ID.
+    /// The old url will be deleted after the feed is updated.
+    ///
+    /// TODO(rs): Is this worth having as an upsert?
+    pub async fn edit_feed_source(&self, edit: FeedEdit) -> Result<(), ApplicationError> {
+        let mut pool_conn = self
+            .0
+            .acquire()
+            .await
+            .change_context(ApplicationError::DatabaseQueryError)?;
+
+        // Just to be safe.
+        let mut tx = pool_conn
+            .begin()
+            .await
+            .change_context(ApplicationError::DatabaseQueryError)?;
+
+        let old_url = sqlx::query!("SELECT url FROM feeds WHERE id = ?", edit.id)
+            .fetch_optional(tx.as_mut())
+            .await
+            .change_context(ApplicationError::DatabaseQueryError)?.map(|it| it.url);
+
+        if old_url.is_none() {
+            return Err(ApplicationError::UnexpectedError("No matching feed for id found.").into());
+        }
+
+        let datetime = sqlx::types::time::OffsetDateTime::now_utc().to_string();
+
+        _ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", edit.new_url.url, "rss")
+            .execute(tx.as_mut())
+            .await
+            .change_context(ApplicationError::DatabaseQueryError)?;
+
+        _ = sqlx::query!(
+            "UPDATE feeds SET url = ?, last_checked = ?, last_modified = ? WHERE id = ?",
+            edit.new_url.url,
+            datetime,
+            datetime,
+            edit.id,
+        )
+        .execute(tx.as_mut())
+        .await
+        .change_context(ApplicationError::DatabaseQueryError)?;
+
+        // we remove it after we change the url, so the delete doesn't cascade.
+        if let Some(old_url) = old_url {
+            _ = self.remove_feed_source(old_url).await?;
+        }
+
+        tx.commit()
+            .await
+            .change_context(ApplicationError::DatabaseQueryError)
+    }
+
+    /// Add url to urls table and update it in the feeds table
     ///
-    /// take care to not add duplicate URLs with just slightly different strings,
+    /// Take care to not add duplicate URLs with just slightly different strings,
     /// consider handling url simplification before pushing to this DB.
     ///
-    /// note: feeds table holds the meta data for all urls, relevant for building a feed
-    /// this allows for simpler normalization constraints as we grow the application complexity
-    pub async fn add_feed_source(&self, url: String) -> Result<(), ApplicationError> {
+    /// Note: the feeds table holds the meta data for all urls, relevant for building a feed.
+    /// This allows for simpler normalization constraints as we grow the application complexity
+    ///
+    pub async fn add_feed_source(&self, url: String, feed_type: String) -> Result<(), ApplicationError> {
         let mut pool_conn = self
             .0
             .acquire()
             .await
@@ -56,7 +111,7 @@ impl AppDB {
             .await
             .change_context(ApplicationError::DatabaseQueryError)?;
 
-        _ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", url, "rss")
+        _ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", url, feed_type)
             .execute(tx.as_mut())
             .await
             .change_context(ApplicationError::DatabaseQueryError)?;
diff --git a/src/models.rs b/src/models.rs
index dabafaf..4ee7ea5 100644
--- a/src/models.rs
+++ b/src/models.rs
@@ -4,24 +4,32 @@
 use serde::{Deserialize, Serialize};
 // TODO(swarnim): add support for profiles and users probably,
 // then add support for UUIDs as well.
+pub type FeedSourceId = i64;
 
-pub type FeedSourceId = usize;
 /// [`Feed`]: is a table with a metadata with id pointing a url.
 /// ## why?
 /// allows for adding metadata for urls
 pub type Feed = HashMap<FeedSourceId, FeedSource>;
+
 #[derive(Debug, Serialize, Deserialize)]
 pub struct FeedSource {
     /// feed name
     pub name: String,
     /// id to urls
-    pub url: (String, UrlType),
+    pub url: Url,
     /// store time for last checked
     pub last_checked: String,
     /// last modified
     pub last_modified: String,
 }
 
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Url {
+    pub url: String,
+    pub urltype: UrlType,
+}
+
+
 /// [`UrlType`]: useful for identifying amongst url feed source types, like rss vs fediverse
 /// Unused atm.
 #[derive(Debug, Default, Serialize, Deserialize)]