Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add feed edit endpoint #2

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 18 additions & 8 deletions src/api/update.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,33 +2,43 @@ use std::sync::Arc;

use actix_web::{web, HttpResponse, Responder};

use crate::db::AppDB;
use crate::{db::AppDB, models::FeedSourceId};

#[derive(serde::Serialize, serde::Deserialize)]
pub struct Url {
url: String,
pub struct UrlInput {
pub url: String,
}

/// Request body for the `/feed/edit` endpoint: repoints an existing feed
/// (identified by `id`) at a replacement url.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct FeedEdit {
// Id of the feed row to edit; alias declared in `models` (FeedSourceId).
pub id: FeedSourceId,
// Replacement url, wrapped in the same shape used by `/feed/add`.
pub new_url: UrlInput,
}

#[actix_web::post("/feed/add")]
pub async fn add_feed_source(data: web::Data<Arc<AppDB>>, body: web::Json<Url>) -> impl Responder {
pub async fn add_feed_source(data: web::Data<Arc<AppDB>>, body: web::Json<UrlInput>) -> impl Responder {
let db = data.into_inner();
// TODO(swarnim): validate before adding to db
_ = db.add_feed_source(body.into_inner().url).await;
_ = db.add_feed_source(body.into_inner().url, "rss".into()).await;
HttpResponse::Ok()
}

#[actix_web::post("/feed/remove")]
pub async fn remove_feed_source(
data: web::Data<Arc<AppDB>>,
body: web::Json<Url>,
body: web::Json<UrlInput>,
) -> impl Responder {
let db = data.into_inner();
_ = db.remove_feed_source(body.into_inner().url).await;
HttpResponse::Ok()
}

#[actix_web::post("/feed/edit")]
pub async fn edit_feed_source() -> impl Responder {
// TODO(swarnim): implement correctly
/// POST handler that forwards a feed-edit request to the database layer.
///
/// Always answers 200 OK: the outcome of the DB call is deliberately
/// discarded, matching the best-effort behavior of the add/remove handlers.
pub async fn edit_feed_source(
    data: web::Data<Arc<AppDB>>,
    body: web::Json<FeedEdit>,
) -> impl Responder {
    let edit = body.into_inner();
    let database = data.into_inner();
    _ = database.edit_feed_source(edit).await;
    HttpResponse::Ok()
}
69 changes: 62 additions & 7 deletions src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use error_stack::{Result, ResultExt};
use futures_util::{stream::BoxStream, StreamExt, TryStreamExt};
use sqlx::{sqlite::*, Acquire};

use crate::errors::ApplicationError;
use crate::{errors::ApplicationError, api::update::FeedEdit};

pub struct AppDB(SqlitePool);

Expand Down Expand Up @@ -38,14 +38,69 @@ impl AppDB {
.boxed()
}

/// add url to urls table and update it in the feeds table
/// Edit the feed url specified by the associated ID.
/// The old url will be deleted after the feed is updated.
///
/// Flow: look up the current url for `edit.id`, insert the new url into
/// `urls`, repoint the `feeds` row at it (refreshing both timestamps),
/// remove the old url, then commit.
///
/// # Errors
/// Returns `ApplicationError::UnexpectedError` when no feed row matches the
/// id, or `ApplicationError::DatabaseQueryError` for any underlying failure.
///
/// TODO(rs): Is this worth having as an upsert?
pub async fn edit_feed_source(&self, edit: FeedEdit) -> Result<(), ApplicationError> {
// Check out one pooled connection; every statement below runs on it.
let mut pool_conn = self
.0
.acquire()
.await
.change_context(ApplicationError::DatabaseQueryError)?;

// Just to be safe.
let mut tx = pool_conn
.begin()
.await
.change_context(ApplicationError::DatabaseQueryError)?;

// Fetch the url currently attached to this feed id (None if no such row).
let old_url = sqlx::query!("SELECT url FROM feeds WHERE id = ?", edit.id)
.fetch_optional(tx.as_mut())
.await
.change_context(ApplicationError::DatabaseQueryError)?.map(|it| it.url);

// Bail out before writing anything if the id does not exist.
// NOTE(review): returning here drops `tx` without an explicit rollback —
// presumably the sqlx Drop impl rolls back; confirm.
if old_url.is_none() {
return Err(ApplicationError::UnexpectedError("No matching feed for id found.").into());
}

// Single timestamp string reused for both last_checked and last_modified.
let datetime = sqlx::types::time::OffsetDateTime::now_utc().to_string();

// NOTE(review): url type is hard-coded to "rss" here; the edit payload has
// no type field, so editing a non-rss feed would silently relabel it — verify.
_ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", edit.new_url.url, "rss")
.execute(tx.as_mut())
.await
.change_context(ApplicationError::DatabaseQueryError)?;

// Repoint the feed at the new url and refresh its timestamps.
_ = sqlx::query!(
"UPDATE feeds SET url = ?, last_checked = ?, last_modified = ? WHERE id = ?",
edit.new_url.url,
datetime,
datetime,
edit.id,
)
.execute(tx.as_mut())
.await
.change_context(ApplicationError::DatabaseQueryError)?;

// we remove it after we change the url, so the delete doesn't cascade.
// NOTE(review): remove_feed_source acquires its own pool connection, so the
// delete runs OUTSIDE `tx` and before the commit below — with SQLite's
// single-writer locking this looks like it could block/deadlock, and a
// failed commit would leave the old url already deleted. Confirm intent.
if let Some(old_url) = old_url {
_ = self.remove_feed_source(old_url).await?;
}

tx.commit()
.await
.change_context(ApplicationError::DatabaseQueryError)
}

/// Add url to urls table and update it in the feeds table
///
/// take care to not add duplicate URLs with just slightly different strings,
/// Take care to not add duplicate URLs with just slightly different strings,
/// consider handling url simplification before pushing to this DB.
///
/// note: feeds table holds the meta data for all urls, relevant for building a feed
/// this allows for simpler normalization constraints as we grow the application complexity
pub async fn add_feed_source(&self, url: String) -> Result<(), ApplicationError> {
/// Note: the feeds table holds the meta data for all urls, relevant for building a feed.
/// This allows for simpler normalization constraints as we grow the application complexity
///
pub async fn add_feed_source(&self, url: String, feed_type: String) -> Result<(), ApplicationError> {
let mut pool_conn = self
.0
.acquire()
Expand All @@ -56,7 +111,7 @@ impl AppDB {
.await
.change_context(ApplicationError::DatabaseQueryError)?;

_ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", url, "rss")
_ = sqlx::query!("INSERT INTO urls VALUES (?, ?)", url, feed_type)
.execute(tx.as_mut())
.await
.change_context(ApplicationError::DatabaseQueryError)?;
Expand Down
12 changes: 10 additions & 2 deletions src/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,24 +4,32 @@ use serde::{Deserialize, Serialize};

// TODO(swarnim): add support for profiles and users probably,
// then add support for UUIDs as well.
pub type FeedSourceId = i64;

pub type FeedSourceId = usize;
/// [`Feed`]: is a table with a metadata with id pointing a url.
/// ## why?
/// allows for adding metadata for urls
pub type Feed = HashMap<FeedSourceId, FeedSource>;

#[derive(Debug, Serialize, Deserialize)]
pub struct FeedSource {
/// feed name
pub name: String,
/// id to urls
pub url: (String, UrlType),
pub url: Url,
/// store time for last checked
pub last_checked: String,
/// last modified
pub last_modified: String,
}

/// A feed url paired with its source type.
#[derive(Debug, Serialize, Deserialize)]
pub struct Url {
// The url string itself.
pub url: String,
// Kind of source behind the url — see [`UrlType`] (documented below as
// unused at the moment).
pub urltype: UrlType,
}


/// [`UrlType`]: useful for identifying amongst url feed source types, like rss vs fediverse
/// Unused atm.
#[derive(Debug, Default, Serialize, Deserialize)]
Expand Down