From 3d77c6f30f7744c1002ab44f892cda806914cbe8 Mon Sep 17 00:00:00 2001
From: mace
Date: Sat, 14 Oct 2023 18:32:49 +0200
Subject: [PATCH] Change get articles to read from database instead of dummy
 data.

---
 src/json_serialization/articles.rs    |   4 +-
 src/json_serialization/mod.rs         |   1 +
 src/json_serialization/user.rs        |   6 ++
 src/models/feed_item/mod.rs           |   2 +-
 src/models/feed_item/rss_feed_item.rs |  28 +++++-
 src/reader/feeds.rs                   |   2 +-
 src/reader/get.rs                     | 121 +++++++++++++++++++-------
 src/reader/mod.rs                     |   2 +-
 src/reader/structs/feed.rs            |   2 +-
 src/reader/sync.rs                    |  44 ++++++----
 vue/src/components/RssFeeds.vue       |   6 +-
 11 files changed, 157 insertions(+), 61 deletions(-)
 create mode 100644 src/json_serialization/user.rs

diff --git a/src/json_serialization/articles.rs b/src/json_serialization/articles.rs
index 3a79fd9..ad6bde1 100755
--- a/src/json_serialization/articles.rs
+++ b/src/json_serialization/articles.rs
@@ -2,11 +2,11 @@ use actix_web::{HttpResponse, Responder};
 use reqwest::StatusCode;
 use serde::Serialize;
 
-use crate::reader::structs::feed::Feed;
+use crate::reader::structs::feed::FeedAggregate;
 
 #[derive(Serialize)]
 pub struct Articles {
-    pub feeds: Vec<Feed>,
+    pub feeds: Vec<FeedAggregate>,
 }
 
 impl Responder for Articles {
diff --git a/src/json_serialization/mod.rs b/src/json_serialization/mod.rs
index c19504e..6beb73a 100755
--- a/src/json_serialization/mod.rs
+++ b/src/json_serialization/mod.rs
@@ -3,3 +3,4 @@ pub mod login;
 pub mod new_feed;
 pub mod new_feed_item;
 pub mod new_user;
+pub mod user;
diff --git a/src/json_serialization/user.rs b/src/json_serialization/user.rs
new file mode 100644
index 0000000..4ea75b4
--- /dev/null
+++ b/src/json_serialization/user.rs
@@ -0,0 +1,6 @@
+use serde_derive::Deserialize;
+
+#[derive(Deserialize)]
+pub struct JsonUser {
+    pub user_id: i32,
+}
diff --git a/src/models/feed_item/mod.rs b/src/models/feed_item/mod.rs
index 37d1c31..e5412b6 100755
--- a/src/models/feed_item/mod.rs
+++ b/src/models/feed_item/mod.rs
@@ -1,2 +1,2 @@
 pub mod new_feed_item;
-mod rss_feed_item;
+pub mod rss_feed_item;
diff --git a/src/models/feed_item/rss_feed_item.rs b/src/models/feed_item/rss_feed_item.rs
index fbf056e..3168c6a 100755
--- a/src/models/feed_item/rss_feed_item.rs
+++ b/src/models/feed_item/rss_feed_item.rs
@@ -1,15 +1,35 @@
+use crate::models::feed::rss_feed::Feed;
 use diesel::{Associations, Identifiable, Queryable};
 
 use crate::schema::feed_item;
-
 #[derive(Clone, Queryable, Identifiable, Associations)]
 #[diesel(belongs_to(Feed))]
 #[diesel(table_name=feed_item)]
-pub struct Feed {
+pub struct FeedItem {
     pub id: i32,
     pub feed_id: i32,
-    pub title: String,
-    pub url: String,
     pub content: String,
     pub read: bool,
+    pub title: String,
+    pub url: String,
+}
+
+impl FeedItem {
+    pub fn new(
+        id: i32,
+        feed_id: i32,
+        title: String,
+        url: String,
+        content: String,
+        read: bool,
+    ) -> Self {
+        Self {
+            id,
+            feed_id,
+            title,
+            url,
+            content,
+            read,
+        }
+    }
+}
diff --git a/src/reader/feeds.rs b/src/reader/feeds.rs
index cb77619..5157e51 100755
--- a/src/reader/feeds.rs
+++ b/src/reader/feeds.rs
@@ -5,6 +5,6 @@ use rss::Channel;
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
-    log::info!("{:?}", channel);
+    log::debug!("{:?}", channel);
     Ok(channel)
 }
diff --git a/src/reader/get.rs b/src/reader/get.rs
index e234e42..199246d 100755
--- a/src/reader/get.rs
+++ b/src/reader/get.rs
@@ -1,44 +1,101 @@
-use crate::{auth::jwt::JwtToken, reader::feeds,
-    json_serialization::articles::Articles};
-use actix_web::{HttpRequest, Responder};
-use scraper::{Html, Selector };
+use crate::json_serialization::user::JsonUser;
+use crate::models::feed::rss_feed::Feed;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::reader::structs::feed::FeedAggregate;
+use crate::schema::feed_item::{feed_id, read};
+use crate::{
+    auth::jwt::JwtToken,
+    database::establish_connection,
+    json_serialization::articles::Articles,
+    schema::feed::{self, user_id},
+    schema::feed_item,
+};
+use actix_web::{web, HttpRequest, Responder};
+use diesel::prelude::*;
 
-use super::structs::{article::Article, feed::Feed};
+use super::structs::article::Article;
 
-pub async fn get(req: HttpRequest) -> impl Responder {
+pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder {
     let request = req.clone();
     let _token: JwtToken = JwtToken::decode_from_request(req).unwrap();
-    let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf").await.unwrap();
+    let req_user_id = path.user_id;
+    log::info!("Received user_id: {}", req_user_id);
 
-    let feed_title: String = feed.title.clone();
-    let feed_items: Vec<Article> = feed.into_items().into_iter().map(|item| {
-        let title = item.title.unwrap();
-        let frag = Html::parse_fragment(&item.content.unwrap());
-        let mut content = "".to_string();
-        let frag_clone = frag.clone();
-        frag.tree.into_iter().for_each(|node| {
-            let selector_img = Selector::parse("img").unwrap();
+    let mut connection: diesel::PgConnection = establish_connection();
+    let feeds: Vec<Feed> = feed::table
+        .filter(user_id.eq(req_user_id))
+        .load::<Feed>(&mut connection)
+        .unwrap();
+    // let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf")
+    //     .await
+    //     .unwrap();
 
-            for element in frag_clone.select(&selector_img) {
-                if !content.starts_with("<img") {
-                    content.push_str(&element.html())
-                }
-            }
-            if let scraper::node::Node::Text(text) = node {
-                content.push_str(&text.text);
-            }
+    let mut feed_aggregates: Vec<FeedAggregate> = Vec::new();
+    for feed in feeds {
+        let existing_item: Vec<FeedItem> = feed_item::table
+            .filter(feed_id.eq(feed.id))
+            .filter(read.eq(false))
+            .load(&mut connection)
+            .unwrap();
 
-        });
-        Article {
-            title,
-            content,
-        }
-    } ).collect();
+        log::info!(
+            "Load {} feed items for feed: {}",
+            existing_item.len(),
+            feed.url
+        );
 
-    let feeds = vec![(Feed {title: feed_title, items: feed_items})];
+        let article_list: Vec<Article> = existing_item
+            .into_iter()
+            .map(|feed_item: FeedItem| Article {
+                title: feed_item.title,
+                content: feed_item.content,
+            })
+            .collect();
+
+        log::info!("article list with {} items generated.", article_list.len());
+
+        feed_aggregates.push(FeedAggregate {
+            title: feed.title,
+            items: article_list,
+        })
+    }
+    // let feed_title: String = feed.title.clone();
+    // let feed_items: Vec<Article> = feed
+    //     .into_items()
+    //     .into_iter()
+    //     .map(|item| {
+    //         let title = item.title.unwrap();
+    //         let frag = Html::parse_fragment(&item.content.unwrap());
+    //         let mut content = "".to_string();
+    //         let frag_clone = frag.clone();
+    //         frag.tree.into_iter().for_each(|node| {
+    //             let selector_img = Selector::parse("img").unwrap();
+    //
+    //             for element in frag_clone.select(&selector_img) {
+    //                 if !content.starts_with("<img") {
+    //                     content.push_str(&element.html())
+    //                 }
+    //             }
+    //             if let scraper::node::Node::Text(text) = node {
+    //                 content.push_str(&text.text);
+    //             }
+    //         });
+    //         Article { title, content }
+    //     })
+    //     .collect();
+    //
+    // let feed_aggregates = vec![
+    //     (FeedAggregate {
+    //         title: feed_title,
+    //         items: feed_items,
+    //     }),
+    // ];
+
+    let articles: Articles = Articles {
+        feeds: feed_aggregates,
+    };
 
-    let articles: Articles = Articles { feeds };
-
     articles.respond_to(&request)
 }
diff --git a/src/reader/mod.rs b/src/reader/mod.rs
index 835e09a..15e2ade 100755
--- a/src/reader/mod.rs
+++ b/src/reader/mod.rs
@@ -13,7 +13,7 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
         backend: true,
     };
     app.route(
-        &base_path.define(String::from("/get")),
+        &base_path.define(String::from("/get/{user_id}")),
         web::get().to(get::get),
     );
     app.route(
diff --git a/src/reader/structs/feed.rs b/src/reader/structs/feed.rs
index edbf681..0394385 100644
--- a/src/reader/structs/feed.rs
+++ b/src/reader/structs/feed.rs
@@ -3,7 +3,7 @@ use serde::Serialize;
 use super::article::Article;
 
 #[derive(Serialize)]
-pub struct Feed {
+pub struct FeedAggregate {
     pub title: String,
     pub items: Vec<Article>,
 }
diff --git a/src/reader/sync.rs b/src/reader/sync.rs
index aa22b3c..0324072 100644
--- a/src/reader/sync.rs
+++ b/src/reader/sync.rs
@@ -1,6 +1,9 @@
 use super::feeds;
+use crate::json_serialization::user::JsonUser;
 use crate::models::feed::rss_feed::Feed;
 use crate::models::feed_item::new_feed_item::NewFeedItem;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::schema::feed_item::{feed_id, title};
 use crate::{
     database::establish_connection,
     schema::{
@@ -12,15 +15,9 @@ use actix_web::{web, HttpRequest, HttpResponse, Responder};
 use diesel::prelude::*;
 use rss::Item;
 use scraper::{Html, Selector};
-use serde_derive::Deserialize;
 
-#[derive(Deserialize)]
-pub struct JsonUser {
-    user_id: String,
-}
-
-fn create_feed_item(item: Item, feed: &Feed) {
-    let title = item.title.unwrap();
+fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
+    let item_title = item.title.unwrap();
     let frag = Html::parse_fragment(&item.content.unwrap());
     let mut content = "".to_string();
     let frag_clone = frag.clone();
@@ -38,20 +35,33 @@ fn create_feed_item(item: Item, feed: &Feed) {
         }
     });
 
-    let mut connection: diesel::PgConnection = establish_connection();
-    let new_feed_item =
-        NewFeedItem::new(feed.id, content.clone(), title.clone(), item.link.unwrap());
-    let insert_result = diesel::insert_into(feed_item::table)
-        .values(&new_feed_item)
-        .execute(&mut connection);
+    let existing_item: Vec<FeedItem> = feed_item::table
+        .filter(feed_id.eq(feed.id))
+        .filter(title.eq(&item_title))
+        .load(connection)
+        .unwrap();
 
-    log::info!("{:?}", insert_result);
+    if existing_item.is_empty() {
+        let new_feed_item = NewFeedItem::new(
+            feed.id,
+            content.clone(),
+            item_title.clone(),
+            item.link.unwrap(),
+        );
+        let insert_result = diesel::insert_into(feed_item::table)
+            .values(&new_feed_item)
+            .execute(connection);
+
+        log::info!("Insert Result: {:?}", insert_result);
+    } else {
+        log::info!("Item {} already exists.", item_title);
+    }
 }
 
 pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
     let mut connection: diesel::PgConnection = establish_connection();
-    let req_user_id = data.user_id.parse::<i32>().unwrap();
+    let req_user_id: i32 = data.user_id;
 
     let feeds: Vec<Feed> = feed::table
         .filter(user_id.eq(req_user_id))
@@ -67,7 +77,7 @@ pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder
         match result {
             Ok(channel) => {
                 for item in channel.into_items() {
-                    create_feed_item(item, &feed)
+                    create_feed_item(item, &feed, &mut connection)
                 }
             }
             Err(e) => log::error!("Could not get channel {}. Error: {}", feed.url, e),
diff --git a/vue/src/components/RssFeeds.vue b/vue/src/components/RssFeeds.vue
index 33d03e7..7cd1fa8 100644
--- a/vue/src/components/RssFeeds.vue
+++ b/vue/src/components/RssFeeds.vue
@@ -6,8 +6,9 @@ const feeds = ref([]);
 const buttonText = 'Sync'
 
 const fetchData = async () => {
+  const user_id = localStorage.getItem("user-id")
   try {
-    const response = await axios.get('feeds/get', {
+    const response = await axios.get("feeds/get/" + user_id, {
       headers: {
         'Content-Type': 'application/json',
         'user-token': localStorage.getItem("user-token")
@@ -22,7 +23,7 @@ const fetchData = async () => {
 async function sync() {
   try {
     const repsponse = await axios.post('feeds/sync', {
-      user_id: localStorage.getItem("user-id")
+      user_id: 1 //localStorage.getItem("user-id")
    },
    {
      headers: {
@@ -46,6 +47,7 @@ onMounted(() => {
 
   Feeds
 
+
 
   No unread articles.
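
A quick way to exercise the reworked route end to end: GET feeds/get/{user_id} now takes the user id as a path segment and returns the Articles JSON built from the database. The sketch below is not part of the patch; the base URL and port, the placeholder JWT, the example user id 1, and the tokio/reqwest/serde dependencies (reqwest with its "json" feature) are all assumptions. The Deserialize structs simply mirror the Serialize shapes the patch defines (Articles { feeds }, FeedAggregate { title, items }, Article { title, content }).

use serde::Deserialize;

// Client-side mirrors of the server's Serialize structs.
#[derive(Deserialize, Debug)]
struct Article {
    title: String,
    content: String,
}

#[derive(Deserialize, Debug)]
struct FeedAggregate {
    title: String,
    items: Vec<Article>,
}

#[derive(Deserialize, Debug)]
struct Articles {
    feeds: Vec<FeedAggregate>,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumed base URL and token; the patch only defines the
    // "/get/{user_id}" route and the "user-token" header convention.
    let articles: Articles = reqwest::Client::new()
        .get("http://localhost:8000/feeds/get/1")
        .header("user-token", "<jwt>")
        .send()
        .await?
        .json()
        .await?;

    // One line per feed, matching the FeedAggregate grouping the
    // handler builds from unread feed_item rows.
    for feed in articles.feeds {
        println!("{}: {} unread item(s)", feed.title, feed.items.len());
    }
    Ok(())
}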