Change get articles to read from the database instead of dummy data.
parent c8ca91e90b
commit 3d77c6f30f

@@ -2,11 +2,11 @@ use actix_web::{HttpResponse, Responder};
 use reqwest::StatusCode;
 use serde::Serialize;
 
-use crate::reader::structs::feed::Feed;
+use crate::reader::structs::feed::FeedAggregate;
 
 #[derive(Serialize)]
 pub struct Articles {
-    pub feeds: Vec<Feed>,
+    pub feeds: Vec<FeedAggregate>,
 }
 
 impl Responder for Articles {

@@ -3,3 +3,4 @@ pub mod login;
 pub mod new_feed;
 pub mod new_feed_item;
 pub mod new_user;
+pub mod user;

@@ -0,0 +1,6 @@
+use serde_derive::Deserialize;
+
+#[derive(Deserialize)]
+pub struct JsonUser {
+    pub user_id: i32,
+}

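Note: the new JsonUser struct (json_serialization/user.rs per the import paths) backs two different actix-web extractors in this commit — it is deserialized from the {user_id} path segment in the get handler and from the JSON request body in sync. A minimal sketch of both bindings, assuming only the struct above; the handler bodies here are placeholders, not the project's code:

// Sketch only: how actix-web binds JsonUser in the two handlers.
use actix_web::{web, HttpResponse, Responder};
use serde_derive::Deserialize;

#[derive(Deserialize)]
pub struct JsonUser {
    pub user_id: i32,
}

// GET /feeds/get/42 -> path.user_id == 42; the field name must match
// the {user_id} placeholder in the route definition.
async fn get(path: web::Path<JsonUser>) -> impl Responder {
    HttpResponse::Ok().body(format!("user {}", path.user_id))
}

// POST /feeds/sync with body {"user_id": 42} -> data.user_id == 42
async fn sync(data: web::Json<JsonUser>) -> impl Responder {
    HttpResponse::Ok().body(format!("user {}", data.user_id))
}
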
@@ -1,2 +1,2 @@
 pub mod new_feed_item;
-mod rss_feed_item;
+pub mod rss_feed_item;

@@ -1,15 +1,35 @@
+use crate::models::feed::rss_feed::Feed;
 use diesel::{Associations, Identifiable, Queryable};
 
 use crate::schema::feed_item;
 
 #[derive(Clone, Queryable, Identifiable, Associations)]
 #[diesel(belongs_to(Feed))]
 #[diesel(table_name=feed_item)]
-pub struct Feed {
+pub struct FeedItem {
     pub id: i32,
     pub feed_id: i32,
-    pub title: String,
-    pub url: String,
     pub content: String,
     pub read: bool,
+    pub title: String,
+    pub url: String,
 }
+
+impl FeedItem {
+    pub fn new(
+        id: i32,
+        feed_id: i32,
+        title: String,
+        url: String,
+        content: String,
+        read: bool,
+    ) -> Self {
+        Self {
+            id,
+            feed_id,
+            title,
+            url,
+            content,
+            read,
+        }
+    }
+}

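Note: Diesel's Queryable maps result columns to struct fields by position, not by name, so the field reorder above (content and read now before title and url) is only safe if the generated schema lists the columns in the same order. A hypothetical sketch of the feed_item table definition this model assumes — the real src/schema.rs is generated by diesel print-schema and is not shown in this diff, and the column types here are guesses:

// Hypothetical schema sketch; the column order must mirror FeedItem's field order.
diesel::table! {
    feed_item (id) {
        id -> Int4,
        feed_id -> Int4,
        content -> Text,
        read -> Bool,
        title -> Text,
        url -> Text,
    }
}
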
@@ -5,6 +5,6 @@ use rss::Channel;
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
-    log::info!("{:?}", channel);
+    log::debug!("{:?}", channel);
     Ok(channel)
 }

@@ -1,44 +1,101 @@
-use crate::{auth::jwt::JwtToken, reader::feeds, json_serialization::articles::Articles};
-use actix_web::{HttpRequest, Responder};
-use scraper::{Html, Selector};
+use crate::json_serialization::user::JsonUser;
+use crate::models::feed::rss_feed::Feed;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::reader::structs::feed::FeedAggregate;
+use crate::schema::feed_item::{feed_id, read};
+use crate::{
+    auth::jwt::JwtToken,
+    database::establish_connection,
+    json_serialization::articles::Articles,
+    schema::feed::{self, user_id},
+    schema::feed_item,
+};
+use actix_web::{web, HttpRequest, Responder};
+use diesel::prelude::*;
 
-use super::structs::{article::Article, feed::Feed};
+use super::structs::article::Article;
 
-pub async fn get(req: HttpRequest) -> impl Responder {
+pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder {
     let request = req.clone();
     let _token: JwtToken = JwtToken::decode_from_request(req).unwrap();
 
-    let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf").await.unwrap();
+    let req_user_id = path.user_id;
+    log::info!("Received user_id: {}", req_user_id);
 
-    let feed_title: String = feed.title.clone();
-    let feed_items: Vec<Article> = feed.into_items().into_iter().map(|item| {
-        let title = item.title.unwrap();
-        let frag = Html::parse_fragment(&item.content.unwrap());
-        let mut content = "".to_string();
-        let frag_clone = frag.clone();
-        frag.tree.into_iter().for_each(|node| {
-            let selector_img = Selector::parse("img").unwrap();
+    let mut connection: diesel::PgConnection = establish_connection();
+    let feeds: Vec<Feed> = feed::table
+        .filter(user_id.eq(req_user_id))
+        .load::<Feed>(&mut connection)
+        .unwrap();
+    // let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf")
+    //     .await
+    //     .unwrap();
 
-            for element in frag_clone.select(&selector_img) {
-                if !content.starts_with("<img") {
-                    content.push_str(&element.html());
-                    content.push_str("<br>")
-                }
-            }
-            if let scraper::node::Node::Text(text) = node {
-                content.push_str(&text.text);
-            }
+    let mut feed_aggregates: Vec<FeedAggregate> = Vec::new();
+    for feed in feeds {
+        let existing_item: Vec<FeedItem> = feed_item::table
+            .filter(feed_id.eq(feed.id))
+            .filter(read.eq(false))
+            .load(&mut connection)
+            .unwrap();
 
-        });
-        Article {
-            title,
-            content,
-        }
-    }).collect();
+        log::info!(
+            "Load {} feed items for feed: {}",
+            existing_item.len(),
+            feed.url
+        );
 
-    let feeds = vec![(Feed {title: feed_title, items: feed_items})];
+        let article_list: Vec<Article> = existing_item
+            .into_iter()
+            .map(|feed_item: FeedItem| Article {
+                title: feed_item.title,
+                content: feed_item.content,
+            })
+            .collect();
+
+        log::info!("article list with {} items generated.", article_list.len());
+
+        feed_aggregates.push(FeedAggregate {
+            title: feed.title,
+            items: article_list,
+        })
+    }
+    // let feed_title: String = feed.title.clone();
+    // let feed_items: Vec<Article> = feed
+    //     .into_items()
+    //     .into_iter()
+    //     .map(|item| {
+    //         let title = item.title.unwrap();
+    //         let frag = Html::parse_fragment(&item.content.unwrap());
+    //         let mut content = "".to_string();
+    //         let frag_clone = frag.clone();
+    //         frag.tree.into_iter().for_each(|node| {
+    //             let selector_img = Selector::parse("img").unwrap();
+    //
+    //             for element in frag_clone.select(&selector_img) {
+    //                 if !content.starts_with("<img") {
+    //                     content.push_str(&element.html());
+    //                     content.push_str("<br>")
+    //                 }
+    //             }
+    //             if let scraper::node::Node::Text(text) = node {
+    //                 content.push_str(&text.text);
+    //             }
+    //         });
+    //         Article { title, content }
+    //     })
+    //     .collect();
+    //
+    // let feed_aggregates = vec![
+    //     (FeedAggregate {
+    //         title: feed_title,
+    //         items: feed_items,
+    //     }),
+    // ];
 
-    let articles: Articles = Articles { feeds };
+    let articles: Articles = Articles {
+        feeds: feed_aggregates,
+    };
 
     articles.respond_to(&request)
 }

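Note: both Diesel queries in the new handler call .unwrap(), so an unreachable database panics the worker. A hedged alternative sketch (not part of this commit) that logs the error and degrades to an empty list instead:

// Sketch only: the same feeds query as above, with a logged fallback.
let feeds: Vec<Feed> = feed::table
    .filter(user_id.eq(req_user_id))
    .load::<Feed>(&mut connection)
    .unwrap_or_else(|e| {
        log::error!("Could not load feeds for user {}: {}", req_user_id, e);
        Vec::new()
    });
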
@@ -13,7 +13,7 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
         backend: true,
     };
     app.route(
-        &base_path.define(String::from("/get")),
+        &base_path.define(String::from("/get/{user_id}")),
         web::get().to(get::get),
     );
     app.route(

@@ -3,7 +3,7 @@ use serde::Serialize;
 use super::article::Article;
 
 #[derive(Serialize)]
-pub struct Feed {
+pub struct FeedAggregate {
     pub title: String,
     pub items: Vec<Article>,
 }

@@ -1,6 +1,9 @@
 use super::feeds;
+use crate::json_serialization::user::JsonUser;
 use crate::models::feed::rss_feed::Feed;
 use crate::models::feed_item::new_feed_item::NewFeedItem;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::schema::feed_item::{feed_id, title};
 use crate::{
     database::establish_connection,
     schema::{

@@ -12,15 +15,9 @@ use actix_web::{web, HttpRequest, HttpResponse, Responder};
 use diesel::prelude::*;
 use rss::Item;
 use scraper::{Html, Selector};
-use serde_derive::Deserialize;
-
-#[derive(Deserialize)]
-pub struct JsonUser {
-    user_id: String,
-}
 
-fn create_feed_item(item: Item, feed: &Feed) {
-    let title = item.title.unwrap();
+fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
+    let item_title = item.title.unwrap();
     let frag = Html::parse_fragment(&item.content.unwrap());
     let mut content = "".to_string();
     let frag_clone = frag.clone();

@@ -38,20 +35,33 @@ fn create_feed_item(item: Item, feed: &Feed) {
         }
     });
 
-    let mut connection: diesel::PgConnection = establish_connection();
-    let new_feed_item =
-        NewFeedItem::new(feed.id, content.clone(), title.clone(), item.link.unwrap());
-    let insert_result = diesel::insert_into(feed_item::table)
-        .values(&new_feed_item)
-        .execute(&mut connection);
+    let existing_item: Vec<FeedItem> = feed_item::table
+        .filter(feed_id.eq(feed.id))
+        .filter(title.eq(&item_title))
+        .load(connection)
+        .unwrap();
 
-    log::info!("{:?}", insert_result);
+    if existing_item.is_empty() {
+        let new_feed_item = NewFeedItem::new(
+            feed.id,
+            content.clone(),
+            item_title.clone(),
+            item.link.unwrap(),
+        );
+        let insert_result = diesel::insert_into(feed_item::table)
+            .values(&new_feed_item)
+            .execute(connection);
+
+        log::info!("Insert Result: {:?}", insert_result);
+    } else {
+        log::info!("Item {} already exists.", feed.title);
+    }
 }
 
 pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
+    let mut connection: diesel::PgConnection = establish_connection();
 
-    let req_user_id = data.user_id.parse::<i32>().unwrap();
+    let req_user_id: i32 = data.user_id;
 
     let feeds: Vec<Feed> = feed::table
         .filter(user_id.eq(req_user_id))

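Note: the select-then-insert in create_feed_item can still insert duplicates if two syncs run concurrently. Assuming a unique index on feed_item (feed_id, title) — which this diff does not show — Diesel's PostgreSQL upsert support would collapse the check and the insert into a single statement. A sketch:

// Sketch only, assuming UNIQUE (feed_id, title) on the feed_item table:
// ON CONFLICT DO NOTHING turns a duplicate item into a silent no-op.
let insert_result = diesel::insert_into(feed_item::table)
    .values(&new_feed_item)
    .on_conflict_do_nothing()
    .execute(connection);
log::info!("Insert Result: {:?}", insert_result);
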
@@ -67,7 +77,7 @@ pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
     match result {
         Ok(channel) => {
             for item in channel.into_items() {
-                create_feed_item(item, &feed)
+                create_feed_item(item, &feed, &mut connection)
             }
         }
         Err(e) => log::error!("Could not get channel {}. Error: {}", feed.url, e),

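Note: sync now opens one PgConnection via establish_connection and threads it through create_feed_item, but each request still opens its own connection. A common next step — an assumption, not something this commit does — is a shared r2d2 pool passed through actix-web app data. A minimal sketch, assuming the diesel crate's r2d2 feature:

// Sketch only: a shared Diesel r2d2 pool instead of one connection per request.
use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool};

pub type DbPool = Pool<ConnectionManager<PgConnection>>;

pub fn build_pool(database_url: &str) -> DbPool {
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    Pool::builder()
        .build(manager)
        .expect("could not build database connection pool")
}
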
@@ -6,8 +6,9 @@ const feeds = ref([]);
 const buttonText = 'Sync'
 
 const fetchData = async () => {
+  const user_id = localStorage.getItem("user-id")
   try {
-    const response = await axios.get('feeds/get', {
+    const response = await axios.get("feeds/get/" + user_id, {
       headers: {
         'Content-Type': 'application/json',
         'user-token': localStorage.getItem("user-token")

@@ -22,7 +23,7 @@ const fetchData = async () => {
 async function sync() {
   try {
     const repsponse = await axios.post('feeds/sync', {
-      user_id: localStorage.getItem("user-id")
+      user_id: 1 //localStorage.getItem("user-id")
     },
     {
       headers: {

@@ -46,6 +47,7 @@ onMounted(() => {
   <div>
     <h1>Feeds</h1> <button @click="sync">{{ buttonText }}</button>
     <div id='aricle'>
+      <p v-if="feeds.length == 0">No unread articles.</p>
       <template v-for="feed in feeds">
         <h2>{{ feed.title }}</h2>
         <p v-html='feed.content'></p>