Change get articles to read from database instead of dummy data.
parent c8ca91e90b
commit 3d77c6f30f
@@ -2,11 +2,11 @@ use actix_web::{HttpResponse, Responder};
 use reqwest::StatusCode;
 use serde::Serialize;

-use crate::reader::structs::feed::Feed;
+use crate::reader::structs::feed::FeedAggregate;

 #[derive(Serialize)]
 pub struct Articles {
-    pub feeds: Vec<Feed>,
+    pub feeds: Vec<FeedAggregate>,
 }

 impl Responder for Articles {
@@ -3,3 +3,4 @@ pub mod login;
 pub mod new_feed;
 pub mod new_feed_item;
 pub mod new_user;
+pub mod user;
@@ -0,0 +1,6 @@
+use serde_derive::Deserialize;
+
+#[derive(Deserialize)]
+pub struct JsonUser {
+    pub user_id: i32,
+}
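The new JsonUser struct backs the two extractors this commit wires up later in the diff: web::Path<JsonUser> in the get handler and web::Json<JsonUser> in the sync handler. A minimal sketch of how actix-web fills it from a {user_id} path segment and from a JSON body (illustrative only, not part of the commit; the sketch uses the serde re-export of the derive, and the handler bodies are placeholders):

    use actix_web::{web, HttpResponse, Responder};
    use serde::Deserialize;

    #[derive(Deserialize)]
    pub struct JsonUser {
        pub user_id: i32,
    }

    // GET /feeds/get/{user_id}: for a struct, the field name must match the
    // path segment name ("user_id").
    async fn get(path: web::Path<JsonUser>) -> impl Responder {
        HttpResponse::Ok().body(format!("user_id from path: {}", path.user_id))
    }

    // POST /feeds/sync with a body such as {"user_id": 1}.
    async fn sync(data: web::Json<JsonUser>) -> impl Responder {
        HttpResponse::Ok().body(format!("user_id from body: {}", data.user_id))
    }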
@@ -1,2 +1,2 @@
 pub mod new_feed_item;
-mod rss_feed_item;
+pub mod rss_feed_item;
@@ -1,15 +1,35 @@
+use crate::models::feed::rss_feed::Feed;
 use diesel::{Associations, Identifiable, Queryable};

 use crate::schema::feed_item;

 #[derive(Clone, Queryable, Identifiable, Associations)]
 #[diesel(belongs_to(Feed))]
 #[diesel(table_name=feed_item)]
-pub struct Feed {
+pub struct FeedItem {
     pub id: i32,
     pub feed_id: i32,
-    pub title: String,
-    pub url: String,
     pub content: String,
     pub read: bool,
+    pub title: String,
+    pub url: String,
+}
+
+impl FeedItem {
+    pub fn new(
+        id: i32,
+        feed_id: i32,
+        title: String,
+        url: String,
+        content: String,
+        read: bool,
+    ) -> Self {
+        Self {
+            id,
+            feed_id,
+            title,
+            url,
+            content,
+            read,
+        }
+    }
 }
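Because FeedItem derives Queryable, Diesel maps result columns to struct fields by position rather than by name, so the new field order (id, feed_id, content, read, title, url) presumably mirrors the column order declared for feed_item in the crate::schema module; that file is not part of this diff. A hypothetical table! entry consistent with the struct would look roughly like this (column types are guesses, only the ordering matters here):

    // Hypothetical sketch; the real schema is generated by Diesel and not shown in this commit.
    diesel::table! {
        feed_item (id) {
            id -> Int4,
            feed_id -> Int4,
            content -> Text,
            read -> Bool,
            title -> Text,
            url -> Text,
        }
    }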
@@ -5,6 +5,6 @@ use rss::Channel;
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
-    log::info!("{:?}", channel);
+    log::debug!("{:?}", channel);
     Ok(channel)
 }
@@ -1,44 +1,101 @@
-use crate::{auth::jwt::JwtToken, reader::feeds, json_serialization::articles::Articles};
-use actix_web::{HttpRequest, Responder};
-use scraper::{Html, Selector };
+use crate::json_serialization::user::JsonUser;
+use crate::models::feed::rss_feed::Feed;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::reader::structs::feed::FeedAggregate;
+use crate::schema::feed_item::{feed_id, read};
+use crate::{
+    auth::jwt::JwtToken,
+    database::establish_connection,
+    json_serialization::articles::Articles,
+    schema::feed::{self, user_id},
+    schema::feed_item,
+};
+use actix_web::{web, HttpRequest, Responder};
+use diesel::prelude::*;

-use super::structs::{article::Article, feed::Feed};
+use super::structs::article::Article;

-pub async fn get(req: HttpRequest) -> impl Responder {
+pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder {
     let request = req.clone();
     let _token: JwtToken = JwtToken::decode_from_request(req).unwrap();

-    let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf").await.unwrap();
+    let req_user_id = path.user_id;
+    log::info!("Received user_id: {}", req_user_id);

-    let feed_title: String = feed.title.clone();
-    let feed_items: Vec<Article> = feed.into_items().into_iter().map(|item| {
-        let title = item.title.unwrap();
-        let frag = Html::parse_fragment(&item.content.unwrap());
-        let mut content = "".to_string();
-        let frag_clone = frag.clone();
-        frag.tree.into_iter().for_each(|node| {
-            let selector_img = Selector::parse("img").unwrap();
+    let mut connection: diesel::PgConnection = establish_connection();
+    let feeds: Vec<Feed> = feed::table
+        .filter(user_id.eq(req_user_id))
+        .load::<Feed>(&mut connection)
+        .unwrap();
+    // let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf")
+    //     .await
+    //     .unwrap();

-            for element in frag_clone.select(&selector_img) {
-                if !content.starts_with("<img") {
-                    content.push_str(&element.html());
-                    content.push_str("<br>")
-                }
-            }
-            if let scraper::node::Node::Text(text) = node {
-                content.push_str(&text.text);
-            }
+    let mut feed_aggregates: Vec<FeedAggregate> = Vec::new();
+    for feed in feeds {
+        let existing_item: Vec<FeedItem> = feed_item::table
+            .filter(feed_id.eq(feed.id))
+            .filter(read.eq(false))
+            .load(&mut connection)
+            .unwrap();

-        });
-        Article {
-            title,
-            content,
-        }
-    } ).collect();
+        log::info!(
+            "Load {} feed items for feed: {}",
+            existing_item.len(),
+            feed.url
+        );

-    let feeds = vec![(Feed {title: feed_title, items: feed_items})];
+        let article_list: Vec<Article> = existing_item
+            .into_iter()
+            .map(|feed_item: FeedItem| Article {
+                title: feed_item.title,
+                content: feed_item.content,
+            })
+            .collect();
+
+        log::info!("article list with {} items generated.", article_list.len());
+
+        feed_aggregates.push(FeedAggregate {
+            title: feed.title,
+            items: article_list,
+        })
+    }
+    // let feed_title: String = feed.title.clone();
+    // let feed_items: Vec<Article> = feed
+    //     .into_items()
+    //     .into_iter()
+    //     .map(|item| {
+    //         let title = item.title.unwrap();
+    //         let frag = Html::parse_fragment(&item.content.unwrap());
+    //         let mut content = "".to_string();
+    //         let frag_clone = frag.clone();
+    //         frag.tree.into_iter().for_each(|node| {
+    //             let selector_img = Selector::parse("img").unwrap();
+    //
+    //             for element in frag_clone.select(&selector_img) {
+    //                 if !content.starts_with("<img") {
+    //                     content.push_str(&element.html());
+    //                     content.push_str("<br>")
+    //                 }
+    //             }
+    //             if let scraper::node::Node::Text(text) = node {
+    //                 content.push_str(&text.text);
+    //             }
+    //         });
+    //         Article { title, content }
+    //     })
+    //     .collect();
+    //
+    // let feed_aggregates = vec![
+    //     (FeedAggregate {
+    //         title: feed_title,
+    //         items: feed_items,
+    //     }),
+    // ];

-    let articles: Articles = Articles { feeds };
+    let articles: Articles = Articles {
+        feeds: feed_aggregates,
+    };

     articles.respond_to(&request)
 }
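With this change the handler answers from the database: one FeedAggregate per stored feed, each carrying that feed's unread items mapped into Article values. A sketch of the JSON shape this serializes to, assuming Article exposes the title and content fields used in the map() above, derives Serialize (which the Serialize derive on Articles requires), and that serde_json is available; the exact field order may differ:

    use crate::json_serialization::articles::Articles;
    use crate::reader::structs::article::Article;
    use crate::reader::structs::feed::FeedAggregate;

    fn example_payload() -> String {
        let articles = Articles {
            feeds: vec![FeedAggregate {
                title: "Example feed".to_string(),
                items: vec![Article {
                    title: "Example article".to_string(),
                    content: "<p>example body</p>".to_string(),
                }],
            }],
        };
        // Yields roughly:
        // {"feeds":[{"title":"Example feed","items":[{"title":"Example article","content":"<p>example body</p>"}]}]}
        serde_json::to_string(&articles).unwrap()
    }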
@@ -13,7 +13,7 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
         backend: true,
     };
     app.route(
-        &base_path.define(String::from("/get")),
+        &base_path.define(String::from("/get/{user_id}")),
         web::get().to(get::get),
     );
     app.route(
@@ -3,7 +3,7 @@ use serde::Serialize;
 use super::article::Article;

 #[derive(Serialize)]
-pub struct Feed {
+pub struct FeedAggregate {
     pub title: String,
     pub items: Vec<Article>,
 }
@@ -1,6 +1,9 @@
 use super::feeds;
+use crate::json_serialization::user::JsonUser;
 use crate::models::feed::rss_feed::Feed;
 use crate::models::feed_item::new_feed_item::NewFeedItem;
+use crate::models::feed_item::rss_feed_item::FeedItem;
+use crate::schema::feed_item::{feed_id, title};
 use crate::{
     database::establish_connection,
     schema::{
@@ -12,15 +15,9 @@ use actix_web::{web, HttpRequest, HttpResponse, Responder};
 use diesel::prelude::*;
 use rss::Item;
 use scraper::{Html, Selector};
-use serde_derive::Deserialize;

-#[derive(Deserialize)]
-pub struct JsonUser {
-    user_id: String,
-}
-
-fn create_feed_item(item: Item, feed: &Feed) {
-    let title = item.title.unwrap();
+fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
+    let item_title = item.title.unwrap();
     let frag = Html::parse_fragment(&item.content.unwrap());
     let mut content = "".to_string();
     let frag_clone = frag.clone();
@@ -38,20 +35,33 @@ fn create_feed_item(item: Item, feed: &Feed) {
         }
     });

-    let mut connection: diesel::PgConnection = establish_connection();
-    let new_feed_item =
-        NewFeedItem::new(feed.id, content.clone(), title.clone(), item.link.unwrap());
-    let insert_result = diesel::insert_into(feed_item::table)
-        .values(&new_feed_item)
-        .execute(&mut connection);
+    let existing_item: Vec<FeedItem> = feed_item::table
+        .filter(feed_id.eq(feed.id))
+        .filter(title.eq(&item_title))
+        .load(connection)
+        .unwrap();

-    log::info!("{:?}", insert_result);
+    if existing_item.is_empty() {
+        let new_feed_item = NewFeedItem::new(
+            feed.id,
+            content.clone(),
+            item_title.clone(),
+            item.link.unwrap(),
+        );
+        let insert_result = diesel::insert_into(feed_item::table)
+            .values(&new_feed_item)
+            .execute(connection);
+
+        log::info!("Insert Result: {:?}", insert_result);
+    } else {
+        log::info!("Item {} already exists.", feed.title);
+    }
 }

 pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
     let mut connection: diesel::PgConnection = establish_connection();

-    let req_user_id = data.user_id.parse::<i32>().unwrap();
+    let req_user_id: i32 = data.user_id;

     let feeds: Vec<Feed> = feed::table
         .filter(user_id.eq(req_user_id))
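create_feed_item now reuses the caller's connection and only inserts when no row with the same feed_id and title exists, checking by loading the matching rows and testing for emptiness. The same duplicate check could be phrased as a single EXISTS query; a sketch under the same schema assumptions, not part of the commit:

    use diesel::dsl::exists;
    use diesel::prelude::*;

    // Sketch only: true when the given feed already stores an item with this title.
    fn item_already_exists(connection: &mut PgConnection, feed_id_value: i32, item_title: &str) -> bool {
        // dsl comes from the same Diesel table the commit imports as crate::schema::feed_item.
        use crate::schema::feed_item::dsl::{feed_id, feed_item, title};

        diesel::select(exists(
            feed_item
                .filter(feed_id.eq(feed_id_value))
                .filter(title.eq(item_title)),
        ))
        .get_result(connection)
        .unwrap_or(false)
    }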
@@ -67,7 +77,7 @@ pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
         match result {
             Ok(channel) => {
                 for item in channel.into_items() {
-                    create_feed_item(item, &feed)
+                    create_feed_item(item, &feed, &mut connection)
                 }
             }
             Err(e) => log::error!("Could not get channel {}. Error: {}", feed.url, e),
@@ -6,8 +6,9 @@ const feeds = ref([]);
 const buttonText = 'Sync'

 const fetchData = async () => {
+  const user_id = localStorage.getItem("user-id")
   try {
-    const response = await axios.get('feeds/get', {
+    const response = await axios.get("feeds/get/" + user_id, {
       headers: {
         'Content-Type': 'application/json',
         'user-token': localStorage.getItem("user-token")
@@ -22,7 +23,7 @@ const fetchData = async () => {
 async function sync() {
   try {
     const repsponse = await axios.post('feeds/sync', {
-      user_id: localStorage.getItem("user-id")
+      user_id: 1 //localStorage.getItem("user-id")
     },
     {
       headers: {
@@ -46,6 +47,7 @@ onMounted(() => {
   <div>
     <h1>Feeds</h1> <button @click="sync">{{ buttonText }}</button>
     <div id='aricle'>
+      <p v-if="feeds.length == 0">No unread articles.</p>
      <template v-for="feed in feeds">
       <h2>{{ feed.title }}</h2>
       <p v-html='feed.content'></p>