Compare commits

..

2 Commits

Author SHA1 Message Date
Mathias Rothenhaeusler e56ba37e7e Update table feed items 2023-10-08 18:13:58 +02:00
Mathias Rothenhaeusler ec35f66a88 - Readme changes
- docker fixed version postgresql
- sync [WIP]
2023-10-07 19:10:04 +02:00
18 changed files with 183 additions and 21 deletions

View File

@ -1,4 +1,4 @@
## RSS-Reader
## RSS-Reader [WIP]
# Diesel Setup

View File

@ -2,7 +2,7 @@ version: "3.7"
services:
postgres:
container_name: "rss-postgres"
image: "postgres:latest"
image: "postgres:15"
ports:
- "5432:5432"
environment:

View File

@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
-- Drop both columns added by up.sql in a single atomic ALTER TABLE.
ALTER TABLE feed_item
    DROP COLUMN title,
    DROP COLUMN url;

View File

@ -0,0 +1,6 @@
-- Your SQL goes here
-- Add the columns with a DEFAULT: ADD COLUMN ... NOT NULL without a default
-- fails when feed_item already contains rows, so this keeps the migration
-- runnable against a populated database.
ALTER TABLE feed_item
    ADD COLUMN title VARCHAR NOT NULL DEFAULT '',
    ADD COLUMN url VARCHAR NOT NULL DEFAULT '';

View File

@ -1,4 +1,5 @@
pub mod articles;
pub mod login;
pub mod new_feed;
pub mod new_feed_item;
pub mod new_user;

View File

@ -0,0 +1,9 @@
use serde::Deserialize;
/// Deserialized JSON request body for creating a new feed item.
/// `Debug` added for consistency with the derive set used on the other
/// models touched in this change (see the `Feed` model's derives).
#[derive(Debug, Deserialize)]
pub struct NewFeedItemSchema {
    pub content: String,
    pub feed_id: i32,
    pub url: String,
    pub title: String,
}

View File

@ -20,7 +20,7 @@ async fn main() -> std::io::Result<()> {
let app = App::new()
.wrap_fn(|req, srv| {
let mut passed: bool;
let request_url: String = String::from(req.uri().path().clone());
let request_url: String = String::from(req.uri().path());
log::info!("Request Url: {}", request_url);
if req.path().contains("/article/") {

View File

@ -2,7 +2,7 @@ use super::super::user::rss_user::User;
use crate::schema::feed;
use diesel::{Associations, Identifiable, Queryable};
#[derive(Clone, Queryable, Identifiable, Associations)]
#[derive(Clone, Debug, Queryable, Identifiable, Associations)]
#[diesel(belongs_to(User))]
#[diesel(table_name=feed)]
pub struct Feed {

View File

@ -1 +1,2 @@
pub mod new_feed_item;
mod rss_feed_item;

View File

@ -0,0 +1,40 @@
use diesel::Insertable;

use crate::schema::feed_item;

/// Insertable row for the `feed_item` table.
///
/// Does not carry `id` or `read`; those columns are presumably filled by
/// database defaults on insert — confirm the schema defines them.
#[derive(Insertable, Clone, Debug)]
#[diesel(table_name=feed_item)]
pub struct NewFeedItem {
    pub feed_id: i32,
    pub content: String,
    pub title: String,
    pub url: String,
}

impl NewFeedItem {
    /// Field-for-field constructor.
    pub fn new(feed_id: i32, content: String, title: String, url: String) -> Self {
        Self {
            feed_id,
            content,
            title,
            url,
        }
    }
}

View File

@ -1 +1,15 @@
// NOTE(review): this model lives in the feed_item module and maps the
// `feed_item` table, yet the struct is named `Feed` — presumably it should
// be `FeedItem`. As written, `belongs_to(Feed)` can only resolve to this
// struct itself, since the parent `Feed` model is not imported here — confirm
// and fix.
use diesel::{Associations, Identifiable, Queryable};
use crate::schema::feed_item;
#[derive(Clone, Queryable, Identifiable, Associations)]
#[diesel(belongs_to(Feed))]
#[diesel(table_name=feed_item)]
// NOTE(review): diesel's `Queryable` maps fields by POSITION. The migrated
// schema in this same change lists the columns as
// (id, feed_id, content, read, title, url), but the fields below are declared
// as (id, feed_id, title, url, content, read). Because title/url/content are
// all Strings this still compiles, but loads would put column values into the
// wrong fields — verify against schema.rs and reorder.
pub struct Feed {
// Primary key of the feed_item row.
pub id: i32,
// Foreign key to the owning feed.
pub feed_id: i32,
pub title: String,
pub url: String,
pub content: String,
pub read: bool,
}

View File

@ -5,5 +5,6 @@ use rss::Channel;
/// Downloads `feed` over HTTP and parses the response body as an RSS channel.
/// Any request or parse failure is propagated as a boxed error.
pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
    let body = reqwest::get(feed).await?.bytes().await?;
    let parsed = Channel::read_from(&body[..])?;
    log::info!("{:?}", parsed);
    Ok(parsed)
}

View File

@ -22,6 +22,6 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
);
app.route(
&base_path.define(String::from("/sync")),
web::post().to(sync::sync),
actix_web::Route::to(web::post(), sync::sync),
);
}

View File

@ -7,9 +7,9 @@ pub struct Feed {
pub title: String,
pub items: Vec<Article>,
}
impl Feed {
    /// Builds the response-side feed wrapper from a title and its articles.
    pub fn new(title: String, items: Vec<Article>) -> Feed {
        Feed { title, items }
    }
}

View File

@ -1,28 +1,86 @@
use super::feeds;
use crate::{database::establish_connection, models::feed::rss_feed::Feed, schema::feed};
use actix_web::{HttpRequest, HttpResponse, Responder};
use crate::models::feed::rss_feed::Feed;
use crate::models::feed_item::new_feed_item::NewFeedItem;
use crate::{
database::establish_connection,
schema::{
feed::{self, user_id},
feed_item,
},
};
use actix_web::{web, HttpRequest, HttpResponse, Responder};
use diesel::prelude::*;
use futures::StreamExt;
use scraper::{Html, Selector};
use serde_derive::Deserialize;
// NOTE(review): this span is a diff view — the one-line signature directly
// below is the OLD version of `sync`; the JSON-body variant further down is
// its replacement.
pub async fn sync(req: HttpRequest) -> impl Responder {
// Request body: the id of the user whose feeds should be synced.
#[derive(Deserialize)]
pub struct JsonUser {
// Sent as a string by the frontend and parsed to i32 below.
user_id: String,
}
// Loads every feed owned by `data.user_id`, fetches each RSS channel,
// flattens item HTML into text (prefixed by any <img> tags), and inserts the
// result into `feed_item`. Responds 500 if the spawned task errors, 200
// otherwise.
pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
// NOTE(review): diesel calls block the thread; running them directly on the
// async executor can stall an actix worker — consider web::block.
let mut connection: diesel::PgConnection = establish_connection();
// NOTE(review): OLD diff line — superseded by the filtered query below.
let feed: Vec<Feed> = feed::table.load::<Feed>(&mut connection).unwrap();
// NOTE(review): unwrap() panics on a malformed user_id — this is
// user-supplied input and should map to a 400 response instead.
let req_user_id = data.user_id.parse::<i32>().unwrap();
let feed: Vec<Feed> = feed::table
.filter(user_id.eq(req_user_id))
.load::<Feed>(&mut connection)
.unwrap();
log::info!("Found {} feeds to sync.", feed.len());
// Create an asynchronous stream of Feed items
let feed_stream = futures::stream::iter(feed.clone().into_iter()).map(|feed| {
// Asynchronously fetch the feed_list for each feed
log::info!("processing feed: {:?}", feed);
async move {
// NOTE(review): OLD diff line — superseded by the typed variant below.
let feed_list = feeds::get_feed(&feed.url).await.unwrap();
// Process feed_list here
log::info!("start moved");
// NOTE(review): unwrap() aborts the whole task if a single fetch fails,
// skipping all remaining feeds.
let feed_list: rss::Channel = feeds::get_feed(&feed.url).await.unwrap();
log::info!("{:?}", feed_list);
feed_list.into_items().into_iter().for_each(|item| {
// NOTE(review): item.title/item.content unwrap() panic on items that omit
// either field, which RSS allows.
let title = item.title.unwrap();
let frag = Html::parse_fragment(&item.content.unwrap());
let mut content = "".to_string();
let frag_clone = frag.clone();
frag.tree.into_iter().for_each(|node| {
// NOTE(review): the selector is re-parsed and the whole fragment re-scanned
// for <img> on EVERY tree node — hoist both out of this closure; as written
// the work is O(nodes x imgs) and the guard only works because the imgs are
// prepended once.
let selector_img = Selector::parse("img").unwrap();
for element in frag_clone.select(&selector_img) {
if !content.starts_with("<img") {
content.push_str(&element.html());
content.push_str("<br>")
}
}
// Append the text of plain-text nodes to the flattened content.
if let scraper::node::Node::Text(text) = node {
content.push_str(&text.text);
}
});
// NOTE(review): opens a fresh DB connection per item — reuse one per task.
let mut connection: diesel::PgConnection = establish_connection();
// NOTE(review): the FEED's url is stored as the ITEM url — presumably this
// should be the item's own link; TODO confirm against the item model.
let new_feed_item =
NewFeedItem::new(feed.id, content.clone(), title.clone(), feed.url.clone());
let insert_result = diesel::insert_into(feed_item::table)
.values(&new_feed_item)
.execute(&mut connection);
log::info!("{:?}", insert_result);
});
}
});
// Execute the asynchronous stream
// NOTE(review): OLD diff line (fire-and-forget) — replaced by the awaited
// variant below. Note that spawn + immediate await adds no concurrency over
// awaiting `feed_stream.for_each(...)` directly.
tokio::spawn(feed_stream.for_each(|_| async {}));
let result = tokio::spawn(feed_stream.for_each(|_| async {})).await;
if result.is_err() {
log::error!("{:?}", result);
HttpResponse::InternalServerError()
} else {
HttpResponse::Ok()
}
}
// pub async fn sync(req: HttpRequest) -> impl Responder {
// let request = req.clone();
// let mut connection: diesel::PgConnection = establish_connection();

View File

@ -15,6 +15,8 @@ diesel::table! {
feed_id -> Int4,
content -> Text,
read -> Bool,
title -> Varchar,
url -> Varchar,
}
}

View File

@ -3,10 +3,11 @@ import { ref, onMounted } from 'vue';
import axios from 'axios';
const feeds = ref([]);
const buttonText = 'Sync'
const fetchData = async () => {
try {
const response = await axios.get('feeds', {
const response = await axios.get('feeds/get', {
headers: {
'Content-Type': 'application/json',
'user-token': localStorage.getItem("user-token")
@ -18,6 +19,23 @@ const fetchData = async () => {
}
};
// Trigger a server-side feed sync for the logged-in user.
// Fix: the response binding was misspelled ("repsponse") and never used,
// so the assignment is dropped entirely.
async function sync() {
  try {
    await axios.post('feeds/sync', {
      user_id: localStorage.getItem("user-id")
    },
    {
      headers: {
        'Content-Type': 'application/json',
        'user-token': localStorage.getItem("user-token")
      }
    })
  } catch (error) {
    console.error('Error sync', error)
  }
}
onMounted(() => {
fetchData();
});
@ -26,7 +44,7 @@ onMounted(() => {
<template>
<div>
<h1>Feeds</h1>
<h1>Feeds</h1> <button @click="sync">{{ buttonText }}</button>
<div id='aricle'>
<template v-for="feed in feeds">
<h2>{{ feed.title }}</h2>

View File

@ -28,6 +28,12 @@ export default defineConfig({
secure: false,
rewrite: (path) => path.replace(/^\/feeds\/get/, ''),
},
'/feeds/sync': {
target: 'http://localhost:8001/api/v1/article/sync',
changeOrigin: true,
secure: false,
rewrite: (path) => path.replace(/^\/feeds\/sync/, ''),
},
},
cors: false
},