Added readable mode for article content
parent 3d77c6f30f
commit ee80cbd53b
@@ -6,13 +6,13 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
reqwest = { version = "0.11", features = ["json"] }
reqwest = { version = "0.11", features = ["json", "blocking"] }
tokio = { version = "1", features = ["full"] }
rss = { version = "2.0.1" }
actix-web = "4.1.0"
actix-rt = "2.7.0"
futures = "0.3.24"
serde = {version = "1.0.144", features = ["alloc","derive","serde_derive"]}
serde = { version = "1.0.144", features = ["alloc", "derive", "serde_derive"] }
serde_derive = "1.0.145"
actix-service = "2.0.2"
diesel = { version = "2.0.2", features = ["postgres"]}

@@ -43,6 +43,7 @@ impl JwtToken {
        }
    }

    #[allow(dead_code)]
    pub fn decode_from_request(request: HttpRequest) -> Result<JwtToken, &'static str> {
        match request.headers().get("user-token") {
            Some(token) => JwtToken::decode(String::from(token.to_str().unwrap())),

@@ -3,4 +3,6 @@ pub mod login;
pub mod new_feed;
pub mod new_feed_item;
pub mod new_user;
pub mod readable;
pub mod url;
pub mod user;

@@ -0,0 +1,17 @@
use actix_web::{HttpResponse, Responder};
use reqwest::StatusCode;
use serde::Serialize;

#[derive(Serialize)]
pub struct Readable {
    pub content: String,
}

impl Responder for Readable {
    type Body = String;

    fn respond_to(self, _req: &actix_web::HttpRequest) -> actix_web::HttpResponse<Self::Body> {
        let body = serde_json::to_string(&self).unwrap();
        HttpResponse::with_body(StatusCode::OK, body)
    }
}

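A note on the Responder impl above: it serializes the struct by hand and returns it via HttpResponse::with_body, which leaves the Content-Type header unset. A minimal alternative sketch, not part of the commit (the handler name read_stub is made up), in which actix-web's web::Json wrapper handles both the serialization and the application/json header:

use actix_web::{web, Responder};
use serde::Serialize;

#[derive(Serialize)]
pub struct Readable {
    pub content: String,
}

// web::Json<T> already implements Responder for any T: Serialize,
// so no manual respond_to is needed and the JSON content type is set.
pub async fn read_stub() -> impl Responder {
    web::Json(Readable {
        content: String::from("<p>extracted article body</p>"),
    })
}
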
@@ -0,0 +1,6 @@
use serde::Deserialize;

#[derive(Deserialize)]
pub struct UrlJson {
    pub url: String,
}

@@ -4,7 +4,6 @@ use crate::models::feed_item::rss_feed_item::FeedItem;
use crate::reader::structs::feed::FeedAggregate;
use crate::schema::feed_item::{feed_id, read};
use crate::{
    auth::jwt::JwtToken,
    database::establish_connection,
    json_serialization::articles::Articles,
    schema::feed::{self, user_id},
@@ -17,8 +16,6 @@ use super::structs::article::Article;

pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder {
    let request = req.clone();
    let _token: JwtToken = JwtToken::decode_from_request(req).unwrap();

    let req_user_id = path.user_id;
    log::info!("Received user_id: {}", req_user_id);

@@ -27,9 +24,6 @@ pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder
        .filter(user_id.eq(req_user_id))
        .load::<Feed>(&mut connection)
        .unwrap();
    // let feed = feeds::get_feed("https://www.heise.de/rss/heise-Rubrik-Wissen.rdf")
    // .await
    // .unwrap();

    let mut feed_aggregates: Vec<FeedAggregate> = Vec::new();
    for feed in feeds {
@@ -50,6 +44,7 @@ pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder
            .map(|feed_item: FeedItem| Article {
                title: feed_item.title,
                content: feed_item.content,
                url: feed_item.url,
            })
            .collect();

@@ -60,38 +55,6 @@ pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder
            items: article_list,
        })
    }
    // let feed_title: String = feed.title.clone();
    // let feed_items: Vec<Article> = feed
    // .into_items()
    // .into_iter()
    // .map(|item| {
    // let title = item.title.unwrap();
    // let frag = Html::parse_fragment(&item.content.unwrap());
    // let mut content = "".to_string();
    // let frag_clone = frag.clone();
    // frag.tree.into_iter().for_each(|node| {
    // let selector_img = Selector::parse("img").unwrap();
    //
    // for element in frag_clone.select(&selector_img) {
    // if !content.starts_with("<img") {
    // content.push_str(&element.html());
    // content.push_str("<br>")
    // }
    // }
    // if let scraper::node::Node::Text(text) = node {
    // content.push_str(&text.text);
    // }
    // });
    // Article { title, content }
    // })
    // .collect();
    //
    // let feed_aggregates = vec![
    // (FeedAggregate {
    // title: feed_title,
    // items: feed_items,
    // }),
    // ];

    let articles: Articles = Articles {
        feeds: feed_aggregates,

@@ -4,6 +4,8 @@ use crate::views::path::Path;
mod add;
pub mod feeds;
mod get;
mod read;
mod scraper;
pub mod structs;
mod sync;

@@ -24,4 +26,8 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
        &base_path.define(String::from("/sync")),
        actix_web::Route::to(web::post(), sync::sync),
    );
    app.route(
        &base_path.define(String::from("/read")),
        actix_web::Route::to(web::post(), read::read),
    );
}

@@ -0,0 +1,19 @@
use actix_web::{web, HttpRequest, Responder};

use crate::json_serialization::{readable::Readable, url::UrlJson};

use super::scraper::content::do_throttled_request;

pub async fn read(_req: HttpRequest, data: web::Json<UrlJson>) -> impl Responder {
    let result = do_throttled_request(&data.url);

    let content = match result.await {
        Ok(cont) => cont,
        Err(e) => {
            log::error!("Could not scrap url {}", data.url);
            e.to_string()
        }
    };

    Readable { content }
}

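The read handler above accepts a JSON body matching UrlJson and replies with the serialized Readable struct. A small sketch for exercising the endpoint by hand, assuming the backend listens on localhost:8001 and mounts the route at /api/v1/article/read as the vite proxy configuration near the end of this diff suggests; the snippet is illustrative, not part of the commit, and relies on the blocking reqwest feature that this commit enables in Cargo.toml:

use std::collections::HashMap;

fn main() -> Result<(), reqwest::Error> {
    // Request body shape follows UrlJson: { "url": "..." }.
    let mut body = HashMap::new();
    body.insert("url", "https://example.com/some-article"); // placeholder URL

    // Host, port and path are assumptions taken from the vite proxy target.
    let response = reqwest::blocking::Client::new()
        .post("http://localhost:8001/api/v1/article/read")
        .json(&body)
        .send()?;

    // The Readable responder returns { "content": "<scraped html>" }.
    println!("{}", response.text()?);
    Ok(())
}

The frontend additionally sends a user-token header with this request; the handler shown above does not decode it, so the sketch omits it.
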
@@ -0,0 +1,8 @@
use reqwest::Error;

// Do a request for the given URL, with a minimum time between requests
// to avoid overloading the server.
pub async fn do_throttled_request(url: &str) -> Result<String, Error> {
    let response = reqwest::get(url).await?;
    response.text().await
}

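Despite its name and comment, do_throttled_request as committed performs a plain GET with no spacing between calls. A sketch of what enforcing a minimum gap could look like while keeping the same signature, assuming the tokio runtime the backend already pulls in; the 500 ms interval and the shared-state layout are illustrative, not part of the commit:

use std::sync::Mutex;
use std::time::{Duration, Instant};

use reqwest::Error;

// Minimum gap between outgoing requests; the value is an assumption.
const MIN_INTERVAL: Duration = Duration::from_millis(500);

// Instant at which the next request is allowed to start.
static NEXT_SLOT: Mutex<Option<Instant>> = Mutex::new(None);

pub async fn do_throttled_request(url: &str) -> Result<String, Error> {
    // Compute the wait while holding the lock, then release it before
    // sleeping so other tasks are not blocked on the mutex.
    let wait = {
        let mut next = NEXT_SLOT.lock().unwrap();
        let wait = match *next {
            Some(slot) => slot.saturating_duration_since(Instant::now()),
            None => Duration::ZERO,
        };
        *next = Some(Instant::now() + wait + MIN_INTERVAL);
        wait
    };

    if !wait.is_zero() {
        tokio::time::sleep(wait).await;
    }

    let response = reqwest::get(url).await?;
    response.text().await
}
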
@@ -0,0 +1 @@
pub mod content;

@@ -4,6 +4,7 @@ use serde::Serialize;
pub struct Article {
    pub title: String,
    pub content: String,
    pub url: String,
}

// impl Article {

@@ -7,9 +7,3 @@ pub struct FeedAggregate {
    pub title: String,
    pub items: Vec<Article>,
}
//
// impl Feed {
// pub fn new(title: String, items: Vec<Article>) -> Feed {
// Feed { title, items }
// }
// }

@@ -8,6 +8,7 @@
      "name": "rss",
      "version": "0.0.0",
      "dependencies": {
        "@mozilla/readability": "^0.4.4",
        "axios": "^1.5.0",
        "vue": "^3.3.4",
        "vue-router": "^4.2.4",
@@ -489,6 +490,14 @@
      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
      "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg=="
    },
    "node_modules/@mozilla/readability": {
      "version": "0.4.4",
      "resolved": "https://registry.npmjs.org/@mozilla/readability/-/readability-0.4.4.tgz",
      "integrity": "sha512-MCgZyANpJ6msfvVMi6+A0UAsvZj//4OHREYUB9f2087uXHVoU+H+SWhuihvb1beKpM323bReQPRio0WNk2+V6g==",
      "engines": {
        "node": ">=14.0.0"
      }
    },
    "node_modules/@nodelib/fs.scandir": {
      "version": "2.1.5",
      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",

@@ -10,6 +10,7 @@
    "format": "prettier --write src/"
  },
  "dependencies": {
    "@mozilla/readability": "^0.4.4",
    "axios": "^1.5.0",
    "vue": "^3.3.4",
    "vue-router": "^4.2.4",

@@ -14,7 +14,17 @@ a,
  color: hsla(160, 100%, 37%, 1);
  transition: 0.4s;
}

.message {
  background-color: #3498db;
  color: white;
  padding: 10px;
  border-radius: 4px;
  position: fixed;
  top: 10px;
  left: 50%;
  transform: translateX(-50%);
  z-index: 9999;
}
@media (hover: hover) {
  a:hover {
    background-color: hsla(160, 100%, 37%, 0.2);

@@ -1,10 +1,55 @@
<script setup>
import { ref, onMounted } from 'vue';
import axios from 'axios';
import { Readability } from '@mozilla/readability';

const showMessage = ref(false)
const feeds = ref([]);
const message = ref('')
const buttonText = 'Sync'

async function getReadable(feed, index) {
  try {
    const response = await axios.post("feeds/read", {
      url: feed.url
    },
    {
      headers: {
        'Content-Type': 'application/json',
        'user-token': localStorage.getItem("user-token")
      }
    })

    const doc = new DOMParser().parseFromString(response.data.content, 'text/html');
    const article = new Readability(doc).parse();
    feeds.value[index].content = article.content;
  } catch (error) {
    console.error('Error fetching data:', error)
    showMessageForXSeconds(error, 5)
  }
  // try {
  // const response = await fetch(feed.url);
  // const html = await response.text();
  // const doc = new DOMParser().parseFromString(html, 'text/html');
  // const article = new Readability(doc).parse();
  // feeds.value[index].content = article.content;
  // } catch (error) {
  // console.error(error);
  // showMessageForXSeconds(error, 5);
  // }
}

function showMessageForXSeconds(text, seconds) {
  message.value = text;
  showMessage.value = true;

  // Set a timeout to hide the message after x seconds
  setTimeout(() => {
    showMessage.value = false;
    message.value = '';
  }, seconds * 1000); // Convert seconds to milliseconds
}

const fetchData = async () => {
  const user_id = localStorage.getItem("user-id")
  try {
@@ -16,14 +61,15 @@ const fetchData = async () => {
    });
    feeds.value = response.data.feeds[0].items;
  } catch (error) {
    console.error('Error fetching data:', error);
    console.error('Error fetching data:', error)
    showMessageForXSeconds(error, 5)
  }
};

async function sync() {
  try {
    const repsponse = await axios.post('feeds/sync', {
      user_id: 1 //localStorage.getItem("user-id")
    const response = await axios.post('feeds/sync', {
      user_id: parseInt(localStorage.getItem("user-id"))
    },
    {
      headers: {
@@ -32,8 +78,12 @@ async function sync() {
      }
    })

    if (response.status == 200) {
      showMessageForXSeconds('Sync successful.', 5)
    }
  } catch (error) {
    console.error('Error sync', error)
    showMessageForXSeconds(error, 5)
  }
}

@@ -46,11 +96,14 @@ onMounted(() => {
<template>
  <div>
    <h1>Feeds</h1> <button @click="sync">{{ buttonText }}</button>
    <div v-if="showMessage" class="message">{{ message }}</div>
    <div id='aricle'>
      <p v-if="feeds.length == 0">No unread articles.</p>
      <template v-for="feed in feeds">
        <h2>{{ feed.title }}</h2>
        <p v-html='feed.content'></p>
      <template v-for="(feed, index) in feeds">
        <div v-bind:id="'article_' + index">
          <h2 @click="getReadable(feed, index)">{{ feed.title }}</h2>
          <p v-html='feed.content'></p>
        </div>
      </template>
    </div>
  </div>

@@ -34,7 +34,14 @@ export default defineConfig({
        secure: false,
        rewrite: (path) => path.replace(/^\/feeds\/sync/, ''),
      },
      '/feeds/read': {
        target: 'http://localhost:8001/api/v1/article/read',
        changeOrigin: true,
        secure: false,
        rewrite: (path) => path.replace(/^\/feeds\/read/, ''),
      },
    },

    cors: false
  },