Compare commits

No commits in common. "e56ba37e7ed60120728d8987679a70e18ac960b4" and "6822b5eab53c3309d93b491399df368f084cf6be" have entirely different histories.

e56ba37e7e ... 6822b5eab5

@@ -2,7 +2,7 @@ version: "3.7"
 services:
   postgres:
     container_name: "rss-postgres"
-    image: "postgres:15"
+    image: "postgres:latest"
     ports:
       - "5432:5432"
     environment:

@@ -1,6 +0,0 @@
--- This file should undo anything in `up.sql`
-ALTER TABLE feed_item
-DROP COLUMN title;
-
-ALTER TABLE feed_item
-DROP COLUMN url;

@@ -1,6 +0,0 @@
--- Your SQL goes here
-ALTER TABLE feed_item
-ADD COLUMN title VARCHAR NOT NULL;
-
-ALTER TABLE feed_item
-ADD COLUMN url VARCHAR NOT NULL;

@@ -1,5 +1,4 @@
 pub mod articles;
 pub mod login;
 pub mod new_feed;
-pub mod new_feed_item;
 pub mod new_user;

@@ -1,9 +0,0 @@
-use serde::Deserialize;
-
-#[derive(Deserialize)]
-pub struct NewFeedItemSchema {
-    pub content: String,
-    pub feed_id: i32,
-    pub url: String,
-    pub title: String,
-}

@@ -20,7 +20,7 @@ async fn main() -> std::io::Result<()> {
         let app = App::new()
             .wrap_fn(|req, srv| {
                 let mut passed: bool;
-                let request_url: String = String::from(req.uri().path());
+                let request_url: String = String::from(req.uri().path().clone());
 
                 log::info!("Request Url: {}", request_url);
                 if req.path().contains("/article/") {

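A side note on this hunk: `req.uri().path()` already returns a `&str`, and calling `.clone()` on a shared reference copies the reference rather than the string data, so both versions build the identical `String`. A tiny self-contained illustration (the path literal is hypothetical):

    fn main() {
        // Cloning a &str copies the reference, not the data; the two forms are equivalent.
        let path: &str = "/article/1";
        let a: String = String::from(path);
        let b: String = String::from(path.clone());
        assert_eq!(a, b);
        println!("equivalent: {a} == {b}");
    }
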
@@ -2,7 +2,7 @@ use super::super::user::rss_user::User;
 use crate::schema::feed;
 use diesel::{Associations, Identifiable, Queryable};
 
-#[derive(Clone, Debug, Queryable, Identifiable, Associations)]
+#[derive(Clone, Queryable, Identifiable, Associations)]
 #[diesel(belongs_to(User))]
 #[diesel(table_name=feed)]
 pub struct Feed {

@@ -1,2 +1 @@
-pub mod new_feed_item;
 mod rss_feed_item;

@@ -1,40 +0,0 @@
-// extern crate bcrypt;
-
-// use bcrypt::{hash, DEFAULT_COST};
-use diesel::Insertable;
-// use uuid::Uuid;
-
-use crate::schema::feed_item;
-
-#[derive(Insertable, Clone)]
-#[diesel(table_name=feed_item)]
-pub struct NewFeedItem {
-    pub feed_id: i32,
-    pub content: String,
-    pub title: String,
-    pub url: String,
-}
-
-impl NewFeedItem {
-    pub fn new(feed_id: i32, content: String, title: String, url: String) -> Self {
-        Self {
-            feed_id,
-            content,
-            title,
-            url,
-        }
-    }
-}
-
-// impl NewUser {
-//     pub fn new(content: String, title: String, url: String) -> NewUser {
-//         let hashed_password: String = hash(password.as_str(), DEFAULT_COST).unwrap();
-//         let uuid = Uuid::new_v4();
-//         NewUser {
-//             username,
-//             email,
-//             password: hashed_password,
-//             unique_id: uuid.to_string(),
-//         }
-//     }
-// }

@@ -1,15 +1 @@
-use diesel::{Associations, Identifiable, Queryable};
-
-use crate::schema::feed_item;
-
-#[derive(Clone, Queryable, Identifiable, Associations)]
-#[diesel(belongs_to(Feed))]
-#[diesel(table_name=feed_item)]
-pub struct Feed {
-    pub id: i32,
-    pub feed_id: i32,
-    pub title: String,
-    pub url: String,
-    pub content: String,
-    pub read: bool,
-}

@@ -5,6 +5,5 @@ use rss::Channel;
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
-    log::info!("{:?}", channel);
     Ok(channel)
 }

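For context, `rss::Channel::read_from` accepts any `BufRead`, which is why the handler can hand it the raw response bytes directly. A self-contained sketch of how this function might be driven (the feed URL and `main` wrapper are hypothetical, not part of the repository):

    use rss::Channel;
    use std::error::Error;

    // As in the hunk above: fetch the feed body and parse it from the byte slice.
    pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
        let content = reqwest::get(feed).await?.bytes().await?;
        let channel = Channel::read_from(&content[..])?;
        Ok(channel)
    }

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn Error>> {
        // Hypothetical feed URL, for illustration only.
        let channel = get_feed("https://example.com/rss.xml").await?;
        for item in channel.items() {
            println!("{:?}", item.title());
        }
        Ok(())
    }
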
@@ -22,6 +22,6 @@ pub fn feed_factory(app: &mut web::ServiceConfig) {
     );
     app.route(
         &base_path.define(String::from("/sync")),
-        actix_web::Route::to(web::post(), sync::sync),
+        web::post().to(sync::sync),
     );
 }

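Both sides of this hunk invoke the same method: `actix_web::Route::to(web::post(), sync::sync)` is the fully qualified spelling of `web::post().to(sync::sync)`, so the change is stylistic. A minimal sketch of the idiomatic form, with a hypothetical stub standing in for `sync::sync`:

    use actix_web::{web, HttpResponse, Responder};

    // Hypothetical stub standing in for sync::sync.
    async fn sync() -> impl Responder {
        HttpResponse::Ok()
    }

    // web::post() builds a Route guarded to POST; .to() attaches the handler.
    pub fn feed_factory(app: &mut web::ServiceConfig) {
        app.route("/sync", web::post().to(sync));
    }
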
@@ -7,9 +7,9 @@ pub struct Feed {
     pub title: String,
     pub items: Vec<Article>,
 }
-//
-// impl Feed {
-//     pub fn new(title: String, items: Vec<Article>) -> Feed {
-//         Feed { title, items }
-//     }
-// }
+
+impl Feed {
+    pub fn new(title: String, items: Vec<Article>) -> Feed {
+        Feed { title, items }
+    }
+}

@@ -1,85 +1,27 @@
 use super::feeds;
-use crate::models::feed::rss_feed::Feed;
-use crate::models::feed_item::new_feed_item::NewFeedItem;
-use crate::{
-    database::establish_connection,
-    schema::{
-        feed::{self, user_id},
-        feed_item,
-    },
-};
-use actix_web::{web, HttpRequest, HttpResponse, Responder};
+use crate::{database::establish_connection, models::feed::rss_feed::Feed, schema::feed};
+use actix_web::{HttpRequest, HttpResponse, Responder};
 use diesel::prelude::*;
 use futures::StreamExt;
-use scraper::{Html, Selector};
-use serde_derive::Deserialize;
-
-#[derive(Deserialize)]
-pub struct JsonUser {
-    user_id: String,
-}
 
-pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
+pub async fn sync(req: HttpRequest) -> impl Responder {
     let mut connection: diesel::PgConnection = establish_connection();
 
-    let req_user_id = data.user_id.parse::<i32>().unwrap();
-
-    let feed: Vec<Feed> = feed::table
-        .filter(user_id.eq(req_user_id))
-        .load::<Feed>(&mut connection)
-        .unwrap();
-
-    log::info!("Found {} feeds to sync.", feed.len());
+    let feed: Vec<Feed> = feed::table.load::<Feed>(&mut connection).unwrap();
 
     // Create an asynchronous stream of Feed items
     let feed_stream = futures::stream::iter(feed.clone().into_iter()).map(|feed| {
         // Asynchronously fetch the feed_list for each feed
-        log::info!("processing feed: {:?}", feed);
         async move {
-            log::info!("start moved");
-            let feed_list: rss::Channel = feeds::get_feed(&feed.url).await.unwrap();
-            log::info!("{:?}", feed_list);
-
-            feed_list.into_items().into_iter().for_each(|item| {
-                let title = item.title.unwrap();
-                let frag = Html::parse_fragment(&item.content.unwrap());
-                let mut content = "".to_string();
-                let frag_clone = frag.clone();
-                frag.tree.into_iter().for_each(|node| {
-                    let selector_img = Selector::parse("img").unwrap();
-
-                    for element in frag_clone.select(&selector_img) {
-                        if !content.starts_with("<img") {
-                            content.push_str(&element.html());
-                            content.push_str("<br>")
-                        }
-                    }
-                    if let scraper::node::Node::Text(text) = node {
-                        content.push_str(&text.text);
-                    }
-                });
-
-                let mut connection: diesel::PgConnection = establish_connection();
-                let new_feed_item =
-                    NewFeedItem::new(feed.id, content.clone(), title.clone(), feed.url.clone());
-                let insert_result = diesel::insert_into(feed_item::table)
-                    .values(&new_feed_item)
-                    .execute(&mut connection);
-
-                log::info!("{:?}", insert_result);
-            });
+            let feed_list = feeds::get_feed(&feed.url).await.unwrap();
+            // Process feed_list here
         }
     });
 
     // Execute the asynchronous stream
-    let result = tokio::spawn(feed_stream.for_each(|_| async {})).await;
+    tokio::spawn(feed_stream.for_each(|_| async {}));
 
-    if result.is_err() {
-        log::error!("{:?}", result);
-        HttpResponse::InternalServerError()
-    } else {
-        HttpResponse::Ok()
-    }
+    HttpResponse::Ok()
 }
 // pub async fn sync(req: HttpRequest) -> impl Responder {
 //     let request = req.clone();

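A note on the pattern shared by both sides of this hunk: `.map(|feed| async move { ... })` yields a stream whose items are unpolled futures, and `for_each(|_| async {})` discards each item, so the per-feed futures are dropped without ever running. To actually execute them, the `for_each` closure has to return the item so it gets awaited. A minimal sketch of that corrected pattern, using placeholder URLs rather than the repository's `Feed` rows:

    use futures::StreamExt;

    #[tokio::main]
    async fn main() {
        // Placeholder URLs standing in for the Feed rows loaded from Postgres.
        let urls = vec!["https://example.com/a.xml", "https://example.com/b.xml"];

        // map() yields a stream of futures; none of them runs until polled.
        let feed_stream = futures::stream::iter(urls.into_iter()).map(|url| async move {
            // Fetch and process one feed here.
            println!("processing {url}");
        });

        // for_each(|fut| fut) awaits each item in turn. for_each(|_| async {})
        // would drop the futures unpolled; buffer_unordered(n) would poll up to
        // n of them concurrently.
        tokio::spawn(feed_stream.for_each(|fut| fut))
            .await
            .expect("sync task panicked");
    }
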
@@ -15,8 +15,6 @@ diesel::table! {
         feed_id -> Int4,
         content -> Text,
         read -> Bool,
-        title -> Varchar,
-        url -> Varchar,
     }
 }
 
@@ -3,11 +3,10 @@ import { ref, onMounted } from 'vue';
 import axios from 'axios';
 
 const feeds = ref([]);
-const buttonText = 'Sync'
 
 const fetchData = async () => {
   try {
-    const response = await axios.get('feeds/get', {
+    const response = await axios.get('feeds', {
       headers: {
         'Content-Type': 'application/json',
         'user-token': localStorage.getItem("user-token")

@@ -19,23 +18,6 @@ const fetchData = async () => {
   }
 };
 
-async function sync() {
-  try {
-    const repsponse = await axios.post('feeds/sync', {
-      user_id: localStorage.getItem("user-id")
-    },
-    {
-      headers: {
-        'Content-Type': 'application/json',
-        'user-token': localStorage.getItem("user-token")
-      }
-    })
-
-  } catch (error) {
-    console.error('Error sync', error)
-  }
-}
-
 onMounted(() => {
   fetchData();
 });

@@ -44,7 +26,7 @@ onMounted(() => {
 
 <template>
   <div>
-    <h1>Feeds</h1> <button @click="sync">{{ buttonText }}</button>
+    <h1>Feeds</h1>
     <div id='aricle'>
       <template v-for="feed in feeds">
         <h2>{{ feed.title }}</h2>

@@ -28,12 +28,6 @@ export default defineConfig({
       secure: false,
       rewrite: (path) => path.replace(/^\/feeds\/get/, ''),
     },
-    '/feeds/sync': {
-      target: 'http://localhost:8001/api/v1/article/sync',
-      changeOrigin: true,
-      secure: false,
-      rewrite: (path) => path.replace(/^\/feeds\/sync/, ''),
-    },
   },
   cors: false
 },