Update table feed items
parent
ec35f66a88
commit
e56ba37e7e
@@ -0,0 +1,6 @@
+-- This file should undo anything in `up.sql`
+ALTER TABLE feed_item
+DROP COLUMN title;
+
+ALTER TABLE feed_item
+DROP COLUMN url;

@@ -0,0 +1,6 @@
+-- Your SQL goes here
+ALTER TABLE feed_item
+ADD COLUMN title VARCHAR NOT NULL;
+
+ALTER TABLE feed_item
+ADD COLUMN url VARCHAR NOT NULL;

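These two files follow Diesel's migration layout: `up.sql` applies the change, `down.sql` reverts it. As a minimal sketch of how such migrations get applied, assuming the diesel_migrations crate as a dependency (it is not shown in this commit) and a placeholder connection string:

use diesel::pg::PgConnection;
use diesel::Connection;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Compile the SQL files under ./migrations into the binary.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

fn main() {
    // The URL is illustrative; substitute the real DATABASE_URL.
    let mut conn = PgConnection::establish("postgres://localhost/rss").unwrap();
    // Runs every up.sql that has not been applied yet, in order.
    conn.run_pending_migrations(MIGRATIONS).unwrap();
}
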
@@ -1,4 +1,5 @@
 pub mod articles;
 pub mod login;
 pub mod new_feed;
+pub mod new_feed_item;
 pub mod new_user;

@@ -0,0 +1,9 @@
+use serde::Deserialize;
+
+#[derive(Deserialize)]
+pub struct NewFeedItemSchema {
+    pub content: String,
+    pub feed_id: i32,
+    pub url: String,
+    pub title: String,
+}

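NewFeedItemSchema mirrors the JSON body a client would send. A minimal sketch of that mapping, assuming serde_json is available (the struct is copied from the hunk above; the sample body is made up):

use serde::Deserialize;

#[derive(Deserialize)]
pub struct NewFeedItemSchema {
    pub content: String,
    pub feed_id: i32,
    pub url: String,
    pub title: String,
}

fn main() {
    // Field names in the JSON must match the struct fields exactly.
    let body = r#"{"content":"<p>hi</p>","feed_id":1,"url":"https://example.com/a","title":"A"}"#;
    let item: NewFeedItemSchema = serde_json::from_str(body).unwrap();
    assert_eq!(item.feed_id, 1);
}
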
@@ -2,7 +2,7 @@ use super::super::user::rss_user::User;
 use crate::schema::feed;
 use diesel::{Associations, Identifiable, Queryable};

-#[derive(Clone, Queryable, Identifiable, Associations)]
+#[derive(Clone, Debug, Queryable, Identifiable, Associations)]
 #[diesel(belongs_to(User))]
 #[diesel(table_name=feed)]
 pub struct Feed {

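The only change here is the added Debug derive, which is what lets later code format Feed values with {:?} (see the log::info! calls in the sync handler below). A stripped-down illustration, using a hypothetical two-field struct in place of the real one:

#[derive(Debug)]
struct Feed {
    id: i32,
    url: String,
}

fn main() {
    let feed = Feed { id: 1, url: "https://example.com/rss".to_string() };
    // {:?} compiles only because Feed derives Debug.
    println!("{:?}", feed);
}
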
@@ -1 +1,2 @@
+pub mod new_feed_item;
 mod rss_feed_item;

@@ -0,0 +1,40 @@
+// extern crate bcrypt;
+
+// use bcrypt::{hash, DEFAULT_COST};
+use diesel::Insertable;
+// use uuid::Uuid;
+
+use crate::schema::feed_item;
+
+#[derive(Insertable, Clone)]
+#[diesel(table_name=feed_item)]
+pub struct NewFeedItem {
+    pub feed_id: i32,
+    pub content: String,
+    pub title: String,
+    pub url: String,
+}
+
+impl NewFeedItem {
+    pub fn new(feed_id: i32, content: String, title: String, url: String) -> Self {
+        Self {
+            feed_id,
+            content,
+            title,
+            url,
+        }
+    }
+}
+
+// impl NewUser {
+//     pub fn new(content: String, title: String, url: String) -> NewUser {
+//         let hashed_password: String = hash(password.as_str(), DEFAULT_COST).unwrap();
+//         let uuid = Uuid::new_v4();
+//         NewUser {
+//             username,
+//             email,
+//             password: hashed_password,
+//             unique_id: uuid.to_string(),
+//         }
+//     }
+// }

@@ -1 +1,15 @@
 use diesel::{Associations, Identifiable, Queryable};
+
+use crate::schema::feed_item;
+
+#[derive(Clone, Queryable, Identifiable, Associations)]
+#[diesel(belongs_to(Feed))]
+#[diesel(table_name=feed_item)]
+pub struct Feed {
+    pub id: i32,
+    pub feed_id: i32,
+    pub title: String,
+    pub url: String,
+    pub content: String,
+    pub read: bool,
+}

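Note that this struct is named Feed even though it lives in rss_feed_item.rs and models a feed item backed by the feed_item table. A hypothetical helper (not part of the commit; the function and its name are illustrative) shows how rows would be loaded through it:

use diesel::prelude::*;

use crate::schema::feed_item;

// Illustrative only: fetch the unread items belonging to one feed.
pub fn unread_items(conn: &mut PgConnection, target_feed: i32) -> QueryResult<Vec<Feed>> {
    feed_item::table
        .filter(feed_item::feed_id.eq(target_feed))
        .filter(feed_item::read.eq(false))
        .load::<Feed>(conn)
}
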
@@ -5,5 +5,6 @@ use rss::Channel;
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
+    log::info!("{:?}", channel);
     Ok(channel)
 }

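The new log line prints the whole parsed channel. For reference, a minimal caller of get_feed might look like this (the feed URL is a placeholder; assumes a tokio runtime, as the rest of the crate does):

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fetch and parse the feed, then summarize it.
    let channel = get_feed("https://blog.rust-lang.org/feed.xml").await?;
    println!("{}: {} items", channel.title(), channel.items().len());
    Ok(())
}
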
@@ -7,9 +7,9 @@ pub struct Feed {
     pub title: String,
     pub items: Vec<Article>,
 }
-
-impl Feed {
-    pub fn new(title: String, items: Vec<Article>) -> Feed {
-        Feed { title, items }
-    }
-}
+//
+// impl Feed {
+//     pub fn new(title: String, items: Vec<Article>) -> Feed {
+//         Feed { title, items }
+//     }
+// }

@@ -1,12 +1,17 @@
 use super::feeds;
+use crate::models::feed::rss_feed::Feed;
+use crate::models::feed_item::new_feed_item::NewFeedItem;
 use crate::{
     database::establish_connection,
-    models::feed::rss_feed::Feed,
-    schema::feed::{self, user_id},
+    schema::{
+        feed::{self, user_id},
+        feed_item,
+    },
 };
 use actix_web::{web, HttpRequest, HttpResponse, Responder};
 use diesel::prelude::*;
+use futures::StreamExt;
 use scraper::{Html, Selector};
 use serde_derive::Deserialize;

 #[derive(Deserialize)]

@@ -18,26 +23,63 @@ pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder
     let mut connection: diesel::PgConnection = establish_connection();

     let req_user_id = data.user_id.parse::<i32>().unwrap();
     log::info!("{:?}", req_user_id);

     let feed: Vec<Feed> = feed::table
         .filter(user_id.eq(req_user_id))
         .load::<Feed>(&mut connection)
         .unwrap();

+    log::info!("Found {} feeds to sync.", feed.len());
+
     // Create an asynchronous stream of Feed items
     let feed_stream = futures::stream::iter(feed.clone().into_iter()).map(|feed| {
         // Asynchronously fetch the feed_list for each feed
+        log::info!("processing feed: {:?}", feed);
         async move {
-            let _feed_list = feeds::get_feed(&feed.url).await.unwrap();
-            // Process feed_list here
+            log::info!("start moved");
+            let feed_list: rss::Channel = feeds::get_feed(&feed.url).await.unwrap();
+            log::info!("{:?}", feed_list);
+
+            feed_list.into_items().into_iter().for_each(|item| {
+                let title = item.title.unwrap();
+                let frag = Html::parse_fragment(&item.content.unwrap());
+                let mut content = "".to_string();
+                let frag_clone = frag.clone();
+                frag.tree.into_iter().for_each(|node| {
+                    let selector_img = Selector::parse("img").unwrap();
+
+                    for element in frag_clone.select(&selector_img) {
+                        if !content.starts_with("<img") {
+                            content.push_str(&element.html());
+                            content.push_str("<br>")
+                        }
+                    }
+                    if let scraper::node::Node::Text(text) = node {
+                        content.push_str(&text.text);
+                    }
+                });
+
+                let mut connection: diesel::PgConnection = establish_connection();
+                let new_feed_item =
+                    NewFeedItem::new(feed.id, content.clone(), title.clone(), feed.url.clone());
+                let insert_result = diesel::insert_into(feed_item::table)
+                    .values(&new_feed_item)
+                    .execute(&mut connection);
+
+                log::info!("{:?}", insert_result);
+            });
         }
     });

     // Execute the asynchronous stream
-    tokio::spawn(feed_stream.for_each(|_| async {}));
-    HttpResponse::Ok()
+    let result = tokio::spawn(feed_stream.for_each(|_| async {})).await;
+
+    if result.is_err() {
+        log::error!("{:?}", result);
+        HttpResponse::InternalServerError()
+    } else {
+        HttpResponse::Ok()
+    }
 }
 // pub async fn sync(req: HttpRequest) -> impl Responder {
 //     let request = req.clone();

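The per-item loop above keeps at most one leading <img> tag plus the plain text of each item's HTML content before inserting the row. The same scraping step in isolation, as a runnable sketch (assumes only the scraper crate; the sample HTML is made up):

use scraper::{Html, Selector};

fn main() {
    let frag = Html::parse_fragment(r#"<p>Hello <img src="a.png"> world</p>"#);
    let mut content = String::new();

    // Keep the first <img> element, if any, as raw HTML.
    let selector_img = Selector::parse("img").unwrap();
    if let Some(img) = frag.select(&selector_img).next() {
        content.push_str(&img.html());
        content.push_str("<br>");
    }

    // Append the text nodes of the fragment.
    for text in frag.root_element().text() {
        content.push_str(text);
    }

    println!("{content}");
}
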
@@ -15,6 +15,8 @@ diesel::table! {
         feed_id -> Int4,
         content -> Text,
         read -> Bool,
+        title -> Varchar,
+        url -> Varchar,
     }
 }
