Compare commits

No commits in common. "5a18e5728ed89a9ecb08ab6ca2c862fa4b92bcf5" and "a2e2ff141e367db409eb16a37ba95c085b6c4cd0" have entirely different histories.

5a18e5728e ... a2e2ff141e
.gitignore
@@ -1,7 +0,0 @@
-.env
-target/
-tests/
-Dockerfile
-scripts/
-migrations/
-
File diff suppressed because it is too large.
Cargo.toml
@@ -3,13 +3,6 @@ name = "rss-reader"
 version = "0.1.0"
 edition = "2021"
 
-[lib]
-path = "src/lib.rs"
-
-[[bin]]
-path = "src/main.rs"
-name = "rss-reader"
-
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
@@ -22,27 +15,19 @@ futures = "0.3.24"
 serde = { version = "1.0.144", features = ["alloc", "derive", "serde_derive"] }
 serde_derive = "1.0.145"
 actix-service = "2.0.2"
-diesel = { version = "2.0.2", features = ["postgres", "chrono", "r2d2"] }
+diesel = { version = "2.0.2", features = ["postgres", "chrono"] }
 dotenv = "0.15.0"
 bcrypt = "0.13.0"
 uuid = {version = "1.2.1", features=["serde", "v4"]}
 jwt = "0.16.0"
 hmac = "0.12.1"
 sha2 = "0.10.6"
+log = "0.4.17"
+env_logger = "0.9.3"
 scraper = "0.14.0"
 actix-cors = "0.6.4"
 chrono = { version = "0.4.31", features = ["serde"] }
 dateparser = "0.2.0"
-tracing-appender = "0.2.3"
-once_cell = "1.19.0"
-secrecy = { version = "0.8.0", features = ["serde"] }
-tracing-actix-web = "0.7.10"
-tracing-subscriber = { version = "0.3.18", features = ["registry", "env-filter"] }
-tracing-log = "0.2.0"
-config = "0.14.0"
-diesel-connection = "4.1.0"
-tracing = { version = "0.1.40", features = ["log"] }
-tracing-bunyan-formatter = "0.3.9"
 
 [dependencies.serde_json]
 version = "1.0.86"
Dockerfile
@@ -1,41 +0,0 @@
-FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef
-WORKDIR /app
-
-RUN apt update && apt install lld clang -y
-
-FROM chef as planner
-COPY . .
-RUN cargo chef prepare --recipe-path recipe.json
-
-FROM chef as builder
-COPY --from=planner /app/recipe.json recipe.json
-
-RUN cargo chef cook --release --recipe-path recipe.json
-
-COPY . .
-RUN cargo build --release --bin rss-reader
-RUN cargo install diesel_cli --no-default-features --features postgres
-
-# Runtime stage
-FROM debian:bookworm-slim AS runtime
-WORKDIR /app
-# Install OpenSSL - it is dynamically linked by some of our dependencies
-# Install ca-certificates - it is needed to verify TLS certificates
-# when establishing HTTPS connections
-RUN apt-get update -y \
-    && apt-get install -y openssl ca-certificates pkg-config\
-    && apt-get install -y libpq5 \
-    && apt-get autoremove -y \
-    && apt-get clean -y \
-    && rm -rf /var/lib/apt/lists/*
-
-# Copy diesel_cli from builder to runtime
-COPY --from=builder /usr/local/cargo/bin/diesel /usr/local/cargo/bin/diesel
-
-COPY --from=builder /app/target/release/rss-reader rss-reader
-
-EXPOSE 8001
-# COPY configuration configuration
-# ENV APP_ENVIRONMENT production
-# ENTRYPOINT ["./rss-reader"]
-ENTRYPOINT ["sh", "-c", "/app/rss-reader && diesel migration run"]
configuration/base.yaml
@@ -1,8 +0,0 @@
-application:
-  port: 8001
-database:
-  host: "localhost"
-  port: 5432
-  username: "admin"
-  password: "secret+123"
-  database_name: "rss"
configuration/local.yaml
@@ -1,2 +0,0 @@
-application:
-  host: 127.0.0.1
configuration/production.yaml
@@ -1,2 +0,0 @@
-application:
-  host: 0.0.0.0
docker-compose.yml
@@ -1,17 +1,6 @@
 version: "3.7"
 services:
-
-  # vue-app:
-  #   build:
-  #     context: ./vue/
-  #     dockerfile: Dockerfile
-  #   ports:
-  #     - "8080:8080" # Adjust the port as needed for your Rust application
-  #   networks:
-  #     - app-network
-
   postgres:
-    restart: always
     container_name: "rss-postgres"
     image: "postgres:15"
     ports:
@@ -22,23 +11,6 @@ services:
       - "POSTGRES_PASSWORD=secret+123"
     volumes:
       - postgres_data:/var/lib/postgresql/data
-    networks:
-      - app-network
-
-  # rust-app:
-  #   build:
-  #     context: . # Specify the path to your Rust application's Dockerfile
-  #   dockerfile: Dockerfile
-  #   ports:
-  #     - "8001:8001" # Adjust the port as needed for your Rust application
-  #   depends_on:
-  #     - postgres
-  #   networks:
-  #     - app-network
-
-networks:
-  app-network:
-    driver: bridge
-
 volumes:
   postgres_data:
src/auth/mod.rs
@@ -4,7 +4,6 @@ pub mod processes;
 use crate::auth::processes::check_password;
 use crate::auth::processes::extract_header_token;
 
-#[tracing::instrument(name = "Process token")]
 pub fn process_token(request: &ServiceRequest) -> Result<String, &'static str> {
     match extract_header_token(request) {
         Ok(token) => check_password(token),
src/auth/processes.rs
@@ -1,19 +1,21 @@
 use super::jwt;
 use actix_web::dev::ServiceRequest;
-use secrecy::{ExposeSecret, Secret};
 
-pub fn check_password(password: Secret<String>) -> Result<String, &'static str> {
-    match jwt::JwtToken::decode(password.expose_secret().to_string()) {
+pub fn check_password(password: String) -> Result<String, &'static str> {
+    match jwt::JwtToken::decode(password) {
         Ok(_token) => Ok(String::from("passed")),
         Err(message) => Err(message),
     }
 }
 
-#[tracing::instrument(name = "Extract Header Token")]
-pub fn extract_header_token(request: &ServiceRequest) -> Result<Secret<String>, &'static str> {
+pub fn extract_header_token(request: &ServiceRequest) -> Result<String, &'static str> {
+    log::info!("Request: {:?}", request);
     match request.headers().get("user-token") {
         Some(token) => match token.to_str() {
-            Ok(processed_password) => Ok(Secret::new(String::from(processed_password))),
+            Ok(processed_password) => {
+                log::info!("Token provided: {}", processed_password);
+                Ok(String::from(processed_password))
+            }
             Err(_processed_password) => Err("there was an error processing token"),
         },
         None => Err("there is no token"),
@@ -23,7 +25,6 @@ pub fn extract_header_token(request: &ServiceRequest) -> Result<Secret<String>,
 #[cfg(test)]
 mod processes_test {
     use actix_web::test::TestRequest;
-    use secrecy::{ExposeSecret, Secret};
 
     use crate::auth::jwt::JwtToken;
 
@@ -31,7 +32,7 @@ mod processes_test {
 
     #[test]
     fn check_correct_password() {
-        let password_string: Secret<String> = Secret::new(JwtToken::encode(32));
+        let password_string: String = JwtToken::encode(32);
 
         let result = check_password(password_string);
 
@@ -43,7 +44,7 @@ mod processes_test {
 
     #[test]
     fn incorrect_check_password() {
-        let password: Secret<String> = Secret::new(String::from("test"));
+        let password: String = String::from("test");
 
         match check_password(password) {
             Err(message) => assert_eq!("could not decode token", message),
@@ -58,7 +59,7 @@ mod processes_test {
             .to_srv_request();
 
         match super::extract_header_token(&request) {
-            Ok(processed_password) => assert_eq!("token", processed_password.expose_secret()),
+            Ok(processed_password) => assert_eq!("token", processed_password),
             _ => panic!("failed extract_header_token"),
         }
     }
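For context on the secrecy types the left-hand side used here (the crate is also dropped from Cargo.toml in this compare), a minimal sketch of the wrapper, assuming secrecy 0.8 as pinned on the old side; the value names are illustrative:

use secrecy::{ExposeSecret, Secret};

fn main() {
    let token = Secret::new(String::from("user-token-value"));
    // Debug formatting is redacted, so the wrapped value cannot leak into logs.
    println!("{:?}", token);
    // The inner value has to be requested explicitly.
    assert_eq!(token.expose_secret(), "user-token-value");
}

The new side trades that protection for plain String handling; note that the new extract_header_token now logs the raw token via log::info!.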
src/configuration.rs
@@ -1,118 +0,0 @@
-use config::{Config, ConfigError};
-use secrecy::{ExposeSecret, Secret};
-
-#[derive(serde::Deserialize, Debug)]
-pub struct Settings {
-    pub database: DatabaseSettings,
-    pub application: ApplicationSettings,
-}
-
-#[derive(serde::Deserialize, Debug)]
-pub struct ApplicationSettings {
-    pub port: u16,
-    pub host: String,
-}
-
-#[derive(serde::Deserialize, Debug)]
-pub struct DatabaseSettings {
-    pub username: String,
-    pub password: Secret<String>,
-    pub port: u16,
-    pub host: String,
-    pub database_name: String,
-}
-
-impl TryFrom<Config> for Settings {
-    type Error = ConfigError;
-
-    fn try_from(builder: config::Config) -> Result<Self, Self::Error> {
-        // Extract values from the builder and construct Settings
-        let database = builder.get::<DatabaseSettings>("database")?;
-        let application = builder.get::<ApplicationSettings>("application")?;
-
-        Ok(Settings {
-            database,
-            application,
-        })
-    }
-}
-
-impl DatabaseSettings {
-    pub fn connection_string(&self) -> Secret<String> {
-        Secret::new(format!(
-            "postgres://{}:{}@{}:{}/{}",
-            self.username,
-            self.password.expose_secret(),
-            self.host,
-            self.port,
-            self.database_name
-        ))
-    }
-
-    pub fn connection_string_without_db(&self) -> Secret<String> {
-        Secret::new(format!(
-            "postgres://{}:{}@{}:{}",
-            self.username,
-            self.password.expose_secret(),
-            self.host,
-            self.port
-        ))
-    }
-}
-
-pub fn get_configuration() -> Result<Settings, ConfigError> {
-    let base_path = std::env::current_dir().expect("Failed to determine the current directory.");
-    let configuration_directory = base_path.join("configuration");
-
-    // Detect the running environment
-    // Default to `local`
-    let environment: Environment = std::env::var("APP_ENVIRONMENT")
-        .unwrap_or_else(|_| "local".into())
-        .try_into()
-        .expect("Failed to parse APP_ENVIRONMENT.");
-
-    let environment_filename = format!("{}.yaml", environment.as_str());
-    // Initialise our configuration reader
-    let settings = config::Config::builder()
-        // Add configuration values from a file named `configuration.yaml`.
-        .add_source(config::File::from(
-            configuration_directory.join("base.yaml"),
-        ))
-        .add_source(config::File::from(
-            configuration_directory.join(environment_filename),
-        ))
-        .build()?;
-    // Try to convert the configuration values it read into
-    // our Settings type
-    settings.try_deserialize::<Settings>()
-}
-
-pub enum Environment {
-    Local,
-    Production,
-}
-
-impl Environment {
-    pub fn as_str(&self) -> &'static str {
-        match self {
-            Environment::Local => "local",
-            Environment::Production => "production",
-        }
-    }
-}
-
-impl TryFrom<String> for Environment {
-    type Error = String;
-
-    fn try_from(s: String) -> Result<Self, Self::Error> {
-        match s.to_lowercase().as_str() {
-            "local" => Ok(Self::Local),
-            "production" => Ok(Self::Production),
-            other => Err(format!(
-                "{} is not a supported environement. \
-                Use either 'local' or 'production'.",
-                other
-            )),
-        }
-    }
-}
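The deleted module above layered settings with the config crate (also removed from Cargo.toml): configuration/base.yaml first, then a local or production overlay selected by APP_ENVIRONMENT. A minimal sketch of that lookup, assuming the same directory layout:

use config::{Config, ConfigError};

fn load(environment: &str) -> Result<Config, ConfigError> {
    Config::builder()
        // Settings shared by every environment.
        .add_source(config::File::with_name("configuration/base"))
        // Environment-specific overrides, e.g. configuration/local.yaml.
        .add_source(config::File::with_name(&format!("configuration/{}", environment)))
        .build()
}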
@@ -1,9 +0,0 @@
-application:
-  port: 8000
-  host: 127.0.0.1
-database:
-  host: "127.0.0.1"
-  port: 5432
-  username: "postgres"
-  password: "password"
-  database_name: "newsletter"
src/database.rs
@@ -1,12 +1,12 @@
 use diesel::pg::PgConnection;
-use diesel::r2d2::{ConnectionManager, Pool};
+use diesel::prelude::*;
+use dotenv::dotenv;
+use std::env;
 
-pub fn get_connection_pool(url: &str) -> Pool<ConnectionManager<PgConnection>> {
-    let manager = ConnectionManager::<PgConnection>::new(url);
-    // Refer to the `r2d2` documentation for more methods to use
-    // when building a connection pool
-    Pool::builder()
-        .test_on_check_out(true)
-        .build(manager)
-        .expect("Could not build connection pool")
+pub fn establish_connection() -> PgConnection {
+    dotenv().ok();
+    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+    PgConnection::establish(&database_url)
+        .unwrap_or_else(|e| panic!("Error connecting to database {}: {}", database_url, e))
 }
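The swap above replaces a shared r2d2 pool with a fresh Postgres connection per call. A rough sketch of the difference, assuming the r2d2 feature that only the old Cargo.toml enables; the variable names are illustrative:

use diesel::pg::PgConnection;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};

fn main() {
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

    // Pooled (old side): connections are opened once, then reused across handlers.
    let pool: Pool<ConnectionManager<PgConnection>> = Pool::builder()
        .build(ConnectionManager::new(&url))
        .expect("Could not build connection pool");
    let _checked_out = pool.get().expect("checkout"); // cheap once the pool is warm

    // Per-call (new side): every invocation pays a full connect handshake.
    let _fresh = PgConnection::establish(&url).expect("connect");
}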
src/json_serialization/new_feed.rs
@@ -1,6 +1,6 @@
 use serde::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 pub struct NewFeedSchema {
     pub title: String,
     pub url: String,
src/json_serialization/new_user.rs
@@ -1,6 +1,6 @@
 use serde::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 pub struct NewUserSchema {
     pub name: String,
     pub email: String,
src/json_serialization/read_feed_item.rs
@@ -1,6 +1,6 @@
 use serde_derive::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 pub struct ReadItem {
     pub id: i32,
 }
src/json_serialization/url.rs
@@ -1,6 +1,6 @@
 use serde::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 pub struct UrlJson {
     pub url: String,
 }
@@ -1,6 +1,6 @@
 use serde_derive::Deserialize;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 pub struct JsonUser {
     pub user_id: i32,
 }
src/lib.rs
@@ -1,13 +0,0 @@
-extern crate diesel;
-extern crate dotenv;
-
-pub mod auth;
-pub mod configuration;
-pub mod database;
-pub mod json_serialization;
-pub mod models;
-pub mod reader;
-pub mod schema;
-pub mod startup;
-pub mod telemetry;
-pub mod views;
src/main.rs
@@ -1,32 +1,61 @@
-use std::net::TcpListener;
+extern crate diesel;
+extern crate dotenv;
 
-use diesel::{
-    r2d2::{ConnectionManager, Pool},
-    PgConnection,
-};
-use rss_reader::{
-    configuration::get_configuration,
-    database::get_connection_pool,
-    startup::run,
-    telemetry::{get_subscriber, init_subscriber},
-};
-use secrecy::ExposeSecret;
+use actix_service::Service;
+use actix_web::{App, HttpResponse, HttpServer};
+use futures::future::{ok, Either};
+mod auth;
+mod database;
+mod json_serialization;
+mod models;
+mod reader;
+mod schema;
+mod views;
 
 #[actix_rt::main]
 async fn main() -> std::io::Result<()> {
-    let subscriber = get_subscriber("zero2prod".into(), "info".into(), std::io::stdout);
-    init_subscriber(subscriber);
-
-    let configuration = get_configuration().expect("Failed to read configuration.");
-
-    let connection_pool: Pool<ConnectionManager<PgConnection>> =
-        get_connection_pool(configuration.database.connection_string().expose_secret());
-
-    let address = format!(
-        "{}:{}",
-        configuration.application.host, configuration.application.port
-    );
-
-    let listener = TcpListener::bind(address)?;
-    run(listener, connection_pool)?.await
+    env_logger::init();
+
+    HttpServer::new(|| {
+        let app = App::new()
+            .wrap_fn(|req, srv| {
+                let mut passed: bool;
+                let request_url: String = String::from(req.uri().path());
+
+                log::info!("Request Url: {}", request_url);
+                if req.path().contains("/article/") {
+                    match auth::process_token(&req) {
+                        Ok(_token) => passed = true,
+                        Err(_message) => passed = false,
+                    }
+                } else {
+                    log::warn!("No auth check done.");
+                    passed = true;
+                }
+
+                if req.path().contains("user/create") {
+                    passed = true;
+                }
+
+                log::info!("passed: {:?}", passed);
+
+                let end_result = match passed {
+                    true => Either::Left(srv.call(req)),
+                    false => Either::Right(ok(req.into_response(
+                        HttpResponse::Unauthorized().finish().map_into_boxed_body(),
+                    ))),
+                };
+
+                async move {
+                    let result = end_result.await?;
+                    log::info!("{} -> {}", request_url, &result.status());
+                    Ok(result)
+                }
+            })
+            .configure(views::views_factory);
+        app
+    })
+    .bind("127.0.0.1:8001")?
+    .run()
+    .await
 }
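The wrap_fn guard in the new main.rs must return one concrete future type from both branches, which is what futures::future::Either provides. A self-contained sketch of the pattern (the function and status codes are illustrative, not from the repo):

use futures::executor::block_on;
use futures::future::{ok, Either, Ready};

type Status = Ready<Result<u16, ()>>;

fn gate(passed: bool) -> Either<Status, Status> {
    if passed {
        Either::Left(ok(200)) // forward to the wrapped service
    } else {
        Either::Right(ok(401)) // short-circuit with Unauthorized
    }
}

fn main() {
    assert_eq!(block_on(gate(true)), Ok(200));
    assert_eq!(block_on(gate(false)), Ok(401));
}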
src/models/user/new_user.rs
@@ -2,7 +2,6 @@ extern crate bcrypt;
 
 use bcrypt::{hash, DEFAULT_COST};
 use diesel::Insertable;
-use secrecy::{ExposeSecret, Secret};
 use uuid::Uuid;
 
 use crate::schema::users;
@@ -17,9 +16,8 @@ pub struct NewUser {
 }
 
 impl NewUser {
-    pub fn new(username: String, email: String, password: Secret<String>) -> NewUser {
-        let hashed_password: String =
-            hash(password.expose_secret().as_str(), DEFAULT_COST).unwrap();
+    pub fn new(username: String, email: String, password: String) -> NewUser {
+        let hashed_password: String = hash(password.as_str(), DEFAULT_COST).unwrap();
         let uuid = Uuid::new_v4();
         NewUser {
             username,
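Both sides hash the password with bcrypt before it is stored. A minimal sketch of the hash/verify round trip with the bcrypt crate pinned in Cargo.toml; the sample password is illustrative:

use bcrypt::{hash, verify, DEFAULT_COST};

fn main() {
    let hashed = hash("sample-password", DEFAULT_COST).unwrap();
    // The stored hash verifies the original password and rejects others.
    assert!(verify("sample-password", &hashed).unwrap());
    assert!(!verify("wrong-password", &hashed).unwrap());
}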
@@ -1,23 +1,15 @@
 use actix_web::{web, HttpResponse};
-use diesel::{
-    r2d2::{ConnectionManager, Pool},
-    PgConnection, RunQueryDsl,
-};
+use diesel::RunQueryDsl;
 
 use crate::{
-    json_serialization::new_feed::NewFeedSchema, models::feed::new_feed::NewFeed, schema::feed,
+    database::establish_connection, json_serialization::new_feed::NewFeedSchema,
+    models::feed::new_feed::NewFeed, schema::feed,
 };
 
 use super::feeds;
 
-#[tracing::instrument(name = "Add new feed", skip(pool))]
-pub async fn add(
-    new_feed: web::Json<NewFeedSchema>,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> HttpResponse {
-    let pool_arc = pool.get_ref().clone();
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
-
+pub async fn add(new_feed: web::Json<NewFeedSchema>) -> HttpResponse {
+    let mut connection = establish_connection();
     let title: String = new_feed.title.clone();
     let url: String = new_feed.url.clone();
     let user_id: i32 = new_feed.user_id;
@@ -25,11 +17,13 @@ pub async fn add(
     let result = feeds::get_feed(&url).await;
     match result {
         Ok(channel) => {
+            log::info!("valid channel");
             if channel.items.is_empty() {
                 return HttpResponse::ServiceUnavailable().await.unwrap();
             }
         }
-        Err(_) => {
+        Err(e) => {
+            log::error!("{:?}", e);
             return HttpResponse::NotFound().await.unwrap();
         }
     }
@@ -42,6 +36,9 @@ pub async fn add(
 
     match insert_result {
         Ok(_) => HttpResponse::Created().await.unwrap(),
-        Err(_) => HttpResponse::Conflict().await.unwrap(),
+        Err(e) => {
+            log::error!("{e}");
+            HttpResponse::Conflict().await.unwrap()
+        }
     }
 }
@@ -2,9 +2,9 @@ use std::error::Error;
 
 use rss::Channel;
 
-#[tracing::instrument(name = "Get Channel Feed")]
 pub async fn get_feed(feed: &str) -> Result<Channel, Box<dyn Error>> {
     let content = reqwest::get(feed).await?.bytes().await?;
     let channel = Channel::read_from(&content[..])?;
+    log::debug!("{:?}", channel);
     Ok(channel)
 }
@@ -4,30 +4,23 @@ use crate::models::feed_item::rss_feed_item::FeedItem;
 use crate::reader::structs::feed::FeedAggregate;
 use crate::schema::feed_item::{feed_id, id, read};
 use crate::{
+    database::establish_connection,
     json_serialization::articles::Articles,
     schema::feed::{self, user_id},
     schema::feed_item,
 };
 use actix_web::{web, HttpRequest, Responder};
 use chrono::Local;
-use diesel::r2d2::{ConnectionManager, Pool};
-use diesel::{prelude::*, r2d2};
+use diesel::prelude::*;
 
 use super::structs::article::Article;
 
-#[tracing::instrument(name = "Get feeds", skip(pool))]
-pub async fn get(
-    path: web::Path<JsonUser>,
-    req: HttpRequest,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> impl Responder {
+pub async fn get(path: web::Path<JsonUser>, req: HttpRequest) -> impl Responder {
     let request = req.clone();
     let req_user_id = path.user_id;
-    // Clone the Arc containing the connection pool
-    let pool_arc = pool.get_ref().clone();
-    // Acquire a connection from the pool
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
+    log::info!("Received user_id: {}", req_user_id);
 
+    let mut connection: diesel::PgConnection = establish_connection();
     let feeds: Vec<Feed> = feed::table
         .filter(user_id.eq(req_user_id))
         .load::<Feed>(&mut connection)
@@ -35,28 +28,19 @@ pub async fn get(
 
     let mut feed_aggregates: Vec<FeedAggregate> = Vec::new();
     for feed in feeds {
-        feed_aggregates.push(get_feed_aggregate(feed, &mut connection))
-    }
-
-    let articles: Articles = Articles {
-        feeds: feed_aggregates,
-    };
-
-    articles.respond_to(&request)
-}
-
-#[tracing::instrument(name = "Get feed aggregate", skip(connection))]
-pub fn get_feed_aggregate(
-    feed: Feed,
-    connection: &mut r2d2::PooledConnection<ConnectionManager<PgConnection>>,
-) -> FeedAggregate {
-    let existing_item: Vec<FeedItem> = feed_item::table
-        .filter(feed_id.eq(feed.id))
-        .filter(read.eq(false))
-        .order(id.asc())
-        .load(connection)
-        .unwrap();
-
+        let existing_item: Vec<FeedItem> = feed_item::table
+            .filter(feed_id.eq(feed.id))
+            .filter(read.eq(false))
+            .order(id.asc())
+            .load(&mut connection)
+            .unwrap();
+
+        log::info!(
+            "Load {} feed items for feed: {}",
+            existing_item.len(),
+            feed.url
+        );
+
     let article_list: Vec<Article> = existing_item
         .into_iter()
         .map(|feed_item: FeedItem| {
@@ -74,8 +58,17 @@ pub fn get_feed_aggregate(
         })
         .collect();
 
-    FeedAggregate {
+    log::info!("article list with {} items generated.", article_list.len());
+
+    feed_aggregates.push(FeedAggregate {
         title: feed.title,
         items: article_list,
+    })
     }
+
+    let articles: Articles = Articles {
+        feeds: feed_aggregates,
+    };
+
+    articles.respond_to(&request)
 }
@@ -1,36 +1,31 @@
 use crate::schema::feed_item::{id, read};
 use crate::{
-    json_serialization::read_feed_item::ReadItem, models::feed_item::rss_feed_item::FeedItem,
-    schema::feed_item,
+    database::establish_connection, json_serialization::read_feed_item::ReadItem,
+    models::feed_item::rss_feed_item::FeedItem, schema::feed_item,
 };
-use actix_web::{web, HttpRequest, HttpResponse};
-use diesel::r2d2::{ConnectionManager, Pool};
+use actix_web::{web, HttpRequest, HttpResponse, Responder};
+use diesel::RunQueryDsl;
 use diesel::{ExpressionMethods, QueryDsl};
-use diesel::{PgConnection, RunQueryDsl};
 
-#[tracing::instrument(name = "Mark as read", skip(pool))]
-pub async fn mark_read(
-    _req: HttpRequest,
-    path: web::Path<ReadItem>,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> HttpResponse {
-    let pool_arc = pool.get_ref().clone();
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
-
+pub async fn mark_read(_req: HttpRequest, path: web::Path<ReadItem>) -> impl Responder {
+    let mut connection = establish_connection();
+    log::info!("Id: {}", path.id);
     let feed_items: Vec<FeedItem> = feed_item::table
         .filter(id.eq(path.id))
         .load::<FeedItem>(&mut connection)
         .unwrap();
 
     if feed_items.len() != 1 {
-        return HttpResponse::NotFound().await.unwrap();
+        return HttpResponse::NotFound();
     }
 
     let feed_item: &FeedItem = feed_items.first().unwrap();
 
-    let _result: Result<usize, diesel::result::Error> = diesel::update(feed_item)
+    let result: Result<usize, diesel::result::Error> = diesel::update(feed_item)
        .set(read.eq(true))
        .execute(&mut connection);
 
-    HttpResponse::Ok().await.unwrap()
+    log::info!("Mark as read: {:?}", result);
+
+    HttpResponse::Ok()
 }
@@ -4,13 +4,15 @@ use crate::json_serialization::{readable::Readable, url::UrlJson};
 
 use super::scraper::content::do_throttled_request;
 
-#[tracing::instrument(name = "Read Feed")]
 pub async fn read(_req: HttpRequest, data: web::Json<UrlJson>) -> impl Responder {
     let result = do_throttled_request(&data.url);
 
     let content = match result.await {
         Ok(cont) => cont,
-        Err(e) => e.to_string(),
+        Err(e) => {
+            log::error!("Could not scrap url {}", data.url);
+            e.to_string()
+        }
     };
 
     Readable { content }
@@ -4,24 +4,26 @@ use crate::models::feed::rss_feed::Feed;
 use crate::models::feed_item::new_feed_item::NewFeedItem;
 use crate::models::feed_item::rss_feed_item::FeedItem;
 use crate::schema::feed_item::{feed_id, title};
-use crate::schema::{
+use crate::{
+    database::establish_connection,
+    schema::{
     feed::{self, user_id},
     feed_item,
+    },
 };
-use actix_web::{web, HttpRequest, HttpResponse};
+use actix_web::{web, HttpRequest, HttpResponse, Responder};
 use chrono::{DateTime, Local, NaiveDateTime};
 use dateparser::parse;
 use diesel::prelude::*;
-use diesel::r2d2::{ConnectionManager, Pool};
 use rss::Item;
 use scraper::{Html, Selector};
 
-#[tracing::instrument(name = "Get Date")]
 fn get_date(date_str: &str) -> Result<NaiveDateTime, chrono::ParseError> {
     // let format_string = "%a, %d %b %Y %H:%M:%S %z";
     let format_string = "%Y-%m-%dT%H:%M:%S%Z";
 
     let result = parse(date_str).unwrap();
+    log::info!("Date: {:?}", result);
 
     match NaiveDateTime::parse_from_str(&result.to_string(), format_string) {
         Ok(r) => Ok(r),
@@ -38,9 +40,9 @@ fn get_date(date_str: &str) -> Result<NaiveDateTime, chrono::ParseError> {
     }
 }
 
-#[tracing::instrument(name = "Create Feed Item", skip(connection))]
 fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
     let item_title = item.title.clone().unwrap();
+    log::info!("Create feed item: {}", item_title);
 
     let base_content: &str = match item.content() {
         Some(c) => c,
@@ -74,11 +76,15 @@ fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
         .unwrap();
 
     if existing_item.is_empty() {
+        log::info!("{:?}", item.pub_date());
         let mut time: NaiveDateTime = Local::now().naive_local();
         if item.pub_date().is_some() {
             time = match get_date(item.pub_date().unwrap()) {
                 Ok(date) => date,
-                Err(_err) => time,
+                Err(err) => {
+                    log::error!("could not unwrap pub date: {}", err);
+                    time
+                }
             };
         }
         let new_feed_item = NewFeedItem::new(
@@ -88,20 +94,18 @@ fn create_feed_item(item: Item, feed: &Feed, connection: &mut PgConnection) {
         item.link.unwrap(),
         Some(time),
     );
-    let _insert_result = diesel::insert_into(feed_item::table)
+    let insert_result = diesel::insert_into(feed_item::table)
         .values(&new_feed_item)
         .execute(connection);
+
+        log::info!("Insert Result: {:?}", insert_result);
+    } else {
+        log::info!("Item {} already exists.", item_title);
     }
 }
 
-#[tracing::instrument(name = "sync", skip(pool))]
-pub async fn sync(
-    _req: HttpRequest,
-    data: web::Json<JsonUser>,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> HttpResponse {
-    let pool_arc = pool.get_ref().clone();
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
+pub async fn sync(_req: HttpRequest, data: web::Json<JsonUser>) -> impl Responder {
+    let mut connection: diesel::PgConnection = establish_connection();
 
     let req_user_id: i32 = data.user_id;
 
@@ -110,18 +114,22 @@ pub async fn sync(
         .load::<Feed>(&mut connection)
         .unwrap();
 
+    log::info!("Found {} feeds to sync.", feeds.len());
+
     for feed in feeds {
+        log::info!("Try to get url: {}", feed.url);
         let result = feeds::get_feed(&feed.url).await;
 
         match result {
             Ok(channel) => {
                 for item in channel.into_items() {
+                    log::info!("{:?}", item);
                     create_feed_item(item, &feed, &mut connection);
                 }
             }
-            Err(_e) => return HttpResponse::InternalServerError().await.unwrap(),
+            Err(e) => log::error!("Could not get channel {}. Error: {}", feed.url, e),
         }
     }
 
-    HttpResponse::Ok().await.unwrap()
+    HttpResponse::Ok()
 }
src/startup.rs
@@ -1,56 +0,0 @@
-use std::net::TcpListener;
-
-use actix_service::Service;
-use actix_web::web;
-use actix_web::{dev::Server, App, HttpResponse, HttpServer};
-use diesel::r2d2::{ConnectionManager, Pool};
-use diesel::PgConnection;
-use futures::future::{ok, Either};
-
-use crate::auth;
-use crate::views;
-
-#[tracing::instrument(name = "Run application", skip(connection, listener))]
-pub fn run(
-    listener: TcpListener,
-    connection: Pool<ConnectionManager<PgConnection>>,
-) -> Result<Server, std::io::Error> {
-    let wrapper = web::Data::new(connection);
-    let server = HttpServer::new(move || {
-        App::new()
-            .wrap_fn(|req, srv| {
-                let mut passed: bool;
-
-                if req.path().contains("/article/") {
-                    match auth::process_token(&req) {
-                        Ok(_token) => passed = true,
-                        Err(_message) => passed = false,
-                    }
-                } else {
-                    passed = true;
-                }
-
-                if req.path().contains("user/create") {
-                    passed = true;
-                }
-
-                let end_result = match passed {
-                    true => Either::Left(srv.call(req)),
-                    false => Either::Right(ok(req.into_response(
-                        HttpResponse::Unauthorized().finish().map_into_boxed_body(),
-                    ))),
-                };
-
-                async move {
-                    let result = end_result.await?;
-                    Ok(result)
-                }
-            })
-            .app_data(wrapper.clone())
-            .configure(views::views_factory)
-    })
-    .listen(listener)?
-    .run();
-
-    Ok(server)
-}
src/telemetry.rs
@@ -1,27 +0,0 @@
-use tracing::{dispatcher::set_global_default, Subscriber};
-use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
-use tracing_log::LogTracer;
-use tracing_subscriber::{fmt::MakeWriter, layer::SubscriberExt, EnvFilter, Registry};
-
-pub fn get_subscriber<Sink>(
-    name: String,
-    env_filter: String,
-    sink: Sink,
-) -> impl Subscriber + Send + Sync
-where
-    Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
-{
-    let env_filter =
-        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
-    let formatting_layer = BunyanFormattingLayer::new(name, sink);
-
-    Registry::default()
-        .with(env_filter)
-        .with(JsonStorageLayer)
-        .with(formatting_layer)
-}
-
-pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
-    LogTracer::init().expect("Failed to set logger.");
-    set_global_default(subscriber.into()).expect("Failed to set subscriber.");
-}
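With this module gone, the new side initialises the plain log facade with env_logger instead (see the env_logger::init() call in src/main.rs). The minimal equivalent setup, using the log and env_logger crates added to Cargo.toml:

fn main() {
    // Reads the RUST_LOG environment variable, e.g. RUST_LOG=info.
    env_logger::init();
    log::info!("rss-reader starting");
}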
@@ -1,3 +1,4 @@
+use crate::database::establish_connection;
 use crate::diesel;
 use crate::json_serialization::login::Login;
 use crate::models::user::rss_user::User;
@@ -5,18 +6,13 @@ use crate::schema::users;
 use crate::{auth::jwt::JwtToken, schema::users::username};
 use actix_web::{web, HttpResponse};
 use diesel::prelude::*;
-use diesel::r2d2::{ConnectionManager, Pool};
 
-pub async fn login(
-    credentials: web::Json<Login>,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> HttpResponse {
-    let pool_arc = pool.get_ref().clone();
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
-
+pub async fn login(credentials: web::Json<Login>) -> HttpResponse {
     let username_cred: String = credentials.username.clone();
     let password: String = credentials.password.clone();
 
+    let mut connection = establish_connection();
+
     let users: Vec<User> = users::table
         .filter(username.eq(username_cred.as_str()))
         .load::<User>(&mut connection)
@@ -25,6 +21,10 @@ pub async fn login(
     if users.is_empty() {
         return HttpResponse::NotFound().await.unwrap();
     } else if users.len() > 1 {
+        log::error!(
+            "multiple user have the usernam: {}",
+            credentials.username.clone()
+        );
         return HttpResponse::Conflict().await.unwrap();
     }
 
@@ -32,6 +32,7 @@ pub async fn login(
 
     match user.clone().verify(password) {
         true => {
+            log::info!("verified password successfully for user {}", user.id);
             let token: String = JwtToken::encode(user.clone().id);
             HttpResponse::Ok()
                 .insert_header(("token", token))
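JwtToken::encode and JwtToken::decode are this repository's own helpers (src/auth/jwt.rs is not part of this compare). For orientation, the underlying jwt, hmac and sha2 crates from Cargo.toml are typically combined like this; the key and claims here are illustrative:

use hmac::{Hmac, Mac};
use jwt::{SignWithKey, VerifyWithKey};
use sha2::Sha256;
use std::collections::BTreeMap;

fn main() {
    let key: Hmac<Sha256> = Hmac::new_from_slice(b"some-secret").unwrap();

    let mut claims = BTreeMap::new();
    claims.insert("user_id", "1");
    let token: String = claims.sign_with_key(&key).unwrap();

    let decoded: BTreeMap<String, String> = token.as_str().verify_with_key(&key).unwrap();
    assert_eq!(decoded["user_id"], "1");
}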
@@ -1,25 +1,16 @@
+use crate::database::establish_connection;
 use crate::diesel;
 use crate::json_serialization::new_user::NewUserSchema;
 use crate::models::user::new_user::NewUser;
 use crate::schema::users;
 use actix_web::{web, HttpResponse};
-use diesel::{
-    prelude::*,
-    r2d2::{ConnectionManager, Pool},
-};
-use secrecy::Secret;
+use diesel::prelude::*;
 
-#[tracing::instrument(name = "Create new User", skip(pool))]
-pub async fn create(
-    new_user: web::Json<NewUserSchema>,
-    pool: web::Data<Pool<ConnectionManager<PgConnection>>>,
-) -> HttpResponse {
-    let pool_arc = pool.get_ref().clone();
-    let mut connection = pool_arc.get().expect("Failed to get database connection");
-
+pub async fn create(new_user: web::Json<NewUserSchema>) -> HttpResponse {
+    let mut connection = establish_connection();
     let name: String = new_user.name.clone();
     let email: String = new_user.email.clone();
-    let new_password: Secret<String> = Secret::new(new_user.password.clone());
+    let new_password: String = new_user.password.clone();
 
     let new_user = NewUser::new(name, email, new_password);
vue/.env.development
@@ -1,2 +0,0 @@
-VITE_API_BASE_URL=http://localhost:8001
-
vue/.env.production
@@ -1,2 +0,0 @@
-VITE_API_BASE_URL=http://rust-app:8001
-
vue/Dockerfile
@@ -1,22 +0,0 @@
-FROM node:lts-alpine
-
-# install simple http server for serving static content
-RUN npm install -g http-server
-
-# make the 'app' folder the current working directory
-WORKDIR /app
-
-# copy both 'package.json' and 'package-lock.json' (if available)
-COPY package*.json ./
-
-# install project dependencies
-RUN npm install
-
-# copy project files and folders to the current working directory (i.e. 'app' folder)
-COPY . .
-
-# build app for production with minification
-RUN npm run build
-
-EXPOSE 8080
-CMD [ "http-server", "dist" ]
package-lock.json
@@ -18,7 +18,6 @@
         "@rushstack/eslint-patch": "^1.3.2",
         "@vitejs/plugin-vue": "^4.3.1",
         "@vue/eslint-config-prettier": "^8.0.0",
-        "dotenv": "^16.4.5",
         "eslint": "^8.46.0",
         "eslint-plugin-vue": "^9.16.1",
         "prettier": "^3.0.0",
@@ -1026,18 +1025,6 @@
         "node": ">=6.0.0"
       }
     },
-    "node_modules/dotenv": {
-      "version": "16.4.5",
-      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz",
-      "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==",
-      "dev": true,
-      "engines": {
-        "node": ">=12"
-      },
-      "funding": {
-        "url": "https://dotenvx.com"
-      }
-    },
     "node_modules/esbuild": {
       "version": "0.18.20",
       "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz",
package.json
@@ -20,7 +20,6 @@
     "@rushstack/eslint-patch": "^1.3.2",
     "@vitejs/plugin-vue": "^4.3.1",
     "@vue/eslint-config-prettier": "^8.0.0",
-    "dotenv": "^16.4.5",
     "eslint": "^8.46.0",
     "eslint-plugin-vue": "^9.16.1",
     "prettier": "^3.0.0",
@@ -14,7 +14,6 @@ a,
   color: hsla(160, 100%, 37%, 1);
   transition: 0.4s;
 }
-
 .message {
   background-color: #3498db;
   color: white;
@@ -26,7 +25,6 @@ a,
   transform: translateX(-50%);
   z-index: 9999;
 }
-
 @media (hover: hover) {
   a:hover {
     background-color: hsla(160, 100%, 37%, 0.2);
@@ -45,68 +43,18 @@ a,
   font-family: Georgia, 'Times New Roman', Times, serif;
   font-size: 20px;
   padding: 1em;
-  display: flex;
-  flex-direction: column;
-  align-items: left;
-  text-align: left;
 }
 
 .feed-content p {
   padding: 1em;
 }
 
-.feed-content h2,
-h3,
-h4,
-h5,
-h6 {
+.feed-content h3 {
   padding: 1em;
   font-size: 21px;
   font-weight: bold;
 }
 
-
-.feed-content img {
-  max-width: 100%;
-  margin-bottom: 10px;
-  /* Adjust spacing between image and text */
-}
-
 h3 {
   font-size: 14px;
 }
-
-
-.form-group {
-  margin-bottom: 15px;
-}
-
-label {
-  display: block;
-  margin-bottom: 5px;
-}
-
-input[type="text"],
-input[type="password"] {
-  /* width: 100%; */
-  padding: 8px;
-  border: 1px solid #ccc;
-  border-radius: 4px;
-}
-
-button {
-  background-color: #4CAF50;
-  color: white;
-  padding: 10px 20px;
-  border: none;
-  border-radius: 4px;
-  cursor: pointer;
-}
-
-button:hover {
-  background-color: #45a049;
-}
-
-.error {
-  color: red;
-}
@@ -38,7 +38,7 @@ async function login() {
       localStorage.setItem("user-id", user_id)
       sessionStorage.setItem("user-id", user_id)
       sessionStorage.setItem("user-token", token)
-      router.push({ name: 'feeds' })
+      router.push({ name: 'about' })
     }
     // Handle success
   } catch (error) {
@@ -67,10 +67,9 @@ const fetchData = async () => {
         'user-token': localStorage.getItem("user-token")
       }
     });
-    const sortedItems = response.data.feeds.flatMap(feed => feed.items)
-      .sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));
-
-    feeds.value.push(...sortedItems);
+    response.data.feeds.forEach(feed => {
+      feeds.value.push(...feed.items);
+    });
     await nextTick();
     setupIntersectionObserver();
   } catch (error) {
vite.config.js
@@ -2,14 +2,6 @@ import { fileURLToPath, URL } from 'node:url'
 
 import { defineConfig } from 'vite'
 import vue from '@vitejs/plugin-vue'
-import dotenv from 'dotenv';
-
-console.log('process.env:', process.env);
-console.log('TEst:', process.env.VITE_API_BASE_URL);
-// Load environment variables based on the environment mode
-dotenv.config({
-  path: `.env.${process.env.NODE_ENV || 'development'}`
-});
 
 // https://vitejs.dev/config/
 export default defineConfig({
@@ -25,31 +17,31 @@ export default defineConfig({
   server: {
     proxy: {
       '/login/rss': {
-        target: `${process.env.VITE_API_BASE_URL}/api/v1/auth/login`,
+        target: 'http://localhost:8001/api/v1/auth/login',
         changeOrigin: true,
         secure: false,
         rewrite: (path) => path.replace(/^\/login\/rss/, ''),
       },
       '/feeds/get': {
-        target: `${process.env.VITE_API_BASE_URL}/api/v1/article/get`,
+        target: 'http://localhost:8001/api/v1/article/get',
         changeOrigin: true,
         secure: false,
         rewrite: (path) => path.replace(/^\/feeds\/get/, ''),
       },
      '/feeds/sync': {
-        target: `${process.env.VITE_API_BASE_URL}/api/v1/article/sync`,
+        target: 'http://localhost:8001/api/v1/article/sync',
         changeOrigin: true,
         secure: false,
         rewrite: (path) => path.replace(/^\/feeds\/sync/, ''),
       },
       '/feeds/read': {
-        target: `${process.env.VITE_API_BASE_URL}/api/v1/article/read`,
+        target: 'http://localhost:8001/api/v1/article/read',
        changeOrigin: true,
         secure: false,
         rewrite: (path) => path.replace(/^\/feeds\/read/, ''),
       },
       '/feeds/add': {
-        target: `${process.env.VITE_API_BASE_URL}/api/v1/article/add`,
+        target: 'http://localhost:8001/api/v1/article/add',
         changeOrigin: true,
         secure: false,
         rewrite: (path) => path.replace(/^\/feeds\/add/, ''),