Merge pull request 'Added caching system' (#1) from caching into master

Reviewed-on: #1
This commit is contained in:
Wyatt J. Miller 2025-03-16 21:16:52 -05:00
commit 2487a0f421
12 changed files with 662 additions and 65 deletions

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
debug.log
.vscode/
.idea/
dump.rdb

View File

@ -1,3 +1,10 @@
# Backend
TODO
## What is this?
This is just an organizational way of keeping the backend services together (so I don't lose my mind).
## Projects
- [`public`](./public/README.md) - a RESTful API service
- [`task`](./task/README.md) - a task scheduler service

View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "addr2line"
@ -59,6 +59,12 @@ dependencies = [
"libc",
]
[[package]]
name = "arc-swap"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "async-trait"
version = "0.1.82"
@ -209,6 +215,16 @@ version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
[[package]]
name = "bytes-utils"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35"
dependencies = [
"bytes",
"either",
]
[[package]]
name = "cc"
version = "1.1.21"
@ -253,6 +269,12 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "cookie-factory"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "396de984970346b0d9e93d1415082923c679e5ae5c3ee3dcbd104f5610af126b"
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
@ -283,6 +305,12 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crc16"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "338089f42c427b86394a5ee60ff321da23a5c89c9d89514c829687b26359fcff"
[[package]]
name = "crossbeam-queue"
version = "0.3.11"
@ -403,6 +431,15 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "flume"
version = "0.11.0"
@ -439,6 +476,43 @@ dependencies = [
"thiserror",
]
[[package]]
name = "fred"
version = "10.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a7b2fd0f08b23315c13b6156f971aeedb6f75fb16a29ac1872d2eabccc1490e"
dependencies = [
"arc-swap",
"async-trait",
"bytes",
"bytes-utils",
"float-cmp",
"fred-macros",
"futures",
"log",
"parking_lot",
"rand",
"redis-protocol",
"semver",
"socket2",
"tokio",
"tokio-stream",
"tokio-util",
"url",
"urlencoding",
]
[[package]]
name = "fred-macros"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1458c6e22d36d61507034d5afecc64f105c1d39712b7ac6ec3b352c423f715cc"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "futures"
version = "0.3.30"
@ -1180,6 +1254,7 @@ dependencies = [
"axum",
"chrono",
"dotenvy",
"fred",
"serde",
"serde_json",
"sqlx",
@ -1254,6 +1329,20 @@ dependencies = [
"bitflags",
]
[[package]]
name = "redis-protocol"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cdba59219406899220fc4cdfd17a95191ba9c9afb719b5fa5a083d63109a9f1"
dependencies = [
"bytes",
"bytes-utils",
"cookie-factory",
"crc16",
"log",
"nom",
]
[[package]]
name = "redox_syscall"
version = "0.5.4"
@ -1420,6 +1509,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
[[package]]
name = "serde"
version = "1.0.210"
@ -2142,6 +2237,12 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "valuable"
version = "0.1.0"

View File

@ -24,3 +24,4 @@ serde = "1.0.210"
serde_json = "1.0.128"
chrono = "0.4.38"
xml = "0.8.20"
fred = "10.1.0"

20
backend/public/README.md Normal file
View File

@ -0,0 +1,20 @@
# Backend API
also known as `public`
## What is this?
This is a RESTful API service. Most of the data retrieval, requesting, and processing happens here.
## Things you should know
### Environment variables
`public` uses a `.env` file at the root of the project. The file takes standard environment variables (like environment variables you would put into a `.bashrc` or ad-hoc into your shell).
For `public` to work properly, please make sure to first create the `.env` file, then fill out the following environment variables:
- `RUST_ENV` - needed for letting the service know whether we are working in `development` or `production`
- `DATABASE_URL` - needed for connecting to Postgres
- `REDIS_URL` - needed for connecting to the cache (Redis or Valkey)
- `BASE_URI_WEB` - needed for connecting the frontend user interface of the system to this service

View File

@ -1,11 +1,13 @@
use axum::{http::Method, Router};
use axum::Router;
use config::config;
use sqlx::{postgres::PgPoolOptions, PgPool};
use fred::prelude::*;
use sqlx::postgres::PgPoolOptions;
use std::fs::File;
use std::sync::Arc;
use std::time::Duration;
use tokio::net::TcpListener;
use tokio::signal;
use tokio::sync::Mutex;
use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
use tower_http::{
cors::{Any, CorsLayer},
@ -16,12 +18,9 @@ use tracing_subscriber::{filter, layer::SubscriberExt, prelude::*, util::Subscri
mod config;
mod datasources;
mod routes;
mod state;
mod utils;
pub struct AppState {
db: PgPool,
}
#[tokio::main]
async fn main() {
// setting up configuration
@ -87,6 +86,11 @@ async fn main() {
// grabbing the database url from our env variables
let db_connection_str = std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://postgres:password@localhost".to_string());
let redis_url = match std::env::var("REDIS_URL").unwrap().as_str() {
// TODO: fix the unwrap ^
"" => "redis://localhost:6379".to_string(),
x => x.to_string(),
};
// set up connection pool
let pool = PgPoolOptions::new()
@ -96,7 +100,24 @@ async fn main() {
.await
.expect("Failed to connect to database");
let app_state = AppState { db: pool.clone() };
let pool_size = 8;
let config = Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
let redis_pool = Builder::from_config(config)
.with_performance_config(|config| {
config.default_command_timeout = Duration::from_secs(60);
})
.set_policy(ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
.build_pool(pool_size)
.expect("Failed to create cache pool");
if std::env::var("REDIS_URL").unwrap() != "" {
// TODO: fix the unwrap ^
redis_pool.init().await.expect("Failed to connect to cache");
let _ = redis_pool.flushall::<i32>(false).await;
}
let app_state = Arc::new(Mutex::new(state::AppInternalState::new(pool, redis_pool)));
// build our application with some routes
let app = Router::new()

View File

@ -5,14 +5,14 @@ use axum::{
routing::get,
Json,
};
use fred::types::Expiration;
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Postgres};
use crate::{datasources::authors::AuthorsDatasource, AppState};
use crate::{datasources::authors::AuthorsDatasource, state::AppState};
use super::comments::Pagination;
#[derive(Serialize)]
#[derive(Deserialize, Serialize, Clone)]
pub struct Author {
pub author_id: i32,
pub first_name: String,
@ -21,7 +21,7 @@ pub struct Author {
pub image: Option<String>,
}
#[derive(Deserialize)]
#[derive(Deserialize, Serialize, Clone)]
pub struct AuthorGetOneParams {
pub id: i32,
}
@ -33,34 +33,104 @@ impl AuthorsRoute {
.route("/", get(AuthorsRoute::get_all))
.route("/:id", get(AuthorsRoute::get_one))
.route("/:id/posts", get(AuthorsRoute::get_authors_posts))
.with_state(app_state.db.clone())
.with_state(app_state.clone())
}
async fn get_all(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Json(pagination): Json<Pagination>,
) -> impl IntoResponse {
match AuthorsDatasource::get_all(&pool, pagination).await {
Ok(a) => Ok(Json(a)),
let mut state = app_state.lock().await;
let cached: Option<Vec<Author>> = state
.cache
.get(String::from("authors:all"))
.await
.unwrap_or(None);
if let Some(authors) = cached {
tracing::info!("grabbing all authors from cache");
return Ok(Json(authors));
}
match AuthorsDatasource::get_all(&state.database, pagination).await {
Ok(authors) => {
tracing::info!("grabbing all authors from the database");
if let a = &authors {
let author_cloned = a.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("authors:all"),
&author_cloned,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
}
Ok(Json(authors))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
async fn get_one(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Path(params): Path<AuthorGetOneParams>,
) -> impl IntoResponse {
match AuthorsDatasource::get_one(&pool, params.id).await {
Ok(a) => Ok(Json(a)),
let mut state = app_state.lock().await;
let cached: Option<Author> = state
.cache
.get(format!("authors:{}", params.id))
.await
.unwrap_or(None);
if let Some(author) = cached {
tracing::info!("grabbing one author from cache");
return Ok(Json(author));
}
match AuthorsDatasource::get_one(&state.database, params.id).await {
Ok(author) => {
tracing::info!("grabbing all authors from the database");
if let a = &author {
let author_cloned = a.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
format!("authors:{}", author_cloned.author_id),
&author_cloned,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
}
Ok(Json(author))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
async fn get_authors_posts(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Path(params): Path<AuthorGetOneParams>,
) -> impl IntoResponse {
match AuthorsDatasource::get_authors_posts(&pool, params.id).await {
let state = app_state.lock().await;
match AuthorsDatasource::get_authors_posts(&state.database, params.id).await {
Ok(p) => Ok(Json(p)),
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}

View File

@ -1,5 +1,5 @@
use super::posts::serialize_datetime;
use crate::{datasources::comments::CommentsDatasource, AppState};
use super::posts::{deserialize_datetime, serialize_datetime};
use crate::{datasources::comments::CommentsDatasource, state::AppState};
use axum::{
extract::{Path, State},
http::StatusCode,
@ -8,33 +8,34 @@ use axum::{
Json,
};
use chrono::Utc;
use fred::types::{Expiration, SetOptions};
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Postgres};
#[derive(Deserialize, Debug)]
#[derive(Deserialize, Serialize, Debug)]
pub struct CommentInputPayload {
pub name: String,
pub body: String,
pub post_id: i32,
}
#[derive(Deserialize)]
#[derive(Deserialize, Serialize)]
pub struct CommentPathParams {
id: i32,
}
#[derive(Deserialize)]
#[derive(Deserialize, Serialize)]
pub struct Pagination {
pub page_number: i64,
pub page_size: i64,
}
#[derive(sqlx::FromRow, Serialize, Debug)]
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
pub struct Comment {
pub comment_id: i32,
pub name: String,
pub body: String,
#[serde(serialize_with = "serialize_datetime")]
#[serde(deserialize_with = "deserialize_datetime")]
pub created_at: Option<chrono::DateTime<Utc>>,
}
@ -46,34 +47,61 @@ impl CommentsRoute {
.route("/post/:id", get(CommentsRoute::get_post_comments))
.route("/add", post(CommentsRoute::insert_comment))
.route("/index", get(CommentsRoute::get_comments_index))
.with_state(app_state.db.clone())
.with_state(app_state.clone())
}
async fn get_post_comments(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Path(params): Path<CommentPathParams>,
) -> impl IntoResponse {
match CommentsDatasource::get_posts_comments(&pool, params.id).await {
let state = app_state.lock().await;
match CommentsDatasource::get_posts_comments(&state.database, params.id).await {
Ok(c) => Ok(Json(c)),
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
//
async fn insert_comment(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Json(input): Json<CommentInputPayload>,
) -> impl IntoResponse {
match CommentsDatasource::insert_comment(&pool, input).await {
Ok(c) => Ok((StatusCode::CREATED, Json(c))),
let state = app_state.lock().await;
match CommentsDatasource::insert_comment(&state.database, input).await {
Ok(c) => {
if let co = &c {
let co = c.clone();
let state = app_state.clone();
tokio::spawn(async move {
tracing::info!("update cache if key already exists!");
let mut s = state.lock().await;
let _ = s
.cache
.set(
format!("comments:{}", co.comment_id),
&co,
Some(Expiration::EX(60 * 15)),
Some(SetOptions::XX),
false,
)
.await;
});
}
Ok((StatusCode::CREATED, Json(c)))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
async fn get_comments_index(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Json(pagination): Json<Pagination>,
) -> impl IntoResponse {
match CommentsDatasource::get_index_comments(&pool, pagination).await {
let state = app_state.lock().await;
match CommentsDatasource::get_index_comments(&state.database, pagination).await {
Ok(c) => Ok(Json(c)),
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}

View File

@ -1,7 +1,8 @@
use std::collections::HashMap;
use std::fmt;
use crate::utils::rss;
use crate::{datasources::posts::PostsDatasource, AppState};
use crate::{datasources::posts::PostsDatasource, state::AppState};
use axum::http::{HeaderMap, HeaderValue};
use axum::{
extract::{Path, State},
@ -11,10 +12,10 @@ use axum::{
Json, Router,
};
use chrono::Utc;
use serde::{Deserialize, Serialize, Serializer};
use sqlx::{Pool, Postgres};
use fred::types::Expiration;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
#[derive(sqlx::FromRow, Serialize, Debug, Clone)]
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
pub struct Post {
pub post_id: i32,
pub author_id: Option<i32>,
@ -23,10 +24,11 @@ pub struct Post {
pub title: String,
pub body: String,
#[serde(serialize_with = "serialize_datetime")]
#[serde(deserialize_with = "deserialize_datetime")]
pub created_at: Option<chrono::DateTime<Utc>>,
}
#[derive(sqlx::FromRow, Serialize, Debug)]
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
pub struct PostFeaturedVariant {
pub post_id: i32,
pub author_id: Option<i32>,
@ -35,6 +37,7 @@ pub struct PostFeaturedVariant {
pub title: String,
pub body: String,
#[serde(serialize_with = "serialize_datetime")]
#[serde(deserialize_with = "deserialize_datetime")]
pub created_at: Option<chrono::DateTime<Utc>>,
pub is_featured: Option<bool>,
}
@ -56,63 +59,275 @@ impl PostsRoute {
.route("/hot", get(PostsRoute::get_hot_posts))
.route("/featured", get(PostsRoute::get_featured_posts))
.route("/rss", get(PostsRoute::get_rss_posts))
.with_state(app_state.db.clone())
.with_state(app_state.clone())
}
// get all posts
async fn get_all(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_all(&pool).await {
Ok(posts) => Ok(Json(posts)),
async fn get_all(State(app_state): State<AppState>) -> impl IntoResponse {
let mut state = app_state.lock().await;
let cached: Option<Vec<Post>> = state
.cache
.get(String::from("posts:all"))
.await
.unwrap_or(None);
if let Some(posts) = cached {
tracing::info!("grabbing all posts from cache");
return Ok(Json(posts));
};
match PostsDatasource::get_all(&state.database).await {
Ok(posts) => {
tracing::info!("grabbing all posts from database");
if let p = &posts {
let posts = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("posts:all"),
&posts,
Some(Expiration::EX(10)),
None,
false,
)
.await;
});
};
Ok(Json(posts))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get one post
async fn get_one(
State(pool): State<Pool<Postgres>>,
State(app_state): State<AppState>,
Path(params): Path<PostGetOneParams>,
) -> impl IntoResponse {
match PostsDatasource::get_one(&pool, params.id).await {
Ok(post) => Ok(Json(post)),
let mut state = app_state.lock().await;
let cached: Option<PostFeaturedVariant> = state
.cache
.get(format!("posts:{}", params.id))
.await
.unwrap_or(None);
if let Some(post) = cached {
tracing::info!("grabbing one post from cache");
return Ok(Json(post));
};
match PostsDatasource::get_one(&state.database, params.id).await {
Ok(post) => {
tracing::info!("grabbing one post from database");
if let p = &post {
let post = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
format!("posts:{}", params.id),
&post,
Some(Expiration::EX(10)),
None,
false,
)
.await;
});
};
Ok(Json(post))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get recent posts
async fn get_recent_posts(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_recent(&pool).await {
Ok(posts) => Ok(Json(posts)),
async fn get_recent_posts(State(app_state): State<AppState>) -> impl IntoResponse {
let mut state = app_state.lock().await;
let cached: Option<Vec<Post>> = state
.cache
.get(String::from("posts:recent"))
.await
.unwrap_or(None);
if let Some(posts) = cached {
tracing::info!("grabbing recent posts from cache");
return Ok(Json(posts));
};
match PostsDatasource::get_recent(&state.database).await {
Ok(posts) => {
tracing::info!("grabbing recent posts from database");
if let p = &posts {
let posts = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("posts:recent"),
&posts,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
};
Ok(Json(posts))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get the top three posts with the highest view count
async fn get_popular_posts(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_popular(&pool).await {
Ok(posts) => Ok(Json(posts)),
async fn get_popular_posts(State(app_state): State<AppState>) -> impl IntoResponse {
let mut state = app_state.lock().await;
let cached: Option<Vec<Post>> = state
.cache
.get(String::from("posts:popular"))
.await
.unwrap_or(None);
if let Some(posts) = cached {
tracing::info!("grabbing popular posts from cache");
return Ok(Json(posts));
};
match PostsDatasource::get_popular(&state.database).await {
Ok(posts) => {
tracing::info!("grabbing popular posts from database");
if let p = &posts {
let posts = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("posts:popular"),
&posts,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
};
Ok(Json(posts))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get the top three posts with the most comments
async fn get_hot_posts(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_hot(&pool).await {
Ok(posts) => Ok(Json(posts)),
async fn get_hot_posts(State(app_state): State<AppState>) -> impl IntoResponse {
let mut state = app_state.lock().await;
let cached: Option<Vec<Post>> = state
.cache
.get(String::from("posts:hot"))
.await
.unwrap_or(None);
if let Some(posts) = cached {
tracing::info!("grabbing hot posts from cache");
return Ok(Json(posts));
};
match PostsDatasource::get_hot(&state.database).await {
Ok(posts) => {
tracing::info!("grabbing hot posts from database");
if let p = &posts {
let posts = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("posts:hot"),
&posts,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
};
Ok(Json(posts))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get posts that are featured
async fn get_featured_posts(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_featured(&pool).await {
Ok(posts) => Ok(Json(posts)),
async fn get_featured_posts(State(app_state): State<AppState>) -> impl IntoResponse {
let mut state = app_state.lock().await;
let cached: Option<Vec<Post>> = state
.cache
.get(String::from("posts:featured"))
.await
.unwrap_or(None);
if let Some(posts) = cached {
tracing::info!("grabbing featured posts from cache");
return Ok(Json(posts));
};
match PostsDatasource::get_featured(&state.database).await {
Ok(posts) => {
tracing::info!("grabbing featured posts from database");
if let p = &posts {
let posts = p.clone();
let state = app_state.clone();
tracing::info!("storing database data in cache");
tokio::spawn(async move {
let mut s = state.lock().await;
let _ = s
.cache
.set(
String::from("posts:featured"),
&posts,
Some(Expiration::EX(5)),
None,
false,
)
.await;
});
};
Ok(Json(posts))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
// get rss posts
async fn get_rss_posts(State(pool): State<Pool<Postgres>>) -> impl IntoResponse {
match PostsDatasource::get_all(&pool).await {
async fn get_rss_posts(State(app_state): State<AppState>) -> impl IntoResponse {
let state = app_state.lock().await;
match PostsDatasource::get_all(&state.database).await {
Ok(posts) => {
let web_url = std::env::var("BASE_URI_WEB").expect("No environment variable found");
let mapped_posts: HashMap<String, Post> = posts
@ -154,3 +369,46 @@ where
{
serializer.serialize_str(&date.unwrap().to_rfc3339())
}
pub fn deserialize_datetime<'de, D>(
deserializer: D,
) -> Result<Option<chrono::DateTime<Utc>>, D::Error>
where
D: Deserializer<'de>,
{
struct DateTimeVisitor;
impl<'de> serde::de::Visitor<'de> for DateTimeVisitor {
type Value = Option<chrono::DateTime<Utc>>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an ISO 8601 formatted date string or null")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
match chrono::DateTime::parse_from_rfc3339(value) {
Ok(dt) => Ok(Some(dt.with_timezone(&Utc))),
Err(e) => Err(E::custom(format!("Failed to parse datetime: {}", e))),
}
}
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(None)
}
fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(self)
}
}
deserializer.deserialize_option(DateTimeVisitor)
}

View File

@ -0,0 +1,78 @@
use fred::interfaces::KeysInterface;
use fred::{clients::Pool, prelude::*};
use sqlx::PgPool;
/// Shared application state handle cloned into every axum route.
pub type AppState = std::sync::Arc<tokio::sync::Mutex<AppInternalState>>;
/// Mutex-guarded inner state: the Postgres pool plus the cache wrapper.
pub struct AppInternalState {
pub database: sqlx::postgres::PgPool,
pub cache: Cache,
}
/// Thin wrapper around a fred connection pool (Redis or Valkey).
pub struct Cache {
pub inmem: Pool,
}
impl AppInternalState {
/// Bundles the database pool and the cache pool into the shared state value.
pub fn new(database: PgPool, cache: Pool) -> Self {
let cache = Cache { inmem: cache };
Self { database, cache }
}
}
impl Cache {
/// Fetches `key` and JSON-deserializes the stored payload into `T`.
///
/// Returns `Ok(None)` on a cache miss *or* when the stored payload fails
/// to deserialize — a bad/stale entry is deliberately treated as a miss
/// so callers fall back to the database instead of erroring.
///
/// # Errors
/// Errors when the pool has no live connection or the cache command fails.
pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
where
T: for<'de> serde::Deserialize<'de>,
{
self.ensure_connected()?;
let value: Option<String> = self.inmem.get(&key).await?;
match value {
Some(json_str) => match serde_json::from_str::<T>(&json_str) {
Ok(deserialized) => Ok(Some(deserialized)),
// Swallowed on purpose: undecodable payload == cache miss.
Err(_) => Ok(None),
},
None => Ok(None),
}
}
/// JSON-serializes `contents` and stores it under `key`.
///
/// `expiration`, `set_opts`, and `get` are forwarded to fred's `SET`
/// command (TTL, NX/XX-style conditions, and return-old-value flag).
///
/// # Errors
/// Errors when disconnected, on serialization failure, or if `SET` fails.
pub async fn set<T>(
&mut self,
key: String,
contents: &T,
expiration: Option<Expiration>,
set_opts: Option<SetOptions>,
get: bool,
) -> Result<(), Box<dyn std::error::Error>>
where
T: for<'de> serde::Deserialize<'de> + serde::Serialize,
{
self.ensure_connected()?;
let json_string = serde_json::to_string(contents)?;
self.inmem
.set(key, json_string, expiration, set_opts, get)
.await?;
Ok(())
}
/// Deletes `key` from the cache.
///
/// # Errors
/// Errors when disconnected or if the `DEL` command fails.
pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
// Guard added for consistency with `get`/`set`, which both fail fast
// when the pool is disconnected.
self.ensure_connected()?;
self.inmem.del(key).await?;
Ok(())
}
/// Fails fast with a uniform error when the pool has no live connection.
fn ensure_connected(&self) -> Result<(), Box<dyn std::error::Error>> {
if !self.inmem.is_connected() {
return Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"Not connected to cache".to_string(),
)));
}
Ok(())
}
}

7
backend/task/README.md Normal file
View File

@ -0,0 +1,7 @@
# Task scheduler
also known as `task`
## What is this?
I don't know yet - hopefully this will be filled out soon.

View File

@ -1,6 +1,11 @@
# Database
You can set environment variables either through the command line, the Nix flake (if you are running nix/NixOS), _or_ the `.env` file
You can set environment variables either through the command line, the Nix flake (if you are running nix/NixOS), _or_ the `.env` file.
Uses the following data storing services:
- PostgreSQL 16
- Valkey 8.0.2 (or Redis, haven't tested)
## Create migration database