Merge remote-tracking branch 'origin' into pagination

2025-07-16 21:57:52 -04:00
31 changed files with 1423 additions and 146 deletions

View File

@@ -279,6 +279,15 @@ dependencies = [
"either",
]
[[package]]
name = "cache"
version = "0.1.0"
dependencies = [
"fred",
"serde",
"serde_json",
]
[[package]]
name = "cc"
version = "1.1.21"
@@ -1322,7 +1331,12 @@ dependencies = [
name = "public"
version = "0.1.0"
dependencies = [
<<<<<<< HEAD
"axum 0.8.4",
=======
"axum",
"cache",
>>>>>>> origin
"chrono",
"dotenvy",
"fred",
@@ -1588,18 +1602,18 @@ checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
 [[package]]
 name = "serde"
-version = "1.0.210"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
+checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
 dependencies = [
 "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.210"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
+checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -1608,9 +1622,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.128"
+version = "1.0.140"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
 dependencies = [
 "itoa",
 "memchr",
@@ -1982,9 +1996,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
 [[package]]
 name = "syn"
-version = "2.0.77"
+version = "2.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
 dependencies = [
 "proc-macro2",
 "quote",

View File

@@ -25,3 +25,4 @@ serde_json = "1.0.128"
chrono = "0.4.38"
xml = "0.8.20"
fred = "10.1.0"
cache = { version = "*", path = "../cache" }
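
The new entry points at a sibling `cache` crate introduced by this merge. Its own manifest is not part of the diff, but the lockfile entry above pins its dependency set, so it presumably looks something like this sketch (the edition and the derive feature are assumptions):

[package]
name = "cache"
version = "0.1.0"
edition = "2021"

[dependencies]
fred = "10.1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"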

View File

@@ -1,6 +1,6 @@
 use axum::Router;
+use cache::ClientLike;
 use config::config;
-use fred::prelude::*;
 use sqlx::postgres::PgPoolOptions;
 use std::fs::File;
 use std::sync::Arc;
@@ -8,7 +8,7 @@ use std::time::Duration;
 use tokio::net::TcpListener;
 use tokio::signal;
 use tokio::sync::Mutex;
-use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
+// use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
 use tower_http::{
     cors::{Any, CorsLayer},
     trace::{self, TraceLayer},
@@ -101,13 +101,13 @@ async fn main() {
.expect("Failed to connect to database");
let pool_size = 8;
let config = Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
let config = cache::Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
let redis_pool = Builder::from_config(config)
let redis_pool = cache::Builder::from_config(config)
.with_performance_config(|config| {
config.default_command_timeout = Duration::from_secs(60);
})
.set_policy(ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
.set_policy(cache::ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
.build_pool(pool_size)
.expect("Failed to create cache pool");

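For these call sites to compile, the new `cache` crate has to re-export the fred items the rest of the workspace now reaches through `cache::`. A minimal sketch of that surface, inferred from the imports in this diff rather than taken from the commit:

// crates/cache/src/lib.rs (hypothetical path): re-export fred's client,
// builder, and option types under the `cache::` namespace.
pub use fred::clients::Pool;
pub use fred::interfaces::{ClientLike, KeysInterface};
pub use fred::prelude::{Builder, Config, ReconnectPolicy};
pub use fred::types::{Expiration, SetOptions};

Funneling fred through one crate keeps the Redis client an implementation detail: a future backend swap would touch `cache` alone instead of every handler.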
View File

@@ -5,7 +5,7 @@ use axum::{
     routing::get,
     Json,
 };
-use fred::types::Expiration;
+use cache::Expiration;
 use serde::{Deserialize, Serialize};

 use crate::{

View File

@@ -13,8 +13,8 @@ use axum::{
     routing::{get, post},
     Json,
 };
+use cache::{Expiration, SetOptions};
 use chrono::Utc;
-use fred::types::{Expiration, SetOptions};
 use serde::{Deserialize, Serialize};

 #[derive(Deserialize, Serialize, Debug)]

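Assuming the wrapper's `set` keeps the signature it had in the old state.rs (key, value, optional expiration, optional set options, `GET` flag — shown further down), a handler-side call would look roughly like this; the `Post` model and key scheme are stand-ins, not code from this commit:

use crate::state::AppState;
use cache::{Expiration, SetOptions};
use serde::{Deserialize, Serialize};

// Stand-in for the real model; the wrapper's bounds require both derives.
#[derive(Serialize, Deserialize)]
struct Post {
    id: i32,
    title: String,
}

async fn cache_post(state: AppState, post: &Post) -> Result<(), Box<dyn std::error::Error>> {
    let mut app = state.lock().await;
    app.cache
        .set(
            format!("post:{}", post.id), // key scheme is an assumption
            post,                        // serialized to JSON inside the wrapper
            Some(Expiration::EX(3600)),  // expire after an hour
            Some(SetOptions::NX),        // write only if the key is absent
            false,                       // discard any previous value
        )
        .await
}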
View File

@@ -15,8 +15,8 @@ use axum::{
     routing::get,
     Json, Router,
 };
+use cache::Expiration;
 use chrono::Utc;
-use fred::types::Expiration;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;

View File

@@ -1,6 +1,6 @@
 use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
 use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
-use fred::types::Expiration;
+use cache::Expiration;
 use serde::{Deserialize, Serialize};

 #[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]

View File

@@ -1,83 +1,17 @@
-use fred::interfaces::KeysInterface;
-use fred::{clients::Pool, prelude::*};
 use sqlx::PgPool;

 pub type AppState = std::sync::Arc<tokio::sync::Mutex<AppInternalState>>;

 pub struct AppInternalState {
     pub database: sqlx::postgres::PgPool,
-    pub cache: Cache,
-}
-
-pub struct Cache {
-    pub inmem: Pool,
+    pub cache: cache::Cache,
 }

 impl AppInternalState {
-    pub fn new(database: PgPool, cache: Pool) -> Self {
+    pub fn new(database: PgPool, cache: cache::Pool) -> Self {
         AppInternalState {
             database,
-            cache: Cache { inmem: cache },
-        }
-    }
-}
-
-impl Cache {
-    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de>,
-    {
-        self.is_connected()?;
-        let value: Option<String> = self.inmem.get(&key).await?;
-
-        match value {
-            Some(json_str) => match serde_json::from_str::<T>(&json_str) {
-                Ok(deserialized) => Ok(Some(deserialized)),
-                Err(_) => Ok(None),
-            },
-            None => Ok(None),
-        }
-    }
-
-    pub async fn set<T>(
-        &mut self,
-        key: String,
-        contents: &T,
-        expiration: Option<Expiration>,
-        set_opts: Option<SetOptions>,
-        get: bool,
-    ) -> Result<(), Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de> + serde::Serialize,
-    {
-        self.is_connected()?;
-        let json_string = match serde_json::to_string::<T>(contents) {
-            Ok(s) => s,
-            Err(_) => {
-                return Err(Box::new(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    "Unable to deserialize contents passed to cache".to_string(),
-                )))
-            }
-        };
-
-        Ok(self
-            .inmem
-            .set(key, json_string, expiration, set_opts, get)
-            .await?)
-    }
-
-    pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
-        Ok(self.inmem.del(key).await?)
-    }
-
-    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
-        match self.inmem.is_connected() {
-            true => Ok(()),
-            false => Err(Box::new(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Not connected to cache".to_string(),
-            ))),
+            cache: cache::Cache { inmem: cache },
         }
     }
 }
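
The deleted wrapper presumably moved into the `cache` crate largely intact. A condensed sketch of the relocated type under that assumption (the module path, and the `get` shown here, are illustrative; the commit only shows the call sites):

// crates/cache/src/lib.rs (hypothetical): the same JSON-over-Redis wrapper,
// now owning the fred Pool behind the crate boundary.
use fred::clients::Pool;
use fred::interfaces::{ClientLike, KeysInterface};

pub struct Cache {
    pub inmem: Pool,
}

impl Cache {
    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
    where
        T: for<'de> serde::Deserialize<'de>,
    {
        self.is_connected()?;
        let value: Option<String> = self.inmem.get(&key).await?;
        // Undeserializable payloads count as a cache miss, as in the old code.
        Ok(value.and_then(|json| serde_json::from_str(&json).ok()))
    }

    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        match self.inmem.is_connected() {
            true => Ok(()),
            false => Err("Not connected to cache".into()),
        }
    }
}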

View File

@@ -73,17 +73,18 @@ pub fn generate_rss(
     format!(
         r#"<?xml version="1.0" encoding="UTF-8"?>
 <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
-<channel>
-<title>{safe_title}</title>
-<description>{safe_description}</description>
-<link>{link}</link>
-<language>en-us</language>
-<ttl>60</ttl>
-<generator>Kyouma 1.0.0-SE</generator>
-<atom:link href="https://wyattjmiller.com/posts.xml" rel="self" type="application/rss+xml" />
-{}
-</channel>
-</rss>"#,
+            <channel>
+            <title>{safe_title}</title>
+            <description>{safe_description}</description>
+            <link>{link}</link>
+            <language>en-us</language>
+            <ttl>60</ttl>
+            <generator>Kyouma 1.0.0-SE</generator>
+            <atom:link href="https://wyattjmiller.com/posts.xml" rel="self" type="application/rss+xml" />
+            {}
+            </channel>
+            </rss>
+            "#,
         rss_entries
     )
 }
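
One review note on this hunk: `r#"..."#` raw strings preserve leading whitespace verbatim, so re-indenting the literal to sit flush with the surrounding code also indents every line of the generated feed, and the newline moved before `"#` adds one to the output. Feed readers tolerate both forms, but the emitted bytes do change. A minimal illustration, not project code:

fn main() {
    let flat = r#"<channel>
<title>t</title>"#;
    let indented = r#"<channel>
            <title>t</title>"#;
    assert_ne!(flat, indented); // same markup, different whitespace
}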

View File

@@ -23,7 +23,6 @@ impl SitemapEntry {
 pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
     let urls = entries
         .values()
-        .into_iter()
         .map(|entry| entry.to_item())
         .collect::<String>();
     format!(
@@ -39,21 +38,21 @@ pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
 pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10000".to_string(),
         SitemapEntry {
             location: web_url.clone(),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10001".to_string(),
         SitemapEntry {
             location: format!("{}/posts", web_url),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10002".to_string(),
         SitemapEntry {
             location: format!("{}/projects", web_url),
             lastmod: chrono::Utc::now(),