stuff happened
@@ -1,3 +1,4 @@
 pub mod authors;
 pub mod comments;
 pub mod posts;
+pub mod projects;
backend/public/src/datasources/projects.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
+use sqlx::{FromRow, Pool, Postgres, Row};
+
+use crate::routes::projects::Project;
+
+pub struct ProjectsDatasource;
+impl ProjectsDatasource {
+    pub async fn get_all(pool: &Pool<Postgres>) -> Result<Vec<Project>, sqlx::Error> {
+        sqlx::query_as!(
+            Project,
+            "SELECT project_id, title, repo, summary, tech, wip, created_at FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC"
+        )
+        .fetch_all(pool)
+        .await
+    }
+}
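A note on the query above: sqlx::query_as! is the compile-time-checked macro, so building this file needs a reachable database (or sqlx offline data) via DATABASE_URL at build time. A rough sketch of a runtime-checked alternative, leaning on the sqlx::FromRow derive that Project already carries; the function name here is illustrative only, not part of the commit:

use sqlx::{Pool, Postgres};

use crate::routes::projects::Project;

// Hypothetical runtime-checked variant of ProjectsDatasource::get_all;
// query_as::<_, Project> maps rows through Project's #[derive(sqlx::FromRow)].
pub async fn get_all_unchecked(pool: &Pool<Postgres>) -> Result<Vec<Project>, sqlx::Error> {
    sqlx::query_as::<_, Project>(
        "SELECT project_id, title, repo, summary, tech, wip, created_at \
         FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC",
    )
    .fetch_all(pool)
    .await
}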
@@ -131,6 +131,10 @@ async fn main() {
                 "/authors",
                 routes::authors::AuthorsRoute::routes(&app_state),
             )
+            .nest(
+                "/projects",
+                routes::projects::ProjectsRoute::routes(&app_state),
+            )
             .layer(CorsLayer::permissive())
             .layer(
                 TraceLayer::new_for_http()
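For orientation, Router::nest mounts a sub-router under a path prefix and strips the prefix before the inner routes are matched. A minimal, self-contained sketch of the same composition pattern; the handler is a stand-in, and state and middleware are omitted:

use axum::{routing::get, Router};

// Stand-in handler; the real one is ProjectsRoute::get_all.
async fn list_projects() -> &'static str {
    "projects"
}

fn app() -> Router {
    // Requests under the /projects prefix are dispatched to this sub-router
    // with the prefix stripped, so its "/" route serves the project list.
    let projects = Router::new().route("/", get(list_projects));
    Router::new().nest("/projects", projects)
}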
@@ -1,4 +1,5 @@
 pub mod authors;
 pub mod comments;
 pub mod posts;
+pub mod projects;
 pub mod root;
@@ -1,7 +1,11 @@
 use crate::{
     datasources::posts::PostsDatasource,
     state::AppState,
-    utils::{datetime::*, rss},
+    utils::{
+        datetime::*,
+        rss,
+        sitemap::{self, SitemapEntry},
+    },
 };
 use axum::http::{HeaderMap, HeaderValue};
 use axum::{
@@ -60,6 +64,7 @@ impl PostsRoute {
             .route("/hot", get(PostsRoute::get_hot_posts))
             .route("/featured", get(PostsRoute::get_featured_posts))
             .route("/rss", get(PostsRoute::get_rss_posts))
+            .route("/sitemap", get(PostsRoute::get_sitemap))
             .with_state(app_state.clone())
     }
 
@@ -330,7 +335,8 @@ impl PostsRoute {
 
         match PostsDatasource::get_all(&state.database).await {
             Ok(posts) => {
-                let web_url = std::env::var("BASE_URI_WEB").expect("No environment variable found");
+                let web_url =
+                    std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
                 let mapped_posts: HashMap<String, Post> = posts
                     .into_iter()
                     .map(|post| (post.post_id.to_string(), post))
@@ -343,9 +349,42 @@ impl PostsRoute {
                 );
                 let mut headers = HeaderMap::new();
                 headers.insert(
-                    header::CONTENT_DISPOSITION,
-                    HeaderValue::from_str(r#"attachment; filename="posts.xml""#).unwrap(),
+                    header::CONTENT_TYPE,
+                    HeaderValue::from_static("application/xml"),
                 );
                 (headers, xml)
             }
             Err(e) => {
+                let mut headers = HeaderMap::new();
+                headers.insert("Content-Type", HeaderValue::from_static("text/plain"));
+                (headers, e.to_string())
+            }
+        }
+    }
+
+    async fn get_sitemap(State(app_state): State<AppState>) -> impl IntoResponse {
+        let state = app_state.lock().await;
+        // let cached: Option<Vec<Post>> = None; // TODO: maybe implement cache, later??
+
+        match PostsDatasource::get_all(&state.database).await {
+            Ok(posts) => {
+                let web_url =
+                    std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
+                let mut entries: HashMap<String, SitemapEntry> = posts
+                    .into_iter()
+                    .map(|p| {
+                        (
+                            p.post_id.to_string(),
+                            SitemapEntry {
+                                location: format!("{}/posts/{}", web_url, p.post_id.to_string()),
+                                lastmod: p.created_at.unwrap_or_else(|| chrono::Utc::now()),
+                            },
+                        )
+                    })
+                    .collect();
+                sitemap::get_static_pages(&mut entries, &web_url);
+                let xml: String = sitemap::generate_sitemap(&entries);
+                let mut headers = HeaderMap::new();
+                headers.insert(
+                    header::CONTENT_TYPE,
+                    HeaderValue::from_static("application/xml"),
backend/public/src/routes/projects.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
+use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
+use axum::http::{HeaderMap, HeaderValue};
+use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
+use fred::types::Expiration;
+use serde::{Deserialize, Serialize};
+
+#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
+pub struct Project {
+    pub project_id: i32,
+    pub title: String,
+    pub repo: Option<String>,
+    pub summary: String,
+    pub tech: String,
+    pub wip: Option<bool>,
+    #[serde(serialize_with = "serialize_datetime")]
+    #[serde(deserialize_with = "deserialize_datetime")]
+    pub created_at: Option<chrono::DateTime<chrono::Utc>>,
+}
+
+pub struct ProjectsRoute;
+impl ProjectsRoute {
+    pub fn routes(app_state: &AppState) -> Router {
+        Router::new()
+            .route("/", get(ProjectsRoute::get_all))
+            .with_state(app_state.clone())
+    }
+
+    async fn get_all(State(app_state): State<AppState>) -> impl IntoResponse {
+        let mut state = app_state.lock().await;
+        let cached: Option<Vec<Project>> = state
+            .cache
+            .get(String::from("projects:all"))
+            .await
+            .unwrap_or(None);
+
+        if let Some(projects) = cached {
+            tracing::info!("grabbing all projects from cache");
+            return Ok(Json(projects));
+        };
+
+        match ProjectsDatasource::get_all(&state.database).await {
+            Ok(projects) => {
+                tracing::info!("grabbing all projects from database");
+                if let p = &projects {
+                    let projects = p.clone();
+                    let state = app_state.clone();
+
+                    tracing::info!("storing database data in cache");
+                    tokio::spawn(async move {
+                        let mut s = state.lock().await;
+                        let _ = s
+                            .cache
+                            .set(
+                                String::from("projects:all"),
+                                &projects,
+                                Some(Expiration::EX(10)),
+                                None,
+                                false,
+                            )
+                            .await;
+                    });
+                };
+
+                Ok(Json(projects))
+            }
+            Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
+        }
+    }
+}
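The Project type derives Serialize and Deserialize because the cache (see the cache changes further down, which call serde_json::to_string on set and require Deserialize on get) round-trips values as JSON strings. A minimal, self-contained illustration of that round trip with a stand-in struct; the real type is routes::projects::Project with its custom datetime (de)serializers:

use serde::{Deserialize, Serialize};

// Stand-in mirroring a few of the fields the cache round-trips.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
struct ProjectLite {
    project_id: i32,
    title: String,
    wip: Option<bool>,
}

fn main() -> Result<(), serde_json::Error> {
    let projects = vec![ProjectLite {
        project_id: 1,
        title: "example".into(),
        wip: Some(true),
    }];

    // Cache::set stores this JSON string under "projects:all";
    // Cache::get parses it back into Vec<Project>.
    let json = serde_json::to_string(&projects)?;
    let parsed: Vec<ProjectLite> = serde_json::from_str(&json)?;
    assert_eq!(parsed, projects);
    Ok(())
}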
@@ -1,10 +1,15 @@
 use axum::{
+    extract::State,
     http::StatusCode,
     response::{Html, IntoResponse},
     routing::get,
     Router,
 };
+
 use crate::{datasources::posts::PostsDatasource, state::AppState};
+
+use super::posts::Post;
+
 pub struct RootRoute;
 impl RootRoute {
     pub fn routes() -> Router {
@@ -27,13 +27,7 @@ impl Cache {
     where
         T: for<'de> serde::Deserialize<'de>,
     {
-        if !self.inmem.is_connected() {
-            return Err(Box::new(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Not connected to cache".to_string(),
-            )));
-        }
-
+        self.is_connected()?;
         let value: Option<String> = self.inmem.get(&key).await?;
 
         match value {
@@ -56,23 +50,34 @@ impl Cache {
     where
         T: for<'de> serde::Deserialize<'de> + serde::Serialize,
     {
-        if !self.inmem.is_connected() {
-            return Err(Box::new(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Not connected to cache".to_string(),
-            )));
-        }
+        self.is_connected()?;
-        let json_string = match serde_json::to_string::<T>(contents) {
-            Ok(s) => s,
-            Err(_) => {
-                return Err(Box::new(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    "Unable to deserialize contents passed to cache".to_string(),
-                )))
-            }
-        };
-
+        let json_string = serde_json::to_string(contents)?;
-        self.inmem
+        Ok(self
+            .inmem
             .set(key, json_string, expiration, set_opts, get)
-            .await?;
-
-        Ok(())
+            .await?)
     }
 
     pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
-        self.inmem.del(key).await?;
-        Ok(())
+        Ok(self.inmem.del(key).await?)
     }
 
+    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
+        match self.inmem.is_connected() {
+            true => Ok(()),
+            false => Err(Box::new(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "Not connected to cache".to_string(),
+            ))),
+        }
+    }
 }
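The set and del rewrites above share one shape: Ok(value.await?), where ? converts the underlying error into the boxed error type via From and the success value is re-wrapped in Ok. A small self-contained sketch of why that is equivalent to the earlier match-and-return code; the error type here is a stand-in for the fred and serde_json errors used in the cache:

use std::error::Error;

fn parse(input: &str) -> Result<i32, Box<dyn Error>> {
    // `?` boxes the std::num::ParseIntError via From, mirroring how the cache
    // methods convert library errors into Box<dyn std::error::Error>.
    Ok(input.trim().parse::<i32>()?)
}

fn parse_verbose(input: &str) -> Result<i32, Box<dyn Error>> {
    // The pre-refactor shape: match and re-wrap by hand.
    match input.trim().parse::<i32>() {
        Ok(n) => Ok(n),
        Err(e) => Err(Box::new(e)),
    }
}

fn main() {
    assert_eq!(parse("42").unwrap(), parse_verbose("42").unwrap());
    assert!(parse("not a number").is_err());
}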
@@ -1,2 +1,3 @@
 pub mod datetime;
 pub mod rss;
+pub mod sitemap;
@@ -13,7 +13,8 @@ pub struct RssEntry {
 
 impl From<posts::Post> for RssEntry {
     fn from(post: posts::Post) -> Self {
-        let web_url = std::env::var("BASE_URI_WEB").expect("Environment variable not found");
+        let web_url =
+            std::env::var("BASE_URI_WEB").expect("Environment variable BASE_URI_WEB not found");
         let post_url = format!("{}{}{}", web_url, "/posts/", post.post_id.to_string());
         let author_full_name = format!("{} {}", post.first_name.unwrap(), post.last_name.unwrap());
 
@@ -58,10 +59,7 @@ pub fn generate_rss(
     link: &str,
     posts: &HashMap<String, posts::Post>,
 ) -> String {
-    println!("{:?}", posts);
     let values = posts.clone().into_values();
-    println!("{:?}", values);
-
     let rss_entries = values
         .map(|p| p.into())
         .map(|r: RssEntry| r.to_item())
@@ -69,8 +67,9 @@ pub fn generate_rss(
 
     let safe_title = escape_str_pcdata(title);
     let safe_description = escape_str_pcdata(description);
-    println!("{:?}", rss_entries);
 
+    // TODO: change the atom link in this string - it's not correct
+    // change it when we know the URL
     format!(
         r#"<?xml version="1.0" encoding="UTF-8"?>
 <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
backend/public/src/utils/sitemap.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
+use std::collections::HashMap;
+
+pub struct SitemapEntry {
+    pub location: String,
+    pub lastmod: chrono::DateTime<chrono::Utc>,
+}
+
+impl SitemapEntry {
+    fn to_item(&self) -> String {
+        format!(
+            r#"
+        <url>
+            <loc>{}</loc>
+            <lastmod>{}</lastmod>
+        </url>
+        "#,
+            self.location,
+            self.lastmod.to_rfc3339(),
+        )
+    }
+}
+
+pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
+    let urls = entries
+        .values()
+        .into_iter()
+        .map(|entry| entry.to_item())
+        .collect::<String>();
+    format!(
+        r#"
+        <!-- Generated by Kyouma 1.0.0-SE -->
+        <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+        {}
+        </urlset>
+        "#,
+        urls
+    )
+}
+
+pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
+    entries.insert(
+        (entries.len() + 1).to_string(),
+        SitemapEntry {
+            location: web_url.clone(),
+            lastmod: chrono::Utc::now(),
+        },
+    );
+    entries.insert(
+        (entries.len() + 1).to_string(),
+        SitemapEntry {
+            location: format!("{}/posts", web_url),
+            lastmod: chrono::Utc::now(),
+        },
+    );
+    entries.insert(
+        (entries.len() + 1).to_string(),
+        SitemapEntry {
+            location: format!("{}/projects", web_url),
+            lastmod: chrono::Utc::now(),
+        },
+    );
+}
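For reference, a minimal usage sketch of the new module as it is consumed by the posts route above (crate::utils::sitemap per the utils/mod.rs change); the post id and base URL here are placeholders:

use std::collections::HashMap;

use crate::utils::sitemap::{generate_sitemap, get_static_pages, SitemapEntry};

// Sketch of how the sitemap handler assembles its XML body.
fn build_example_sitemap() -> String {
    let web_url = String::from("https://example.com");
    let mut entries: HashMap<String, SitemapEntry> = HashMap::new();
    entries.insert(
        "1".to_string(),
        SitemapEntry {
            location: format!("{}/posts/1", web_url),
            lastmod: chrono::Utc::now(),
        },
    );

    // Adds the "/", "/posts" and "/projects" entries keyed by entries.len() + 1.
    get_static_pages(&mut entries, &web_url);
    generate_sitemap(&entries)
}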