Merge pull request 'Tasks MR' (#2) from import into master

Reviewed-on: #2
This commit is contained in:
2025-06-29 22:29:19 -05:00
34 changed files with 2267 additions and 341 deletions

View File

@ -1,3 +1,4 @@
pub mod authors;
pub mod comments;
pub mod posts;
pub mod projects;

View File

@ -0,0 +1,15 @@
use sqlx::{FromRow, Pool, Postgres, Row};
use crate::routes::projects::Project;
/// Read-only data access for the `projects` table.
pub struct ProjectsDatasource;

impl ProjectsDatasource {
    /// Fetches every non-deleted project, newest first.
    ///
    /// Uses the compile-time-checked `query_as!` macro, so the selected
    /// column list must match the `Project` struct's fields exactly.
    ///
    /// # Errors
    /// Returns any `sqlx::Error` raised by the query or connection pool.
    pub async fn get_all(pool: &Pool<Postgres>) -> Result<Vec<Project>, sqlx::Error> {
        sqlx::query_as!(
            Project,
            "SELECT project_id, title, repo, summary, tech, wip, created_at FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC"
        )
        .fetch_all(pool)
        .await
    }
}

View File

@ -131,6 +131,10 @@ async fn main() {
"/authors",
routes::authors::AuthorsRoute::routes(&app_state),
)
.nest(
"/projects",
routes::projects::ProjectsRoute::routes(&app_state),
)
.layer(CorsLayer::permissive())
.layer(
TraceLayer::new_for_http()

View File

@ -1,4 +1,5 @@
pub mod authors;
pub mod comments;
pub mod posts;
pub mod projects;
pub mod root;

View File

@ -1,7 +1,11 @@
use crate::{
datasources::posts::PostsDatasource,
state::AppState,
utils::{datetime::*, rss},
utils::{
datetime::*,
rss,
sitemap::{self, SitemapEntry},
},
};
use axum::http::{HeaderMap, HeaderValue};
use axum::{
@ -60,6 +64,7 @@ impl PostsRoute {
.route("/hot", get(PostsRoute::get_hot_posts))
.route("/featured", get(PostsRoute::get_featured_posts))
.route("/rss", get(PostsRoute::get_rss_posts))
.route("/sitemap", get(PostsRoute::get_sitemap))
.with_state(app_state.clone())
}
@ -330,7 +335,8 @@ impl PostsRoute {
match PostsDatasource::get_all(&state.database).await {
Ok(posts) => {
let web_url = std::env::var("BASE_URI_WEB").expect("No environment variable found");
let web_url =
std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
let mapped_posts: HashMap<String, Post> = posts
.into_iter()
.map(|post| (post.post_id.to_string(), post))
@ -343,9 +349,42 @@ impl PostsRoute {
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_DISPOSITION,
HeaderValue::from_str(r#"attachment; filename="posts.xml""#).unwrap(),
header::CONTENT_TYPE,
HeaderValue::from_static("application/xml"),
);
(headers, xml)
}
Err(e) => {
let mut headers = HeaderMap::new();
headers.insert("Content-Type", HeaderValue::from_static("text/plain"));
(headers, e.to_string())
}
}
}
async fn get_sitemap(State(app_state): State<AppState>) -> impl IntoResponse {
let state = app_state.lock().await;
// let cached: Option<Vec<Post>> = None; // TODO: maybe implement cache, later??
match PostsDatasource::get_all(&state.database).await {
Ok(posts) => {
let web_url =
std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
let mut entries: HashMap<String, SitemapEntry> = posts
.into_iter()
.map(|p| {
(
p.post_id.to_string(),
SitemapEntry {
location: format!("{}/posts/{}", web_url, p.post_id.to_string()),
lastmod: p.created_at.unwrap_or_else(|| chrono::Utc::now()),
},
)
})
.collect();
sitemap::get_static_pages(&mut entries, &web_url);
let xml: String = sitemap::generate_sitemap(&entries);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
HeaderValue::from_static("application/xml"),

View File

@ -0,0 +1,69 @@
use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
use axum::http::{HeaderMap, HeaderValue};
use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
use fred::types::Expiration;
use serde::{Deserialize, Serialize};
/// A portfolio project row from the `projects` table, serialized to the API.
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
pub struct Project {
    pub project_id: i32,
    pub title: String,
    /// Link to the source repository, when one is public.
    pub repo: Option<String>,
    pub summary: String,
    /// Technology list as a single string (comma-separated by convention —
    /// TODO confirm against stored data).
    pub tech: String,
    /// Work-in-progress flag; may be NULL in the database.
    pub wip: Option<bool>,
    #[serde(serialize_with = "serialize_datetime")]
    #[serde(deserialize_with = "deserialize_datetime")]
    pub created_at: Option<chrono::DateTime<chrono::Utc>>,
}
/// HTTP handlers for the `/projects` endpoints.
pub struct ProjectsRoute;

impl ProjectsRoute {
    /// Builds the projects router, sharing the application state with each handler.
    pub fn routes(app_state: &AppState) -> Router {
        Router::new()
            .route("/", get(ProjectsRoute::get_all))
            .with_state(app_state.clone())
    }

    /// GET `/` — returns all projects, served from the cache when possible
    /// and falling back to the database on a miss.
    async fn get_all(State(app_state): State<AppState>) -> impl IntoResponse {
        let mut state = app_state.lock().await;

        // Any cache error is treated as a miss rather than a request failure.
        let cached: Option<Vec<Project>> = state
            .cache
            .get(String::from("projects:all"))
            .await
            .unwrap_or(None);

        if let Some(projects) = cached {
            tracing::info!("grabbing all projects from cache");
            return Ok(Json(projects));
        }

        match ProjectsDatasource::get_all(&state.database).await {
            Ok(projects) => {
                tracing::info!("grabbing all projects from database");

                // Fixed: the original used `if let p = &projects`, an
                // irrefutable pattern that always matches (clippy:
                // irrefutable_let_patterns). Write-through to the cache on a
                // background task so the response isn't delayed; the spawned
                // task re-acquires the state lock after this handler returns.
                let cache_copy = projects.clone();
                let state_handle = app_state.clone();
                tracing::info!("storing database data in cache");
                tokio::spawn(async move {
                    let mut s = state_handle.lock().await;
                    let _ = s
                        .cache
                        .set(
                            String::from("projects:all"),
                            &cache_copy,
                            Some(Expiration::EX(10)),
                            None,
                            false,
                        )
                        .await;
                });

                Ok(Json(projects))
            }
            Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
        }
    }
}

View File

@ -1,10 +1,15 @@
use axum::{
extract::State,
http::StatusCode,
response::{Html, IntoResponse},
routing::get,
Router,
};
use crate::{datasources::posts::PostsDatasource, state::AppState};
use super::posts::Post;
pub struct RootRoute;
impl RootRoute {
pub fn routes() -> Router {

View File

@ -27,13 +27,7 @@ impl Cache {
where
T: for<'de> serde::Deserialize<'de>,
{
if !self.inmem.is_connected() {
return Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"Not connected to cache".to_string(),
)));
}
self.is_connected()?;
let value: Option<String> = self.inmem.get(&key).await?;
match value {
@ -56,23 +50,34 @@ impl Cache {
where
T: for<'de> serde::Deserialize<'de> + serde::Serialize,
{
if !self.inmem.is_connected() {
self.is_connected()?;
let json_string = match serde_json::to_string::<T>(contents) {
Ok(s) => s,
Err(_) => {
return Err(Box::new(std::io::Error::new(
std::io::ErrorKind::Other,
"Not connected to cache".to_string(),
)));
"Unable to deserialize contents passed to cache".to_string(),
)))
}
};
let json_string = serde_json::to_string(contents)?;
self.inmem
Ok(self
.inmem
.set(key, json_string, expiration, set_opts, get)
.await?;
Ok(())
.await?)
}
pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
self.inmem.del(key).await?;
Ok(())
Ok(self.inmem.del(key).await?)
}
    /// Maps the underlying client's connectivity flag onto a `Result`, so
    /// cache methods can short-circuit with `?` before issuing commands.
    ///
    /// # Errors
    /// Returns an `std::io::Error`-backed error when the in-memory cache
    /// client reports it is not connected.
    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        match self.inmem.is_connected() {
            true => Ok(()),
            false => Err(Box::new(std::io::Error::new(
                std::io::ErrorKind::Other,
                "Not connected to cache".to_string(),
            ))),
        }
    }
}

View File

@ -1,2 +1,3 @@
pub mod datetime;
pub mod rss;
pub mod sitemap;

View File

@ -13,7 +13,8 @@ pub struct RssEntry {
impl From<posts::Post> for RssEntry {
fn from(post: posts::Post) -> Self {
let web_url = std::env::var("BASE_URI_WEB").expect("Environment variable not found");
let web_url =
std::env::var("BASE_URI_WEB").expect("Environment variable BASE_URI_WEB not found");
let post_url = format!("{}{}{}", web_url, "/posts/", post.post_id.to_string());
let author_full_name = format!("{} {}", post.first_name.unwrap(), post.last_name.unwrap());
@ -58,10 +59,7 @@ pub fn generate_rss(
link: &str,
posts: &HashMap<String, posts::Post>,
) -> String {
println!("{:?}", posts);
let values = posts.clone().into_values();
println!("{:?}", values);
let rss_entries = values
.map(|p| p.into())
.map(|r: RssEntry| r.to_item())
@ -69,8 +67,9 @@ pub fn generate_rss(
let safe_title = escape_str_pcdata(title);
let safe_description = escape_str_pcdata(description);
println!("{:?}", rss_entries);
// TODO: change the atom link in this string - it's not correct
// change it when we know the URL
format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">

View File

@ -0,0 +1,62 @@
use std::collections::HashMap;
/// One `<url>` element of the generated sitemap.
pub struct SitemapEntry {
    /// Absolute URL of the page.
    pub location: String,
    /// Last-modification timestamp, emitted as RFC 3339.
    pub lastmod: chrono::DateTime<chrono::Utc>,
}

impl SitemapEntry {
    /// Renders this entry as a sitemap `<url>` XML fragment.
    fn to_item(&self) -> String {
        format!(
            r#"
<url>
<loc>{}</loc>
<lastmod>{}</lastmod>
</url>
"#,
            self.location,
            self.lastmod.to_rfc3339(),
        )
    }
}
/// Assembles the complete sitemap XML document from the collected entries.
///
/// `HashMap` iteration order is unspecified, so entries appear in arbitrary
/// order — the sitemap protocol does not require any ordering.
pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
    // `.values()` already yields an iterator; the trailing `.into_iter()`
    // was redundant (clippy: useless_conversion-style noise).
    let urls = entries
        .values()
        .map(|entry| entry.to_item())
        .collect::<String>();
    format!(
        r#"
<!-- Generated by Kyouma 1.0.0-SE -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{}
</urlset>
"#,
        urls
    )
}
/// Adds the static (non-post) pages — home, posts index, projects index —
/// to the sitemap entry map, stamped with the current time.
///
/// Keys are namespaced with a `static:` prefix so they can never collide
/// with post entries, which are keyed by numeric post id. The previous
/// `(entries.len() + 1)` keys could equal an existing post id and silently
/// overwrite that post's entry. Keys are only used for map identity;
/// `generate_sitemap` reads values alone, so the rendered XML is unchanged.
pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
    let pages = [
        ("static:root", web_url.clone()),
        ("static:posts", format!("{}/posts", web_url)),
        ("static:projects", format!("{}/projects", web_url)),
    ];
    for (key, location) in pages {
        entries.insert(
            key.to_string(),
            SitemapEntry {
                location,
                lastmod: chrono::Utc::now(),
            },
        );
    }
}

View File

@ -1 +1 @@
DATABASE_URL=postgres://wyatt:wyattisawesome@localhost:5432/postgres
DATABASE_URL=postgres://wyatt:wyattisawesome@192.168.100.253:5432/postgres

1633
backend/task/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -7,6 +7,7 @@ edition = "2021"
[dependencies]
tokio = { version = "1.19.2", features = ["full"] }
reqwest = { version = "0.12.20", features = ["json", "rustls-tls"] }
job_scheduler = "1.2.1"
sqlx = { version = "0.8.2", features = [
"postgres",
@ -18,6 +19,9 @@ once_cell = "1.19.0"
dotenvy = "0.15.7"
futures = "0.3.30"
markdown = "1.0.0-alpha.20"
serde = {version = "*", features = ["derive"]}
serde_yaml = "*"
aws-sdk-s3 = "1.77.0"
serde = { version = "*", features = ["derive"] }
serde_yml = "*"
aws-sdk-s3 = "1.94.0"
aws-config = "1.8"
tracing = "0.1"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

View File

@ -4,4 +4,12 @@ also known as `task`
## What is this?
I don't know yet - hopefully this will be filled out soon.
This is a task runner/scheduler program that will fire off various tasks. These tasks can be anything from a blog post import task to an RSS generator task. Additionally, there are task logs inside the database so that you can keep track of tasks when something goes wrong.
## Things you should know
`task` uses a `.env` file at the root of the project. The file takes standard environment variables (like environment variables you would put into a `.bashrc` or ad-hoc into your shell).
For `task` to work properly, please make sure to first create the `.env` file, then fill out the following environment variables:
- `DATABASE_URL` - needed for communicating to Postgres

View File

@ -3,7 +3,7 @@ use sqlx::{postgres::PgPoolOptions, Pool, Postgres};
use std::env;
use std::sync::Arc;
use std::time::Duration;
use tasks::import_posts;
use tasks::*;
//mod config;
mod tasks;
@ -58,7 +58,7 @@ async fn main() {
.expect("Failed to connect to the database");
let mut manager = TaskManager::new(pool);
manager.register_jobs().await;
manager.register_jobs().await.unwrap();
loop {
manager.scheduler.tick();
@ -77,23 +77,40 @@ impl<'a> TaskManager<'a> {
}
}
pub async fn register_jobs(&self) {
// let jobs: Vec<Job> = Vec::new();
pub async fn register_jobs(&mut self) -> Result<(), Box<dyn std::error::Error>> {
let results = sqlx::query_as::<_, TaskJob>("SELECT task_id, task_name, schedule, is_active, created_at, deleted_at FROM tasks WHERE is_active = true AND deleted_at IS NULL")
.fetch_all(&self.pool)
.await
.unwrap();
.await?;
let mut scheduler = job_scheduler::JobScheduler::new();
results.iter().for_each(|r| {
println!("Registering job: {:?}", r.task_name);
tracing::info!("Found {} active jobs to register", results.len());
let job: _ = job_scheduler::Job::new(r.schedule.parse().unwrap(), || match r.task_id {
1 => import_posts::register(&Arc::new(&self.pool)),
_ => panic!(),
});
for job in &results {
tracing::info!("Registering job: {}", job.task_name);
scheduler.add(job);
});
let schedule = job
.schedule
.parse()
.map_err(|e| format!("Failed to parse schedule '{}': {}", job.schedule, e))?;
let task: Box<dyn Fn() + Send + Sync> = match job.task_id {
1 => {
let pool = Arc::new(self.pool.clone());
Box::new(move || import_posts::register(&pool))
}
2 => {
let pool = Arc::new(self.pool.clone());
Box::new(move || upload_rss::register(&pool))
}
3 => {
let pool = Arc::new(self.pool.clone());
Box::new(move || upload_sitemap::register(&pool))
}
id => return Err(format!("Unknown task_id: {}", id).into()),
};
self.scheduler.add(job_scheduler::Job::new(schedule, task));
}
Ok(())
}
}

View File

@ -1,5 +1,5 @@
use std::fs;
use std::path;
use std::io::Read;
use crate::utils::task_log;
use serde::{Deserialize, Deserializer};
@ -7,75 +7,108 @@ use serde::{Deserialize, Deserializer};
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
let p = pool.clone();
tokio::spawn(async move {
import_posts("/app", &p).await;
let _ = import_posts("app/", &p).await;
});
}
async fn import_posts(dir_path: &str, pool: &sqlx::Pool<sqlx::Postgres>) {
println!("hello from import_posts");
let task = task_log::start(1, pool).await.unwrap();
let entries = fs::read_dir(dir_path).unwrap();
async fn import_posts(
dir_path: &str,
pool: &sqlx::Pool<sqlx::Postgres>,
) -> Result<(), Box<dyn std::error::Error>> {
println!("Beginning post import process");
// Start task logging
let task = task_log::start(1, pool).await?;
// Setup markdown options
let options = MarkdownOptions {
options: markdown::Constructs::gfm(),
};
for f in entries {
let file = f.unwrap();
// Read directory contents
let entries = fs::read_dir(dir_path)?;
// Process each file
for entry_result in entries {
let file = entry_result?;
let file_path = file.path();
if file_path.is_file() {
// Skip non-file entries
if !file_path.is_file() {
continue;
}
let file_name = file.file_name();
let file_name_final = &file_name.to_str().unwrap();
let exists = sqlx::query_as::<_, FilenameExists>(
"SELECT EXISTS(SELECT 1 FROM posts WHERE filename = $1)",
let file_name_str = match file_name.to_str() {
Some(name) => name,
None => {
eprintln!("Skipping file with non-UTF8 filename: {:?}", file_path);
continue;
}
};
println!("Processing file: {}", file_name_str);
// Check if file already exists in database
let exists_query = sqlx::query_as!(
FilenameExists,
"SELECT EXISTS(SELECT 1 FROM posts p WHERE p.filename = $1) as filename",
file_name_str
)
.bind(file_name_final)
.fetch_one(pool)
.await
.unwrap()
.filename;
.await?;
if !exists.is_empty() {
println!(
"File does not exist! Inserting: {:?}",
file_path.file_name()
);
let file_md_contents = process_read_file(file_path, &options);
let content = markdown::to_html(&file_md_contents);
let metadata =
crate::utils::front_matter::YamlFrontMatter::parse::<MarkdownMetadata>(
&content,
)
.unwrap();
let title = metadata.metadata.title;
// Skip if file already exists in database
if !exists_query.filename.unwrap_or(false) {
println!("Importing new file: {}", file_name_str);
sqlx::query_as::<_, InsertPosts>(
"INSERT INTO posts (title, body, filename, author_id) VALUES ($1, $2, $3, $4) RETURNING (title, body, filename, author_id)",
// Process file contents
let file_md_contents = process_read_file(&file_path)?;
// Extract metadata
let document = crate::utils::front_matter::YamlFrontMatter::parse::<MarkdownMetadata>(
&file_md_contents,
)?;
let content =
markdown::to_html_with_options(&document.content, &markdown::Options::default());
println!("{:?}", content);
let title = document.metadata.title;
let content_final = content.unwrap();
// Insert into database
let results = sqlx::query_as::<_, InsertPosts>(
"INSERT INTO posts (title, body, filename, author_id) VALUES ($1, $2, $3, $4) RETURNING title, body, filename, author_id"
)
.bind(title)
.bind(content)
.bind(file_name_final)
.bind(1)
.bind(content_final)
.bind(file_name_str)
.bind(1) // Consider making author_id a parameter
.fetch_one(pool)
.await
.unwrap();
}
.await?;
println!("{:?}", results);
println!("Successfully imported: {}", file_name_str);
} else {
println!("Skipping existing file: {}", file_name_str);
}
}
task_log::update(task.task_id, String::from("Completed"), pool)
.await
.unwrap();
// Mark task as completed
task_log::update(task.task_id, String::from("Completed"), pool).await?;
Ok(())
}
fn process_read_file<P: AsRef<path::Path>>(path: P, md_opts: &MarkdownOptions) -> String {
let file_contents = fs::read_to_string(path).unwrap();
markdown::to_html(file_contents.as_str())
fn process_read_file(file_path: &std::path::Path) -> Result<String, std::io::Error> {
let mut file = std::fs::read_to_string(file_path)?;
Ok(file)
}
#[derive(Debug, sqlx::FromRow)]
struct FilenameExists {
filename: String,
filename: Option<bool>,
}
#[derive(Debug, sqlx::FromRow)]
@ -90,7 +123,7 @@ struct MarkdownOptions {
options: markdown::Constructs,
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug)]
struct MarkdownMetadata {
layout: String,
title: String,

View File

@ -1 +1,3 @@
pub mod import_posts;
pub mod upload_rss;
pub mod upload_sitemap;

View File

@ -0,0 +1,40 @@
use sqlx::{Pool, Postgres};
use crate::utils::{
request::{Request, Response},
task_log,
{upload::S3ClientConfig, *},
};
/// Registers the RSS-upload job: clones the pool and runs `upload_rss`
/// on a background tokio task. The task's result is intentionally
/// discarded here; failures surface only through the task's own logging.
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let pool = pool.clone();
    tokio::spawn(async move {
        let _ = upload_rss(&pool).await;
    });
}
/// Fetches the rendered RSS feed from the API and uploads it to
/// S3-compatible object storage as `feed.xml`.
///
/// # Errors
/// Propagates failures from task logging, the HTTP fetch, S3 configuration,
/// client construction, and the upload itself. (These were previously
/// `.unwrap()`ed or discarded despite the `Result` return type.)
async fn upload_rss(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<(), Box<dyn std::error::Error>> {
    // start task logging
    task_log::start(2, pool).await?;

    // fetch the rendered feed from the API
    let request = Request::new();
    let rss_url = format!("{}/posts/rss", request.base_url);
    let rss_result = request.request_url::<String>(&rss_url).await?;

    // upload it to object storage; non-XML responses are silently skipped
    if let Response::Xml(rss) = rss_result {
        let client_config = S3ClientConfig::from_env()?;
        let s3_client = upload::create_s3_client(&client_config).await?;
        upload::upload(
            &s3_client,
            client_config.bucket.as_str(),
            "feed.xml",
            rss.as_str(),
        )
        .await?;
        println!("Finished uploading RSS feed");
    }

    Ok(())
}

View File

@ -0,0 +1,40 @@
use crate::utils::{
request::{Request, Response},
task_log,
{upload::S3ClientConfig, *},
};
/// Registers the sitemap-upload job: clones the pool and runs
/// `upload_sitemap` on a background tokio task, discarding its result.
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let pool = pool.clone();
    tokio::spawn(async move {
        let _ = upload_sitemap(&pool).await;
    });
}
/// Fetches the rendered sitemap from the API and uploads it to
/// S3-compatible object storage as `sitemap.xml`.
///
/// # Errors
/// Propagates failures from task logging, the HTTP fetch, S3 configuration,
/// client construction, and the upload itself.
async fn upload_sitemap(
    pool: &sqlx::Pool<sqlx::Postgres>,
) -> Result<(), Box<dyn std::error::Error>> {
    task_log::start(3, pool).await?;

    // fetch the rendered sitemap from the API
    let request = Request::new();
    let sitemap_url = format!("{}/posts/sitemap", request.base_url);
    // Fixed: the Result returned by `request_url` was previously matched
    // directly against `Response::Xml`, which cannot type-check; the error
    // must be propagated (`?`) before destructuring the response.
    let sitemap_result = request.request_url::<String>(&sitemap_url).await?;

    // upload it to object storage; non-XML responses are silently skipped
    if let Response::Xml(sitemap) = sitemap_result {
        let client_config = S3ClientConfig::from_env()?;
        let s3_client = upload::create_s3_client(&client_config).await?;
        upload::upload(
            &s3_client,
            client_config.bucket.as_str(),
            "sitemap.xml",
            sitemap.as_str(),
        )
        .await?;
        println!("Finished uploading sitemap!");
    }

    Ok(())
}

View File

@ -1,18 +1,34 @@
// derived from https://github.com/EstebanBorai/yaml-front-matter
use serde::de::DeserializeOwned;
pub struct Document<T: DeserializeOwned> {
pub metadata: T,
#[derive(Debug)]
pub struct Document {
pub metadata: FrontMatter,
pub content: String,
}
/// The YAML front-matter fields recognized by the blog importer.
#[derive(Debug)]
pub struct FrontMatter {
    pub layout: String,
    pub title: String,
    /// Raw date string exactly as written in the front matter (not parsed here).
    pub date: String,
    pub published: bool,
}
pub struct YamlFrontMatter;
impl YamlFrontMatter {
pub fn parse<T: DeserializeOwned>(
markdown: &str,
) -> Result<Document<T>, Box<dyn std::error::Error>> {
) -> Result<Document, Box<dyn std::error::Error>> {
let yaml = YamlFrontMatter::extract(markdown)?;
let metadata = serde_yaml::from_str::<T>(yaml.0.as_str())?;
let clean_yaml = YamlFrontMatter::unescape_str(&yaml.0);
let metadata = match YamlFrontMatter::from_yaml_str(clean_yaml.as_str()) {
Ok(m) => m,
Err(e) => {
println!("{e}");
panic!();
}
};
Ok(Document {
metadata,
@ -52,4 +68,47 @@ impl YamlFrontMatter {
.join("\n"),
))
}
/// Undoes the escape sequences `\n`, `\"`, and `\\` in a front-matter string.
///
/// Processes the input in a single left-to-right pass so each escape is
/// consumed atomically. The previous chained `str::replace` calls handled
/// `\n` before `\\`, so an escaped backslash followed by a literal `n`
/// (`\\n`) was wrongly turned into a backslash + newline.
/// Unknown escapes (including `\t` and `\r`, which were deliberately left
/// unhandled before) are kept verbatim.
fn unescape_str(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut chars = s.chars();
    while let Some(c) = chars.next() {
        if c != '\\' {
            out.push(c);
            continue;
        }
        match chars.next() {
            Some('n') => out.push('\n'),
            Some('"') => out.push('"'),
            Some('\\') => out.push('\\'),
            // Unrecognized escape: keep both characters unchanged.
            Some(other) => {
                out.push('\\');
                out.push(other);
            }
            // Trailing lone backslash: keep it.
            None => out.push('\\'),
        }
    }
    out
}
/// Minimal hand-rolled parser for the four known front-matter keys
/// (`layout`, `title`, `date`, `published`).
///
/// Only flat `key: value` lines are understood; unknown keys are ignored
/// and missing keys fall back to empty strings / `false`. Note that quote
/// trimming is applied to `title` only — other values keep any quotes.
///
/// # Errors
/// Returns an error string when `published` is not a valid boolean.
fn from_yaml_str(yaml: &str) -> Result<FrontMatter, String> {
    let mut layout = String::new();
    let mut title = String::new();
    let mut date = String::new();
    let mut published = false;

    for line in yaml.lines() {
        let line = line.trim();
        // Split on the first ':' only, so values may themselves contain colons.
        if let Some((key, value)) = line.split_once(':') {
            let key = key.trim();
            let value = value.trim();
            match key {
                "layout" => layout = value.to_string(),
                "title" => {
                    // Remove quotes if present
                    title = value.trim_matches('\'').trim_matches('"').to_string();
                }
                "date" => date = value.to_string(),
                "published" => {
                    published = value.parse().map_err(|_| "Invalid boolean for published")?;
                }
                _ => {} // Ignore unknown fields
            }
        }
    }

    Ok(FrontMatter {
        layout,
        title,
        date,
        published,
    })
}
}

View File

@ -1,2 +1,4 @@
pub mod front_matter;
pub mod request;
pub mod task_log;
pub mod upload;

View File

@ -0,0 +1,85 @@
use reqwest::StatusCode;
use std::env;
use std::time::Duration;
/// Thin wrapper around `reqwest::Client`, bound to the API base URL.
#[derive(Debug)]
pub struct Request<'a> {
    pub client: reqwest::Client,
    /// Base URL of the API, read from `BASE_URI_API`.
    pub base_url: Box<str>,
    /// Always `None` in this file; presumably reserved for callers that
    /// want to stash a pre-built URL — confirm before removing.
    pub full_url: Option<&'a str>,
}

/// Response body, decoded according to the `content-type` response header.
#[derive(Debug)]
pub enum Response<T> {
    Json(T),
    Xml(String),
    Text(String),
    Bytes(Vec<u8>),
}
impl<'a> Request<'a> {
    /// Builds a client using rustls TLS with a 30-second request timeout.
    ///
    /// # Panics
    /// Panics if `BASE_URI_API` is unset or the HTTP client cannot be built.
    pub fn new() -> Self {
        Request {
            client: reqwest::ClientBuilder::new()
                .use_rustls_tls()
                .timeout(Duration::from_secs(30))
                .build()
                .expect("Failed to build the HTTP client"),
            base_url: env::var("BASE_URI_API")
                .expect("Environment variable BASE_URI_API is not found")
                .into_boxed_str(),
            full_url: None,
        }
    }

    /// GETs `url` and decodes the body according to the response
    /// `content-type`: JSON into `T`, XML/text into strings, anything else
    /// into raw bytes.
    ///
    /// # Errors
    /// Returns an error on transport failures, body-decoding failures, or
    /// any non-200 status code.
    pub async fn request_url<T>(
        &self,
        url: &String,
    ) -> Result<Response<T>, Box<dyn std::error::Error>>
    where
        T: for<'de> serde::Deserialize<'de>,
    {
        println!("{}", url);
        let api_result = self.client.get(url).send().await?;

        match api_result.status() {
            StatusCode::OK => {
                // Fixed: a missing or non-ASCII `content-type` header was
                // `.unwrap()`ed and would panic; it now falls through to the
                // Bytes branch. Owned copy releases the borrow on
                // `api_result` before the body-consuming calls below.
                let content_type = api_result
                    .headers()
                    .get("content-type")
                    .and_then(|v| v.to_str().ok())
                    .unwrap_or("")
                    .to_owned();

                if content_type.contains("application/json") {
                    Ok(Response::Json(api_result.json::<T>().await?))
                } else if content_type.contains("application/xml") {
                    Ok(Response::Xml(api_result.text().await?))
                } else if content_type.starts_with("text/") {
                    Ok(Response::Text(api_result.text().await?))
                } else {
                    Ok(Response::Bytes(api_result.bytes().await?.to_vec()))
                }
            }
            status => Err(Box::new(std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("Unexpected status code: {}", status),
            ))),
        }
    }
}

View File

@ -0,0 +1,73 @@
use aws_config::{BehaviorVersion, Region};
use aws_sdk_s3::{config::Credentials, Client, Config};
use std::env;
/// Connection settings for an S3-compatible (Linode) object-storage endpoint.
#[derive(Debug)]
pub struct S3ClientConfig {
    pub access_key: String,
    secret_key: String,
    endpoint: String,
    pub bucket: String,
    region: String,
}

impl S3ClientConfig {
    /// Loads the configuration from `LINODE_*` environment variables.
    ///
    /// `LINODE_ENDPOINT` and `LINODE_REGION` fall back to us-ord defaults.
    ///
    /// # Errors
    /// Returns an error if `LINODE_ACCESS_KEY`, `LINODE_SECRET_KEY`, or
    /// `LINODE_BUCKET` is unset.
    pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(S3ClientConfig {
            access_key: env::var("LINODE_ACCESS_KEY")
                .map_err(|_| "LINODE_ACCESS_KEY environment variable not set")?,
            secret_key: env::var("LINODE_SECRET_KEY")
                .map_err(|_| "LINODE_SECRET_KEY environment variable not set")?,
            endpoint: env::var("LINODE_ENDPOINT")
                .unwrap_or_else(|_| "us-ord-1.linodeobjects.com".to_string()),
            bucket: env::var("LINODE_BUCKET")
                .map_err(|_| "LINODE_BUCKET environment variable not set")?,
            region: env::var("LINODE_REGION").unwrap_or_else(|_| "us-ord".to_string()),
        })
    }
}
/// Builds an AWS SDK S3 client pointed at the configured Linode endpoint,
/// using static credentials from `config`.
///
/// # Errors
/// Currently always returns `Ok`; the `Result` return type is kept for
/// forward compatibility with fallible client construction.
pub async fn create_s3_client(
    config: &S3ClientConfig,
) -> Result<Client, Box<dyn std::error::Error>> {
    let credentials = Credentials::new(
        &config.access_key,
        &config.secret_key,
        None,
        None,
        "linode-object-storage",
    );

    let s3_config = Config::builder()
        .behavior_version(BehaviorVersion::latest())
        .region(Region::new(config.region.clone()))
        // Linode object storage is addressed as https://<endpoint>.
        .endpoint_url(format!("https://{}", config.endpoint))
        .credentials_provider(credentials)
        // Virtual-hosted-style addressing (bucket in the hostname).
        .force_path_style(false)
        .build();

    Ok(Client::from_conf(s3_config))
}
/// Uploads `content` to `bucket`/`key` with a public-read ACL.
///
/// NOTE(review): the content type is hard-coded to `application/rss+xml`,
/// but this function is also called with `sitemap.xml` — confirm whether
/// the content type should be derived from `key` or passed by the caller.
///
/// # Errors
/// Propagates any failure from the S3 `PutObject` request.
pub async fn upload(
    client: &Client,
    bucket: &str,
    key: &str,
    content: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("Uploading to Linode Object Storage...");
    println!("Bucket: {}", bucket);

    let put_object_req = client
        .put_object()
        .bucket(bucket)
        .key(key)
        .body(content.as_bytes().to_vec().into())
        .acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
        .content_type("application/rss+xml")
        .send()
        .await?;

    println!("Upload successful! ETag: {:?}", put_object_req.e_tag());
    Ok(())
}

View File

@ -1,16 +0,0 @@
layout {
pane {
pane
pane split_direction="horizontal" {
pane
pane
}
}
}
keybinds {
unbind "Ctrl s"
}
theme "catppuccin-mocha"

View File

@ -61,19 +61,22 @@
wget
nixpkgs-fmt
openssl
openssl.dev
patchelf
deno
sqlx-cli
cargo-watch
cargo-chef
valkey
pkg-config-unwrapped
];
# Environment variables
env = {
RUST_BACKTRACE = "1";
RUST_SRC_PATH = "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
ZELLIJ_CONFIG_FILE = "config.kdl";
PKG_CONFIG_PATH = "${pkgs.openssl.dev}/lib/pkgconfig";
# ZELLIJ_CONFIG_FILE = "config.kdl";
# PATH = "$PATH:$HOME/.local/share/nvim/mason/bin/deno";
};
};

View File

@ -2,7 +2,10 @@ import { Post } from "../types/index.ts";
export const PostBody = function PostBody({ post }: PostBodyOpts) {
return (
<div class="p-6 bg-[#313244] shadow-md text-[#f5e0dc]">{post.body}</div>
<div
class="p-6 bg-[#313244] shadow-md text-[#f5e0dc] post-content"
dangerouslySetInnerHTML={{ __html: post.body }}
></div>
);
};

View File

@ -1,8 +1,13 @@
import { Head } from "$fresh/runtime.ts";
import { Post } from "../types/index.ts";
import { convertUtc } from "../lib/convertUtc.ts";
export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
return (
<>
<Head>
<title>Wyatt J. Miller | {post.title}</title>
</Head>
<div class="p-6 bg-[#313244] shadow-md">
<div class="min-w-screen flex flex-col items-center justify-between bg-[#45475a] rounded-lg shadow-md">
<div class="sm:mt-14 sm:mb-14 mt-8 mb-8 flex flex-col items-center gap-y-5 gap-x-10 md:flex-row">
@ -18,6 +23,7 @@ export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
</div>
</div>
</div>
</>
);
};

View File

@ -0,0 +1,14 @@
// Island: a button that copies the current page URL to the clipboard and
// swaps its label as feedback.
// NOTE(review): import path assumes a Fresh/Preact project — confirm.
import { useState } from "preact/hooks";

export const ShareLinkButton = function ShareLinkButton({ props }) {
  // Fixed: the original destructuring was `[text. setText]` (a period where
  // the comma belongs — a syntax error), and `useState` was never imported.
  const [text, setText] = useState("Share");

  const onClickHandler = () => {
    navigator.clipboard.writeText(location.href);
    setText("Copied to clipboard!");
  };

  return (
    <button onClick={onClickHandler}>
      {text}
    </button>
  );
};

View File

@ -11,15 +11,10 @@
},
"lint": {
"rules": {
"tags": [
"fresh",
"recommended"
]
"tags": ["fresh", "recommended"]
}
},
"exclude": [
"**/_fresh/*"
],
"exclude": ["**/_fresh/*"],
"imports": {
"$fresh/": "https://deno.land/x/fresh@1.6.8/",
"$std/": "https://deno.land/std@0.216.0/",
@ -33,7 +28,8 @@
"preact/jsx-runtime": "npm:preact@10.22.1/jsx-runtime",
"tailwindcss": "npm:tailwindcss@3.4.1",
"tailwindcss/": "npm:/tailwindcss@3.4.1/",
"tailwindcss/plugin": "npm:/tailwindcss@3.4.1/plugin.js"
"tailwindcss/plugin": "npm:/tailwindcss@3.4.1/plugin.js",
"tailwind-highlightjs": "npm:tailwind-highlightjs"
},
"compilerOptions": {
"jsx": "react-jsx",

View File

@ -1,6 +1,32 @@
import { FreshContext, Handlers, PageProps } from "$fresh/server.ts";
import { ProjectCard } from "../../islands/ProjectCard.tsx";
export default function Projects() {
interface ProjectData {
project_id: number;
title: string;
repo?: string;
summary: string;
tech: string;
wip?: boolean;
created_at: string;
}
export const handler: Handlers<ProjectData> = {
async GET(_req: Request, ctx: FreshContext) {
const projectResult = await fetch(
`${Deno.env.get("BASE_URI_API")}/projects`,
);
const projectData = await projectResult.json();
return ctx.render({
projectData,
});
},
};
export default function Projects({ data }: PageProps<ProjectData>) {
const { projectData: projects } = data;
return (
<div class="space-y-12 px-10 py-8 sm:min-h-screen bg-[#313244]">
<section
@ -10,56 +36,18 @@ export default function Projects() {
<h1 class="text-3xl text-white font-bold uppercase text-center">
Projects
</h1>
<div class="grid grid-cols-1 sm:grid-cols-2 ">
<div class="grid grid-cols-1 sm:grid-cols-2">
{projects.map((project: any) => {
return (
<ProjectCard
wip
title="Website v2"
summary="This website was built by yours truly!"
// repo="https://scm.wyattjmiller.com/wymiller/my-website-v2"
tech="Typescript, Deno, Fresh, Tailwind, Rust, PostgreSQL, Docker"
/>
<ProjectCard
title="BallBot"
repo="https://scm.wyattjmiller.com/wymiller/ballbot"
summary="A Discord bot that tells me NFL games, teams, and more!"
tech="Rust, Discord SDK, Docker"
/>
<ProjectCard
title="Nix configurations"
repo="https://scm.wyattjmiller.com/wymiller/nix-config-v2"
summary="My 'master' declarative system configuration for multiple computers"
tech="Nix"
/>
<ProjectCard
wip
title="omega"
summary="Music bot for Discord that plays music from different music sources"
tech="Rust, Discord SDK, SurrealDB, yt-dlp"
/>
<ProjectCard
title="gt"
repo="https://scm.wyattjmiller.com/wymiller/gt"
summary="Command line application to interact with Gitea"
tech="Rust"
/>
<ProjectCard
title="The Boyos Bot"
repo="https://github.com/NoahFlowa/BoyosBot"
summary="All-in-one Discord bot, built with my friend, NoahFlowa"
tech="Javascript, Node, Discord SDK, Docker"
/>
<ProjectCard
title="drillsergeant"
repo="https://scm.wyattjmiller.com/wymiller/drillsergeant"
summary="Git commit counter, to scratch an itch I had"
tech="C#, .NET"
/>
<ProjectCard
title="bleak"
repo="https://scm.wyattjmiller.com/wymiller/bleak"
summary="Turns your Raspberry Pi into a lighting controller"
tech="Rust"
title={project.title}
repo={project.repo ?? undefined}
summary={project.summary}
tech={project.tech}
wip={project.wip ?? true}
/>
);
})}
</div>
</section>
</div>

View File

@ -0,0 +1 @@
Sitemap: https://wyattjmiller.us-ord-1.linodeobjects.com/feed.xml

View File

@ -1,3 +1,31 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Typography for rendered markdown post bodies. The `.post-content` class
   is applied where the post HTML is injected; colors follow the site's
   Catppuccin-style palette used elsewhere in the frontend. */
.post-content h1 {
  @apply text-3xl font-bold text-[#f5e0dc] mb-4 mt-6;
}

.post-content h2 {
  @apply text-2xl font-semibold text-[#f5e0dc] mb-3 mt-5;
}

.post-content h3 {
  @apply text-xl font-medium text-[#f5e0dc] mb-2 mt-4;
}

.post-content h4 {
  @apply text-lg font-medium text-[#f5e0dc] mb-2 mt-3;
}

.post-content p {
  @apply mb-3 text-[#f5e0dc];
}

/* Code blocks scroll horizontally rather than wrapping. */
.post-content pre {
  @apply overflow-x-scroll bg-[#454656] p-2 mb-4 rounded-lg;
}

.post-content code {
  @apply text-[#DCC9C6];
}

View File

@ -1,5 +1,7 @@
import { type Config } from "tailwindcss";
import twHLJS from "tailwind-highlightjs";
export default {
content: ["{routes,islands,components}/**/*.{ts,tsx}"],
// plugins: [twHLJS],
} satisfies Config;