Compare commits

`import` ... `post-card-` (53 commits)
| SHA1 |
|---|
| 3e24c3936a |
| 82cf30447b |
| fb071df6e4 |
| 49374b646b |
| a48396f1bc |
| 4762a28cc2 |
| 4f8e7a654c |
| 88e6b3f1ee |
| d590fc505f |
| 9fd936cc31 |
| f3237c6171 |
| ea7881416d |
| c98aed3f60 |
| 59e10f779f |
| 4dd094bee0 |
| 2464d2ab17 |
| 7e9cf9a7ec |
| 9b1e742db4 |
| 57da15f1e0 |
| 1e77fbfd4d |
| b2e22d4b6b |
| 3539ceced3 |
| a83c13a214 |
| 613adcb4c4 |
| 8f0ba5289a |
| 79cc4caa58 |
| 6eea6724bf |
| d73e572527 |
| 3014c1f841 |
| 742a10fb9b |
| 7f04dabf92 |
| bc8e093651 |
| 71b3b1f42d |
| 7875c23b67 |
| 48d9360a69 |
| ef0e9d3777 |
| 92b14e63f2 |
| baccca1cfa |
| 109f8826ff |
| d58538599c |
| d53f3da4c6 |
| 57952ec41d |
| 42dff3f186 |
| f4937dc382 |
| 13d022d44c |
| 29011c8f48 |
| 6694f47d70 |
| a64b8fdceb |
| a6b4f6917b |
| 585728de9d |
| 5a6346617a |
| 957858de59 |
| f3c96e675b |
.github/workflows/build.yaml (vendored, new file, +104)

````yaml
name: Build and Release Docker Images

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]
  workflow_dispatch:

env:
  REGISTRY: scm.wyattjmiller.com
  USERNAME: wymiller # Define username here to use consistently

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    strategy:
      matrix:
        include:
          - dockerfile: public/Dockerfile
            image: my-website-v2_public
            context: ./backend
          - dockerfile: task/Dockerfile
            image: my-website-v2_task
            context: ./backend
          - dockerfile: Dockerfile
            image: my-website-v2_frontend
            context: ./frontend
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ secrets.GH_ACTION_USERNAME }}
          password: ${{ secrets.GH_ACTION_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: scm.wyattjmiller.com/wymiller/${{ matrix.image }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ${{ matrix.context }}
          file: ${{ matrix.context }}/${{ matrix.dockerfile }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64

  create-release:
    if: startsWith(github.ref, 'refs/tags/')
    needs: build-and-push
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Create Release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_ACTION_TOKEN }}
        with:
          tag_name: ${{ github.ref_name }}
          release_name: Release ${{ github.ref_name }}
          body: |
            ## Docker Images Released

            The following Docker images have been built and pushed to the container registry:

            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_public:${{ github.ref_name }}`
            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_task:${{ github.ref_name }}`
            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_frontend:${{ github.ref_name }}`

            ### Usage

            ```bash
            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_public:${{ github.ref_name }}
            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_task:${{ github.ref_name }}
            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_frontend:${{ github.ref_name }}
            ```
          draft: false
          prerelease: false
````
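For orientation (my reading of the metadata step, not stated in the diff): `docker/metadata-action` derives image tags from the trigger, so a push to `master` tags images `master`, a pull request tags them `pr-<number>`, and a semver git tag such as `v1.2.3` (hypothetical) would expand through the three patterns to:

```
scm.wyattjmiller.com/wymiller/my-website-v2_public:1.2.3
scm.wyattjmiller.com/wymiller/my-website-v2_public:1.2
scm.wyattjmiller.com/wymiller/my-website-v2_public:1
```

and likewise for the `task` and `frontend` images. Note that `on:` does not currently include tag pushes, so the semver patterns would only take effect if such a trigger were added; the `create-release` job already guards on `refs/tags/`.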
backend/README.md

```diff
@@ -8,3 +8,5 @@ This is just an orginizational way of keeping the backend services together (so
 
 - [`public`](./public/README.md) - a RESTful API service
 - [`task`](./task/README.md) - a task scheduler service
+- [`storage`](./storage/README.md) - an internal storage library
+- [`cache`](./cache/README.md) - an internal caching library
```
backend/cache/.gitignore (vendored, new file, +2)

```
target/
.env
```
backend/cache/Cargo.lock (generated, vendored, new file, 1014 lines): diff suppressed because it is too large.
backend/cache/Cargo.toml (vendored, new file, +9)

```toml
[package]
name = "cache"
version = "0.1.0"
edition = "2024"

[dependencies]
fred = "10.1.0"
serde = "1.0.219"
serde_json = "1.0.140"
```
backend/cache/README.md (vendored, new file, +7)

```markdown
# Caching library

also known as `cache`

## What is this?

An internal caching library that houses the functionality needed for a key-value database like Redis or Valkey. This was turned into a library because both `public` and `task` needed its functionality.
```
backend/cache/src/lib.rs (vendored, new file, +70)

```rust
pub use fred::{
    clients::Pool,
    interfaces::{ClientLike, KeysInterface},
    prelude::*,
    types::{Expiration, SetOptions},
};

pub struct Cache {
    pub inmem: Pool,
}

impl Cache {
    // Fetch a key and deserialize its JSON payload; undecodable values read as a miss.
    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
    where
        T: for<'de> serde::Deserialize<'de>,
    {
        self.is_connected()?;
        let value: Option<String> = self.inmem.get(&key).await?;

        match value {
            Some(json_str) => match serde_json::from_str::<T>(&json_str) {
                Ok(deserialized) => Ok(Some(deserialized)),
                Err(_) => Ok(None),
            },
            None => Ok(None),
        }
    }

    // Serialize a value to JSON and store it, with optional expiration and SET options.
    pub async fn set<T>(
        &mut self,
        key: String,
        contents: &T,
        expiration: Option<Expiration>,
        set_opts: Option<SetOptions>,
        get: bool,
    ) -> Result<(), Box<dyn std::error::Error>>
    where
        T: for<'de> serde::Deserialize<'de> + serde::Serialize,
    {
        self.is_connected()?;
        let json_string = match serde_json::to_string::<T>(contents) {
            Ok(s) => s,
            Err(_) => {
                return Err(Box::new(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    "Unable to serialize contents passed to cache".to_string(),
                )));
            }
        };

        Ok(self
            .inmem
            .set(key, json_string, expiration, set_opts, get)
            .await?)
    }

    pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
        Ok(self.inmem.del(key).await?)
    }

    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        match self.inmem.is_connected() {
            true => Ok(()),
            false => Err(Box::new(std::io::Error::new(
                std::io::ErrorKind::Other,
                "Not connected to cache".to_string(),
            ))),
        }
    }
}
```
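A minimal usage sketch of this wrapper (my illustration, not part of the diff): the pool setup mirrors what `public`'s `main.rs` does further down; the Redis URL and key are hypothetical, and a tokio runtime plus `serde`/`serde_json` are assumed.

```rust
use cache::{Builder, Cache, ClientLike, Config, Expiration};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // fred's Builder/Config/ClientLike are re-exported through the cache crate.
    let config = Config::from_url("redis://127.0.0.1:6379")?; // hypothetical URL
    let pool = Builder::from_config(config).build_pool(4)?;
    pool.init().await?; // connect before first use

    let mut cache = Cache { inmem: pool };

    // Values round-trip through serde_json, so any Serialize + Deserialize type works.
    cache
        .set("greeting".to_string(), &"hello".to_string(), Some(Expiration::EX(60)), None, false)
        .await?;
    let cached: Option<String> = cache.get("greeting".to_string()).await?;
    assert_eq!(cached.as_deref(), Some("hello"));

    cache.del("greeting".to_string()).await?;
    Ok(())
}
```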
backend/public/.sqlx/query-053f5b53a743065aa0105903cdd0ec803861a2477c38a02754d2d350a34aaa68.json (generated, new file, +62)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, true, false, false, false, false, true, true]
  },
  "hash": "053f5b53a743065aa0105903cdd0ec803861a2477c38a02754d2d350a34aaa68"
}
```
backend/public/.sqlx/query-0891ec97ff1d5d5ab7fbc848ceb4e7ea4f46e2f6282170dfdf90ebc6ab6d5fd9.json (generated, new file, +70)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" },
      { "ordinal": 8, "name": "is_featured", "type_info": "Bool" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, true, false, false, false, false, true, true, false]
  },
  "hash": "0891ec97ff1d5d5ab7fbc848ceb4e7ea4f46e2f6282170dfdf90ebc6ab6d5fd9"
}
```
backend/public/.sqlx/query-0ec6c9d94fceba56112e78c82acc56ae01bc3c641e28ee21e331c06e2fd9e551.json (generated, new file, +62)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, true, false, false, false, false, true, true]
  },
  "hash": "0ec6c9d94fceba56112e78c82acc56ae01bc3c641e28ee21e331c06e2fd9e551"
}
```
backend/public/.sqlx/query-1f5f18ecc0f1fe0ea93ca61e3f167640a56fee610379de45017f2608094867f0.json (generated, new file, +42)

```json
{
  "db_name": "PostgreSQL",
  "query": "INSERT INTO comments (post_id, name, body) VALUES ($1, $2, $3) RETURNING comment_id, name, body, created_at",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": ["Int4", "Varchar", "Varchar"] },
    "nullable": [false, false, false, true]
  },
  "hash": "1f5f18ecc0f1fe0ea93ca61e3f167640a56fee610379de45017f2608094867f0"
}
```
backend/public/.sqlx/query-3831b52c2db3d1114c4b01a761c74168b66904bacff847844d463454b7fcde43.json (generated, new file, +66)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "title", "type_info": "Text" },
      { "ordinal": 4, "name": "body", "type_info": "Text" },
      { "ordinal": 5, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 6, "name": "publish_date", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "author_id", "type_info": "Int4" }
    ],
    "parameters": { "Left": ["Int4", "Int8", "Int8"] },
    "nullable": [false, false, false, false, false, true, true, false]
  },
  "hash": "3831b52c2db3d1114c4b01a761c74168b66904bacff847844d463454b7fcde43"
}
```
backend/public/.sqlx/query-49768c8b986078bdfaad191b3ea1f07ca033b2a734162a3f8fcf0ef0a44c1e7f.json (generated, new file, +62)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, true, false, false, false, false, true, true]
  },
  "hash": "49768c8b986078bdfaad191b3ea1f07ca033b2a734162a3f8fcf0ef0a44c1e7f"
}
```
backend/public/.sqlx/query-4e39696c45b7533e519452425b5a69d607fd8b99a526002ece8978ccb41f2c69.json (generated, new file, +41)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": ["Int8", "Int8"] },
    "nullable": [false, false, false, true]
  },
  "hash": "4e39696c45b7533e519452425b5a69d607fd8b99a526002ece8978ccb41f2c69"
}
```
backend/public/.sqlx/query-51fff32b503c65e62320071ff3ec44060b5fb45049b4f489c9a9d92e592ab5a7.json (generated, new file, +22)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT COUNT(*) FROM posts p WHERE p.deleted_at IS NULL AND p.author_id = $1",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "count", "type_info": "Int8" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [null]
  },
  "hash": "51fff32b503c65e62320071ff3ec44060b5fb45049b4f489c9a9d92e592ab5a7"
}
```
backend/public/.sqlx/query-9c0f74750e0f90916b3d2f85d0264e27523c14dff7b7adccd5b4cfbb36918901.json (generated, new file, +46)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT author_id, first_name, last_name, bio, image FROM authors WHERE author_id = $1",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "bio", "type_info": "Text" },
      { "ordinal": 4, "name": "image", "type_info": "Text" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, true, true]
  },
  "hash": "9c0f74750e0f90916b3d2f85d0264e27523c14dff7b7adccd5b4cfbb36918901"
}
```
backend/public/.sqlx/query-9d93a8a7c0a2442a511108af36d4adfb1ef8a2fac82448205654742f43dc4e75.json (generated, new file, +62)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, true, false, false, false, false, true, true]
  },
  "hash": "9d93a8a7c0a2442a511108af36d4adfb1ef8a2fac82448205654742f43dc4e75"
}
```
backend/public/.sqlx/query-ad39df8c37105f13b620f8898e570cdbc54d4bd4e402aac65a28c9aa81803831.json (generated, new file, +62)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 4, "name": "title", "type_info": "Text" },
      { "ordinal": 5, "name": "body", "type_info": "Text" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, true, false, false, false, false, true, true]
  },
  "hash": "ad39df8c37105f13b620f8898e570cdbc54d4bd4e402aac65a28c9aa81803831"
}
```
backend/public/.sqlx/query-ae5c1527389fd823f46d3b23e5ab3b8211a6323ceff845487abae26096b3fa01.json (generated, new file, +40)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT c.comment_id, c.name, c.body, c.created_at FROM comments c LEFT JOIN posts p ON p.post_id = c.post_id WHERE p.post_id = $1 AND c.deleted_at IS NULL ORDER BY created_at DESC LIMIT 20",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": ["Int4"] },
    "nullable": [false, false, false, true]
  },
  "hash": "ae5c1527389fd823f46d3b23e5ab3b8211a6323ceff845487abae26096b3fa01"
}
```
backend/public/.sqlx/query-e6764f22ac7966bdb64386aedffb9edb89aefb248a1f980d2d4e2e20b1c3ca50.json (generated, new file, +47)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT author_id, first_name, last_name, bio, image FROM authors ORDER BY created_at DESC LIMIT $1 OFFSET $2",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "author_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
      { "ordinal": 3, "name": "bio", "type_info": "Text" },
      { "ordinal": 4, "name": "image", "type_info": "Text" }
    ],
    "parameters": { "Left": ["Int8", "Int8"] },
    "nullable": [false, false, false, true, true]
  },
  "hash": "e6764f22ac7966bdb64386aedffb9edb89aefb248a1f980d2d4e2e20b1c3ca50"
}
```
backend/public/.sqlx/query-ed764b77d39df0583dc05c3ca721176b8c38e5df5fb078a53b808080c865e64d.json (generated, new file, +56)

```json
{
  "db_name": "PostgreSQL",
  "query": "SELECT project_id, title, repo, summary, tech, wip, created_at FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC",
  "describe": {
    "columns": [
      { "ordinal": 0, "name": "project_id", "type_info": "Int4" },
      { "ordinal": 1, "name": "title", "type_info": "Text" },
      { "ordinal": 2, "name": "repo", "type_info": "Text" },
      { "ordinal": 3, "name": "summary", "type_info": "Text" },
      { "ordinal": 4, "name": "tech", "type_info": "Text" },
      { "ordinal": 5, "name": "wip", "type_info": "Bool" },
      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" }
    ],
    "parameters": { "Left": [] },
    "nullable": [false, false, true, false, false, true, true]
  },
  "hash": "ed764b77d39df0583dc05c3ca721176b8c38e5df5fb078a53b808080c865e64d"
}
```
backend/public/Cargo.lock (generated, 1147 lines changed): diff suppressed because it is too large.
backend/public/Cargo.toml

```diff
@@ -7,7 +7,7 @@ authors = ["Wyatt J. Miller <wyatt@wyattjmiller.com"]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-axum = { version = "0.7.6", features = ["http2", "tokio"] }
+axum = { version = "0.8.4", features = ["http2", "tokio"] }
 tower-http = { version = "0.6.1", features = ["trace", "cors"] }
 tower_governor = "0.4.2"
 tokio = { version = "1.40.0", features = ["full"] }
@@ -25,3 +25,4 @@ serde_json = "1.0.128"
 chrono = "0.4.38"
 xml = "0.8.20"
 fred = "10.1.0"
+cache = { version = "*", path = "../cache" }
```
backend/public/Dockerfile (new file, +12)

```dockerfile
FROM rust:1.88.0

WORKDIR /app

COPY ./public ./public
COPY ./cache ./cache

RUN cargo build --release --manifest-path ./public/Cargo.toml

EXPOSE 3000

CMD ["/app/public/target/release/public"]
```
backend/public/src/datasources/authors.rs

```diff
@@ -1,6 +1,9 @@
 use sqlx::{Pool, Postgres};
 
-use crate::routes::{authors::Author, comments::Pagination, posts::Post};
+use crate::{
+    routes::{authors::Author, posts::Post},
+    utils::pagination::Pagination,
+};
 
 pub struct AuthorsDatasource;
 impl AuthorsDatasource {
@@ -8,11 +11,11 @@ impl AuthorsDatasource {
         pool: &Pool<Postgres>,
         pagination: Pagination,
     ) -> Result<Vec<Author>, sqlx::Error> {
-        let offset: i64 = (pagination.page_number - 1) * pagination.page_size;
+        let offset: i64 = (pagination.page - 1) * pagination.limit;
         sqlx::query_as!(
             Author,
             "SELECT author_id, first_name, last_name, bio, image FROM authors ORDER BY created_at DESC LIMIT $1 OFFSET $2",
-            pagination.page_size,
+            pagination.page,
             offset,
         )
         .fetch_all(pool)
@@ -32,13 +35,32 @@ impl AuthorsDatasource {
     pub async fn get_authors_posts(
         pool: &Pool<Postgres>,
         author_id: i32,
-    ) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(
-            Post,
-            "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC",
+        pagination: Pagination,
+    ) -> Result<(Vec<Post>, i64), sqlx::Error> {
+        let offset: i64 = (pagination.page - 1) * pagination.limit;
+        println!(
+            "Author ID: {}, Page: {}, Size: {}, Offset: {}",
+            author_id, pagination.page, pagination.limit, offset
+        );
+
+        let total_count = sqlx::query_scalar!(
+            "SELECT COUNT(*) FROM posts p WHERE p.deleted_at IS NULL AND p.author_id = $1",
+            author_id
+        )
+        .fetch_one(pool)
+        .await?
+        .unwrap_or(0);
+
+        let posts_query = sqlx::query_as!(
+            Post,
+            "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
             author_id,
+            pagination.limit,
+            offset,
         )
         .fetch_all(pool)
-        .await
+        .await?;
+
+        Ok((posts_query, total_count))
     }
 }
```
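A quick sanity check on the new pagination math (numbers are mine, not from the diff): with `page = 3` and `limit = 12` the offset is 24, so the query returns rows 25 through 36, while `total_count` rides along in the returned tuple so the caller can compute the number of pages.

```rust
fn offset_for(page: i64, limit: i64) -> i64 {
    (page - 1) * limit // same formula as get_authors_posts above
}

fn main() {
    assert_eq!(offset_for(3, 12), 24); // page 3 returns rows 25..=36
}
```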
backend/public/src/datasources/comments.rs

```diff
@@ -1,4 +1,7 @@
-use crate::routes::comments::{Comment, CommentInputPayload, Pagination};
+use crate::{
+    routes::comments::{Comment, CommentInputPayload},
+    utils::pagination::Pagination,
+};
 use sqlx::{Pool, Postgres};
 
 pub struct CommentsDatasource;
@@ -25,8 +28,8 @@ impl CommentsDatasource {
         pool: &Pool<Postgres>,
         pagination: Pagination,
     ) -> Result<Vec<Comment>, sqlx::Error> {
-        let offset: i64 = (pagination.page_number - 1) * pagination.page_size;
-        sqlx::query_as!(Comment, "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2", pagination.page_size, offset)
+        let offset: i64 = (pagination.page - 1) * pagination.limit;
+        sqlx::query_as!(Comment, "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2", pagination.page, offset)
         .fetch_all(pool)
         .await
     }
```
backend/public/src/datasources/posts.rs

```diff
@@ -5,7 +5,7 @@ use crate::routes::posts::{Post, PostFeaturedVariant};
 pub struct PostsDatasource;
 impl PostsDatasource {
     pub async fn get_all(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10")
         .fetch_all(pool)
         .await
     }
@@ -19,31 +19,31 @@ impl PostsDatasource {
         .execute(pool)
         .await;
 
-        sqlx::query_as!(PostFeaturedVariant, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC", post_id)
+        sqlx::query_as!(PostFeaturedVariant, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC", post_id)
         .fetch_one(pool)
         .await
     }
 
     pub async fn get_recent(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_popular(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_hot(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_featured(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
```
backend/public/src/main.rs

```diff
@@ -1,6 +1,6 @@
 use axum::Router;
+use cache::ClientLike;
 use config::config;
-use fred::prelude::*;
 use sqlx::postgres::PgPoolOptions;
 use std::fs::File;
 use std::sync::Arc;
@@ -8,9 +8,9 @@ use std::time::Duration;
 use tokio::net::TcpListener;
 use tokio::signal;
 use tokio::sync::Mutex;
-use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
+// use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
 use tower_http::{
-    cors::{Any, CorsLayer},
+    cors::CorsLayer,
     trace::{self, TraceLayer},
 };
 use tracing_subscriber::{filter, layer::SubscriberExt, prelude::*, util::SubscriberInitExt};
@@ -58,11 +58,6 @@ async fn main() {
         )
         .init();
 
-    let cors = CorsLayer::new()
-        .allow_methods(Any)
-        .allow_headers(Any)
-        .allow_origin(Any);
-
     // if std::env::var("RUST_ENV").unwrap_or_else(|_| "development".to_string()) != "development" {
     //println!("we're not in development, starting up the rate limiter");
     //let governor_conf = Arc::new(
@@ -101,13 +96,13 @@ async fn main() {
         .expect("Failed to connect to database");
 
     let pool_size = 8;
-    let config = Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
+    let config = cache::Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
 
-    let redis_pool = Builder::from_config(config)
+    let redis_pool = cache::Builder::from_config(config)
         .with_performance_config(|config| {
             config.default_command_timeout = Duration::from_secs(60);
         })
-        .set_policy(ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
+        .set_policy(cache::ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
         .build_pool(pool_size)
         .expect("Failed to create cache pool");
@@ -121,20 +116,11 @@ async fn main() {
 
     // build our application with some routes
     let app = Router::new()
-        .nest("/", routes::root::RootRoute::routes())
-        .nest("/posts", routes::posts::PostsRoute::routes(&app_state))
-        .nest(
-            "/comments",
-            routes::comments::CommentsRoute::routes(&app_state),
-        )
-        .nest(
-            "/authors",
-            routes::authors::AuthorsRoute::routes(&app_state),
-        )
-        .nest(
-            "/projects",
-            routes::projects::ProjectsRoute::routes(&app_state),
-        )
+        .merge(routes::root::RootRoute::routes())
+        .merge(routes::posts::PostsRoute::routes(&app_state))
+        .merge(routes::comments::CommentsRoute::routes(&app_state))
+        .merge(routes::authors::AuthorsRoute::routes(&app_state))
+        .merge(routes::projects::ProjectsRoute::routes(&app_state))
+        .layer(CorsLayer::permissive())
         .layer(
             TraceLayer::new_for_http()
@@ -142,7 +128,6 @@ async fn main() {
             .on_response(trace::DefaultOnResponse::new().level(tracing::Level::INFO)),
         )
         .fallback(routes::root::RootRoute::not_found);
-        // .layer(cors);
         //.layer(GovernorLayer {
         //    config: governor_conf,
         //});
```
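Two things are happening in this `main.rs` change (my reading of the diff): the sub-routers are combined with `.merge` instead of `.nest`, since each router now declares its full path prefix after the axum 0.8 bump (and axum rejects nesting at `/`), and the hand-built CORS layer is replaced by `CorsLayer::permissive()`, which allows any origin, method, and header. A condensed before/after sketch with a stand-in handler:

```rust
use axum::{routing::get, Router};

async fn all_posts() -> &'static str { "ok" }

fn build() -> (Router, Router) {
    // Before (axum 0.7 style): the prefix is supplied at nesting time.
    let old = Router::new().nest("/posts", Router::new().route("/all", get(all_posts)));
    // After: each route carries its full path, so flat routers merge cleanly.
    let new = Router::new().merge(Router::new().route("/posts/all", get(all_posts)));
    (old, new)
}
```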
backend/public/src/routes/authors.rs

```diff
@@ -1,16 +1,19 @@
 use axum::{
-    extract::{Path, State},
+    extract::{Path, Query, State},
     http::StatusCode,
     response::IntoResponse,
     routing::get,
     Json,
 };
-use fred::types::Expiration;
+use cache::Expiration;
 use serde::{Deserialize, Serialize};
 
-use crate::{datasources::authors::AuthorsDatasource, state::AppState};
-
-use super::comments::Pagination;
+use crate::{
+    datasources::authors::AuthorsDatasource,
+    routes::posts::Post,
+    state::AppState,
+    utils::pagination::{Pagination, PaginationQuery},
+};
 
 #[derive(Deserialize, Serialize, Clone)]
 pub struct Author {
@@ -26,20 +29,31 @@ pub struct AuthorGetOneParams {
     pub id: i32,
 }
 
+#[derive(Deserialize, Serialize)]
+pub struct AuthorPostsResponse {
+    posts: Vec<Post>,
+    total_posts: i64,
+}
+
 pub struct AuthorsRoute;
 impl AuthorsRoute {
     pub fn routes(app_state: &AppState) -> axum::Router {
         axum::Router::new()
-            .route("/", get(AuthorsRoute::get_all))
-            .route("/:id", get(AuthorsRoute::get_one))
-            .route("/:id/posts", get(AuthorsRoute::get_authors_posts))
+            .route("/authors", get(AuthorsRoute::get_all))
+            .route("/authors/{id}", get(AuthorsRoute::get_one))
+            .route("/authors/{id}/posts", get(AuthorsRoute::get_authors_posts))
             .with_state(app_state.clone())
     }
 
     async fn get_all(
         State(app_state): State<AppState>,
-        Json(pagination): Json<Pagination>,
+        Query(query): Query<PaginationQuery>,
     ) -> impl IntoResponse {
+        let pagination = Pagination {
+            page: query.page.unwrap_or(1),
+            limit: query.limit.unwrap_or(12),
+        };
+
         let mut state = app_state.lock().await;
         let cached: Option<Vec<Author>> = state
             .cache
@@ -104,6 +118,7 @@ impl AuthorsRoute {
         let state = app_state.clone();
 
         tracing::info!("storing database data in cache");
+
         tokio::spawn(async move {
             let mut s = state.lock().await;
             let _ = s
@@ -127,12 +142,20 @@ impl AuthorsRoute {
     async fn get_authors_posts(
         State(app_state): State<AppState>,
         Path(params): Path<AuthorGetOneParams>,
+        Query(pagination): Query<PaginationQuery>,
     ) -> impl IntoResponse {
+        let pagination = Pagination {
+            page: pagination.page.unwrap_or(1),
+            limit: pagination.limit.unwrap_or(12),
+        };
+
         let state = app_state.lock().await;
 
-        match AuthorsDatasource::get_authors_posts(&state.database, params.id).await {
-            Ok(p) => Ok(Json(p)),
+        match AuthorsDatasource::get_authors_posts(&state.database, params.id, pagination).await {
+            Ok((posts, total_posts)) => Ok(Json(AuthorPostsResponse { posts, total_posts })),
            Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
         }
     }
 }
+
+
```
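Note the capture syntax in the rewritten routes: axum 0.8 replaced `/:id` with `/{id}` for path parameters, which is why every `.route` call changes even where the handler does not. A one-line illustration:

```rust
use axum::{routing::get, Router};

async fn get_one() -> &'static str { "ok" }

// axum 0.8 path captures use braces; "/authors/:id" would now be rejected.
fn routes() -> Router {
    Router::new().route("/authors/{id}", get(get_one))
}
```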
backend/public/src/routes/comments.rs

```diff
@@ -1,13 +1,20 @@
-use crate::{datasources::comments::CommentsDatasource, state::AppState, utils::datetime::*};
+use crate::{
+    datasources::comments::CommentsDatasource,
+    state::AppState,
+    utils::{
+        datetime::*,
+        pagination::{Pagination, PaginationQuery},
+    },
+};
 use axum::{
-    extract::{Path, State},
+    extract::{Path, Query, State},
     http::StatusCode,
     response::IntoResponse,
     routing::{get, post},
     Json,
 };
+use cache::{Expiration, SetOptions};
 use chrono::Utc;
-use fred::types::{Expiration, SetOptions};
 use serde::{Deserialize, Serialize};
@@ -21,13 +28,6 @@ pub struct CommentInputPayload {
 pub struct CommentPathParams {
     id: i32,
 }
-
-#[derive(Deserialize, Serialize)]
-pub struct Pagination {
-    pub page_number: i64,
-    pub page_size: i64,
-}
-
 #[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
 pub struct Comment {
     pub comment_id: i32,
@@ -43,9 +43,9 @@ impl CommentsRoute {
     pub fn routes(app_state: &AppState) -> axum::Router {
         // add more comment routes here!
         axum::Router::new()
-            .route("/post/:id", get(CommentsRoute::get_post_comments))
-            .route("/add", post(CommentsRoute::insert_comment))
-            .route("/index", get(CommentsRoute::get_comments_index))
+            .route("/comments/post/{id}", get(CommentsRoute::get_post_comments))
+            .route("/comments/add", post(CommentsRoute::insert_comment))
+            .route("/comments/index", get(CommentsRoute::get_comments_index))
             .with_state(app_state.clone())
     }
@@ -96,8 +96,13 @@ impl CommentsRoute {
 
     async fn get_comments_index(
         State(app_state): State<AppState>,
-        Json(pagination): Json<Pagination>,
+        Query(query): Query<PaginationQuery>,
     ) -> impl IntoResponse {
+        let pagination = Pagination {
+            page: query.page.unwrap_or(1),
+            limit: query.limit.unwrap_or(12),
+        };
+
         let state = app_state.lock().await;
 
         match CommentsDatasource::get_index_comments(&state.database, pagination).await {
@@ -106,3 +111,5 @@ impl CommentsRoute {
         }
     }
 }
+
+
```
backend/public/src/routes/posts.rs

```diff
@@ -15,8 +15,8 @@ use axum::{
     routing::get,
     Json, Router,
 };
+use cache::Expiration;
 use chrono::Utc;
-use fred::types::Expiration;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
@@ -31,6 +31,9 @@ pub struct Post {
     #[serde(serialize_with = "serialize_datetime")]
     #[serde(deserialize_with = "deserialize_datetime")]
     pub created_at: Option<chrono::DateTime<Utc>>,
+    #[serde(serialize_with = "serialize_datetime")]
+    #[serde(deserialize_with = "deserialize_datetime")]
+    pub publish_date: Option<chrono::DateTime<Utc>>,
 }
 
 #[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
@@ -44,6 +47,9 @@ pub struct PostFeaturedVariant {
     #[serde(serialize_with = "serialize_datetime")]
     #[serde(deserialize_with = "deserialize_datetime")]
     pub created_at: Option<chrono::DateTime<Utc>>,
+    #[serde(serialize_with = "serialize_datetime")]
+    #[serde(deserialize_with = "deserialize_datetime")]
+    pub publish_date: Option<chrono::DateTime<Utc>>,
     pub is_featured: Option<bool>,
 }
@@ -57,14 +63,14 @@ impl PostsRoute {
     pub fn routes(app_state: &AppState) -> Router {
         // add more post routes here!
         Router::new()
-            .route("/all", get(PostsRoute::get_all))
-            .route("/:id", get(PostsRoute::get_one))
-            .route("/recent", get(PostsRoute::get_recent_posts))
-            .route("/popular", get(PostsRoute::get_popular_posts))
-            .route("/hot", get(PostsRoute::get_hot_posts))
-            .route("/featured", get(PostsRoute::get_featured_posts))
-            .route("/rss", get(PostsRoute::get_rss_posts))
-            .route("/sitemap", get(PostsRoute::get_sitemap))
+            .route("/posts/all", get(PostsRoute::get_all))
+            .route("/posts/{id}", get(PostsRoute::get_one))
+            .route("/posts/recent", get(PostsRoute::get_recent_posts))
+            .route("/posts/popular", get(PostsRoute::get_popular_posts))
+            .route("/posts/hot", get(PostsRoute::get_hot_posts))
+            .route("/posts/featured", get(PostsRoute::get_featured_posts))
+            .route("/posts/rss", get(PostsRoute::get_rss_posts))
+            .route("/posts/sitemap", get(PostsRoute::get_sitemap))
             .with_state(app_state.clone())
     }
```
backend/public/src/routes/projects.rs

```diff
@@ -1,7 +1,6 @@
 use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
-use axum::http::{HeaderMap, HeaderValue};
 use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
-use fred::types::Expiration;
+use cache::Expiration;
 use serde::{Deserialize, Serialize};
 
 #[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
@@ -21,7 +20,7 @@ pub struct ProjectsRoute;
 impl ProjectsRoute {
     pub fn routes(app_state: &AppState) -> Router {
         Router::new()
-            .route("/", get(ProjectsRoute::get_all))
+            .route("/projects", get(ProjectsRoute::get_all))
             .with_state(app_state.clone())
     }
@@ -67,3 +66,5 @@ impl ProjectsRoute {
         }
     }
 }
+
+
```
backend/public/src/routes/root.rs

```diff
@@ -1,15 +1,10 @@
 use axum::{
-    extract::State,
     http::StatusCode,
     response::{Html, IntoResponse},
     routing::get,
     Router,
 };
-
 use crate::{datasources::posts::PostsDatasource, state::AppState};
-
-use super::posts::Post;
-
 pub struct RootRoute;
 impl RootRoute {
     pub fn routes() -> Router {
```

backend/public/src/state.rs

```diff
@@ -1,83 +1,17 @@
-use fred::interfaces::KeysInterface;
-use fred::{clients::Pool, prelude::*};
 use sqlx::PgPool;
 
 pub type AppState = std::sync::Arc<tokio::sync::Mutex<AppInternalState>>;
 
 pub struct AppInternalState {
     pub database: sqlx::postgres::PgPool,
-    pub cache: Cache,
-}
-
-pub struct Cache {
-    pub inmem: Pool,
+    pub cache: cache::Cache,
 }
 
 impl AppInternalState {
-    pub fn new(database: PgPool, cache: Pool) -> Self {
+    pub fn new(database: PgPool, cache: cache::Pool) -> Self {
         AppInternalState {
             database,
-            cache: Cache { inmem: cache },
-        }
-    }
-}
-
-impl Cache {
-    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de>,
-    {
-        self.is_connected()?;
-        let value: Option<String> = self.inmem.get(&key).await?;
-
-        match value {
-            Some(json_str) => match serde_json::from_str::<T>(&json_str) {
-                Ok(deserialized) => Ok(Some(deserialized)),
-                Err(_) => Ok(None),
-            },
-            None => Ok(None),
-        }
-    }
-
-    pub async fn set<T>(
-        &mut self,
-        key: String,
-        contents: &T,
-        expiration: Option<Expiration>,
-        set_opts: Option<SetOptions>,
-        get: bool,
-    ) -> Result<(), Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de> + serde::Serialize,
-    {
-        self.is_connected()?;
-        let json_string = match serde_json::to_string::<T>(contents) {
-            Ok(s) => s,
-            Err(_) => {
-                return Err(Box::new(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    "Unable to deserialize contents passed to cache".to_string(),
-                )))
-            }
-        };
-
-        Ok(self
-            .inmem
-            .set(key, json_string, expiration, set_opts, get)
-            .await?)
-    }
-
-    pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
-        Ok(self.inmem.del(key).await?)
-    }
-
-    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
-        match self.inmem.is_connected() {
-            true => Ok(()),
-            false => Err(Box::new(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Not connected to cache".to_string(),
-            ))),
-        }
-    }
-}
+            cache: cache::Cache { inmem: cache },
+        }
+    }
+}
```
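With the `Cache` implementation moved into the `cache` crate, `state.rs` shrinks to plain wiring. A sketch of how the two pools end up inside the shared state (the helper name is mine, not from the PR):

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

// Hypothetical helper showing the shape of AppState construction:
fn make_state(pg: sqlx::PgPool, redis: cache::Pool) -> AppState {
    Arc::new(Mutex::new(AppInternalState::new(pg, redis)))
}
```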
backend/public/src/utils/mod.rs

```diff
@@ -1,3 +1,4 @@
 pub mod datetime;
+pub mod pagination;
 pub mod rss;
 pub mod sitemap;
```
backend/public/src/utils/pagination.rs (new file, +13)

```rust
use serde::{Deserialize, Serialize};

#[derive(Deserialize, Serialize)]
pub struct PaginationQuery {
    pub page: Option<i64>,
    pub limit: Option<i64>,
}

#[derive(Deserialize, Serialize)]
pub struct Pagination {
    pub page: i64,
    pub limit: i64,
}
```
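The route handlers above turn these optional query parameters into a concrete `Pagination`, defaulting to page 1 with 12 items, so a request like `GET /comments/index?page=2` works without a `limit`. The conversion they each repeat is just:

```rust
// e.g. ?page=2 with no limit  ->  Pagination { page: 2, limit: 12 }
fn resolve(query: PaginationQuery) -> Pagination {
    Pagination {
        page: query.page.unwrap_or(1),
        limit: query.limit.unwrap_or(12),
    }
}
```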
backend/public/src/utils/rss.rs

```diff
@@ -83,7 +83,8 @@ pub fn generate_rss(
 <atom:link href="https://wyattjmiller.com/posts.xml" rel="self" type="application/rss+xml" />
 {}
 </channel>
-</rss>"#,
+</rss>
+"#,
         rss_entries
     )
 }
```

backend/public/src/utils/sitemap.rs

```diff
@@ -23,7 +23,6 @@ impl SitemapEntry {
 pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
     let urls = entries
         .values()
-        .into_iter()
         .map(|entry| entry.to_item())
         .collect::<String>();
     format!(
@@ -39,21 +38,21 @@ pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
 
 pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10000".to_string(),
         SitemapEntry {
             location: web_url.clone(),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10001".to_string(),
         SitemapEntry {
             location: format!("{}/posts", web_url),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        (entries.len() + 1).to_string(),
+        "10002".to_string(),
         SitemapEntry {
             location: format!("{}/projects", web_url),
             lastmod: chrono::Utc::now(),
```
backend/storage/.gitignore (vendored, new file, +2)

```
target/
.env
```
backend/storage/Cargo.lock (generated, new file, 3589 lines): diff suppressed because it is too large.
backend/storage/Cargo.toml (new file, +15)

```toml
[package]
name = "storage"
description = "Internal object storage library"
version = "0.1.0"
edition = "2024"

[dependencies]
aws-sdk-s3 = "1.94.0"
aws-config = "1.8"
azure_core = "0.25.0"
azure_storage = "0.21.0"
azure_storage_blobs = "0.21.0"
async-trait = "0.1"
tokio = { version = "1.0", features = ["full"] }
thiserror = "2.0.12"
```
backend/storage/README.md (new file, +7)

```markdown
# Storage library

also known as `storage`

## What is this?

An internal storage library. This was needed because both `public` and `task` needed storage functionality. Additionally, this helps maintainability and avoids duplicate code.
```
backend/storage/src/error.rs (new file, +11)

```rust
use azure_core::error::HttpError;

#[derive(Debug, thiserror::Error)]
pub enum AdapterError {
    #[error("Azure error: {0}")]
    Azure(#[from] azure_core::Error),
    #[error("HTTP error: {0}")]
    Http(#[from] HttpError),
    #[error("Serialization error: {0}")]
    Serialization(String),
}
```
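`thiserror`'s `#[from]` attributes generate `From` impls, so `?` on an `azure_core::Error` (or `HttpError`) converts into `AdapterError` automatically; only the `Serialization` variant is built by hand. A small sketch (the function and message are hypothetical):

```rust
use storage::error::AdapterError;

fn encode(payload: &str) -> Result<Vec<u8>, AdapterError> {
    if payload.is_empty() {
        // Hand-built variant; the Azure/Http variants arise via `?` plus #[from].
        return Err(AdapterError::Serialization("empty payload".to_string()));
    }
    Ok(payload.as_bytes().to_vec())
}
```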
backend/storage/src/lib.rs (new file, +2)

```rust
pub mod error;
pub mod services;
```
backend/storage/src/services/aws.rs (new file, +126)

```rust
use crate::{error::AdapterError, services::ObjectStorageClient};
use async_trait::async_trait;
use aws_config::{BehaviorVersion, Region};
use aws_sdk_s3::{Client, Config, config::Credentials};
use std::env;

#[derive(Clone, Debug)]
pub struct S3ClientConfig {
    pub access_key: String,
    secret_key: String,
    endpoint: String,
    pub bucket: String,
    region: String,
}

#[derive(Clone)]
pub struct S3Client {
    client: Client,
    pub client_config: S3ClientConfig,
}

impl S3ClientConfig {
    pub fn new(
        access_key: &str,
        secret_key: &str,
        endpoint: &str,
        bucket: &str,
        region: &str,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        Ok(S3ClientConfig {
            access_key: access_key.to_owned(),
            secret_key: secret_key.to_owned(),
            endpoint: endpoint.to_owned(),
            bucket: bucket.to_owned(),
            region: region.to_owned(),
        })
    }

    pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(S3ClientConfig {
            access_key: env::var("S3_ACCESS_KEY")
                .map_err(|_| "S3_ACCESS_KEY environment variable not set")?,
            secret_key: env::var("S3_SECRET_KEY")
                .map_err(|_| "S3_SECRET_KEY environment variable not set")?,
            endpoint: env::var("S3_ENDPOINT")
                .unwrap_or_else(|_| "us-ord-1.linodeobjects.com".to_string()),
            bucket: env::var("S3_BUCKET").map_err(|_| "S3_BUCKET environment variable not set")?,
            region: env::var("S3_REGION").unwrap_or_else(|_| "us-ord".to_string()),
        })
    }
}

impl S3Client {
    pub fn new(config: &S3ClientConfig) -> Self {
        let credentials = Credentials::new(
            &config.access_key,
            &config.secret_key,
            None,
            None,
            "linode-object-storage",
        );

        let s3_config = Config::builder()
            .behavior_version(BehaviorVersion::latest())
            .region(Region::new(config.region.clone()))
            .endpoint_url(format!("https://{}", config.endpoint))
            .credentials_provider(credentials)
            .force_path_style(false)
            .build();

        Self {
            client: Client::from_conf(s3_config),
            client_config: config.clone(),
        }
    }
}

#[async_trait]
impl ObjectStorageClient for S3Client {
    type Error = AdapterError;

    async fn put_object(&self, bucket: &str, key: &str, data: Vec<u8>) -> Result<(), Self::Error> {
        println!("Uploading to S3 (or S3 like) Object Storage...");
        println!("Bucket: {}", bucket);

        let _ = self
            .client
            .put_object()
            .bucket(bucket)
            .key(key)
            .body(data.into())
            .acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
            .content_type("application/xml")
            .send()
            .await
            .unwrap();

        Ok(())
    }

    async fn get_object(&self, _bucket: &str, _key: &str) -> Result<Vec<u8>, Self::Error> {
        todo!("not impl")
    }

    async fn delete_object(&self, _bucket: &str, _key: &str) -> Result<(), Self::Error> {
        todo!("not impl")
    }

    async fn list_objects(
        &self,
        _bucket: &str,
        _prefix: Option<&str>,
    ) -> Result<Vec<String>, Self::Error> {
        todo!("not impl")
    }

    async fn object_exists(&self, _bucket: &str, _key: &str) -> Result<bool, Self::Error> {
        todo!("not impl")
    }
}

impl Default for S3ClientConfig {
    fn default() -> Self {
        S3ClientConfig::from_env().unwrap()
    }
}
```
71 backend/storage/src/services/azure.rs Normal file
@@ -0,0 +1,71 @@
use crate::error::AdapterError;
use async_trait::async_trait;
use azure_storage::prelude::*;
use azure_storage_blobs::prelude::*;

use super::ObjectStorageClient;

pub struct AzureBlobClient {
    client: BlobServiceClient,
}

impl AzureBlobClient {
    pub fn new(account_name: &str, account_key: String) -> Self {
        let storage_credentials = StorageCredentials::access_key(account_name, account_key);
        let client = BlobServiceClient::new(account_name, storage_credentials);

        Self { client }
    }

    // Helper method to get container client
    fn get_container_client(&self, container_name: &str) -> ContainerClient {
        self.client.container_client(container_name)
    }

    // Helper method to get blob client
    fn get_blob_client(&self, container_name: &str, blob_name: &str) -> BlobClient {
        self.get_container_client(container_name)
            .blob_client(blob_name)
    }
}

#[async_trait]
impl ObjectStorageClient for AzureBlobClient {
    type Error = AdapterError;

    async fn put_object(
        &self,
        bucket: &str, // container name
        key: &str,    // blob name
        data: Vec<u8>,
    ) -> Result<(), Self::Error> {
        let blob_client = self.get_blob_client(bucket, key);
        let _request = blob_client.put_block_blob(data).await.unwrap();
        Ok(())
    }

    async fn get_object(&self, bucket: &str, key: &str) -> Result<Vec<u8>, Self::Error> {
        let blob_client = self.get_blob_client(bucket, key);

        let response = blob_client.get_content().await.unwrap();
        Ok(response)
    }

    async fn delete_object(&self, bucket: &str, key: &str) -> Result<(), Self::Error> {
        let blob_client = self.get_blob_client(bucket, key);
        blob_client.delete().await.unwrap();
        Ok(())
    }

    async fn list_objects(
        &self,
        _bucket: &str,
        _prefix: Option<&str>,
    ) -> Result<Vec<String>, Self::Error> {
        todo!("not impl")
    }

    async fn object_exists(&self, _bucket: &str, _key: &str) -> Result<bool, Self::Error> {
        todo!("not impl")
    }
}

23 backend/storage/src/services/mod.rs Normal file
@@ -0,0 +1,23 @@
pub mod aws;
pub mod azure;

use async_trait::async_trait;

#[async_trait]
pub trait ObjectStorageClient {
    type Error;

    async fn put_object(&self, bucket: &str, key: &str, data: Vec<u8>) -> Result<(), Self::Error>;

    async fn get_object(&self, bucket: &str, key: &str) -> Result<Vec<u8>, Self::Error>;

    async fn delete_object(&self, bucket: &str, key: &str) -> Result<(), Self::Error>;

    async fn list_objects(
        &self,
        bucket: &str,
        prefix: Option<&str>,
    ) -> Result<Vec<String>, Self::Error>;

    async fn object_exists(&self, bucket: &str, key: &str) -> Result<bool, Self::Error>;
}
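Since both `S3Client` and `AzureBlobClient` implement this trait, callers can stay backend-agnostic. A sketch under that assumption (the function name `publish` and the key `feed.xml` are hypothetical, not part of the crate):

```rust
use storage::services::ObjectStorageClient;

// Works with either backend; the concrete error type comes from the impl.
async fn publish<C: ObjectStorageClient>(
    client: &C,
    bucket: &str,
    xml: String,
) -> Result<(), C::Error> {
    client.put_object(bucket, "feed.xml", xml.into_bytes()).await
}
```
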
@@ -1 +0,0 @@
DATABASE_URL=postgres://wyatt:wyattisawesome@192.168.100.253:5432/postgres

46 backend/task/.sqlx/query-364c58ab7678af9d36003af9858e69b876be3939a4d9f34a95950ab7cc166778.json generated Normal file
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "INSERT INTO logs (task_id, created_at, task_status) VALUES ($1, now(), 'pending') RETURNING task_id, log_id, created_at, task_status, finished_at",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "task_id",
        "type_info": "Int4"
      },
      {
        "ordinal": 1,
        "name": "log_id",
        "type_info": "Int4"
      },
      {
        "ordinal": 2,
        "name": "created_at",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 3,
        "name": "task_status",
        "type_info": "Text"
      },
      {
        "ordinal": 4,
        "name": "finished_at",
        "type_info": "Timestamptz"
      }
    ],
    "parameters": {
      "Left": [
        "Int4"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      true
    ]
  },
  "hash": "364c58ab7678af9d36003af9858e69b876be3939a4d9f34a95950ab7cc166778"
}

22 backend/task/.sqlx/query-723a24f681b1b7866e4a2636ddda2bb8ed78d60540158ffa0fbebba4bdbfa2b9.json generated Normal file
@@ -0,0 +1,22 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT EXISTS(SELECT 1 FROM posts p WHERE p.filename = $1) as filename",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "filename",
        "type_info": "Bool"
      }
    ],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": [
      null
    ]
  },
  "hash": "723a24f681b1b7866e4a2636ddda2bb8ed78d60540158ffa0fbebba4bdbfa2b9"
}

15 backend/task/.sqlx/query-e3f9cdc6fede1a8601c3775e829f04eef5b00cf7bc5a087b5ba5c70f99e76763.json generated Normal file
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "UPDATE logs SET task_status = $1 WHERE task_id = $2",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Int4"
      ]
    },
    "nullable": []
  },
  "hash": "e3f9cdc6fede1a8601c3775e829f04eef5b00cf7bc5a087b5ba5c70f99e76763"
}

1410 backend/task/Cargo.lock generated
File diff suppressed because it is too large

@@ -6,6 +6,8 @@ edition = "2021"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
storage = { version = "0.1.0", path = "../storage" }
|
||||
cache = { version = "0.1.0", path = "../cache" }
|
||||
tokio = { version = "1.19.2", features = ["full"] }
|
||||
reqwest = { version = "0.12.20", features = ["json", "rustls-tls"] }
|
||||
job_scheduler = "1.2.1"
|
||||
@@ -21,7 +23,5 @@ futures = "0.3.30"
|
||||
markdown = "1.0.0-alpha.20"
|
||||
serde = { version = "*", features = ["derive"] }
|
||||
serde_yml = "*"
|
||||
aws-sdk-s3 = "1.94.0"
|
||||
aws-config = "1.8"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
|
||||
14 backend/task/Dockerfile Normal file
@@ -0,0 +1,14 @@
FROM rust:1.88.0

WORKDIR /app

COPY ./task ./task
COPY ./cache ./cache
COPY ./storage ./storage

RUN mkdir /app/posts
RUN cargo build --release --manifest-path ./task/Cargo.toml

EXPOSE 3000

CMD ["/app/task/target/release/task"]

@@ -12,4 +12,9 @@ This is a task runner/scheduler programs that will fire off various tasks. These

For `task` to work properly, please make sure to first create the `.env` file, then fill out the following environment variables:

- `BASE_URI_API` - needed for communicating with `public`
- `DATABASE_URL` - needed for communicating with Postgres
- `REDIS_URL` - needed for communicating with the cache (Redis or Valkey)
- `S3_ACCESS_KEY` - needed for Amazon S3 (or compatible services) storage
- `S3_SECRET_KEY` - needed for Amazon S3 (or compatible services) storage
- `S3_BUCKET` - needed for Amazon S3 (or compatible services) storage
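For reference, a filled-in `.env` might look like the following; every value here is a placeholder:

```
BASE_URI_API=http://localhost:3000
DATABASE_URL=postgres://user:password@localhost:5432/postgres
REDIS_URL=redis://localhost:6379
S3_ACCESS_KEY=<your-access-key>
S3_SECRET_KEY=<your-secret-key>
S3_BUCKET=<your-bucket>
```
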
11 backend/task/src/config.rs Normal file
@@ -0,0 +1,11 @@
use std::path::PathBuf;

pub fn config() -> Configuration {
    Configuration {
        env: dotenvy::dotenv(),
    }
}

pub struct Configuration {
    env: Result<PathBuf, dotenvy::Error>,
}

@@ -1,16 +1,20 @@
use cache::ClientLike;
use chrono::Utc;
use sqlx::{postgres::PgPoolOptions, Pool, Postgres};
use std::env;
use std::sync::Arc;
use std::time::Duration;
use storage::services::aws;
use tasks::*;

//mod config;
mod config;
mod tasks;
mod utils;

pub struct TaskManager<'a> {
    pool: Pool<Postgres>,
    cache: cache::Pool,
    s3_client: aws::S3Client,
    jobs: Vec<TaskJob>,
    last_activated: Option<chrono::DateTime<Utc>>,
    last_job: Option<TaskJob>,
@@ -29,6 +33,7 @@ pub struct TaskLog {
#[derive(Debug)]
pub enum TaskStatus {
    Pending(String),
    Running(String),
    Completed(String),
    Failed(String),
}
@@ -47,7 +52,9 @@ pub struct TaskJob {
async fn main() {
    println!("Hello, world!");

    dotenvy::dotenv().unwrap();
    let _ = config::config();

    // setup database
    let database_url =
        env::var("DATABASE_URL").expect("Environment variable DATABASE_URL is not found");
    let pool = PgPoolOptions::new()
@@ -57,7 +64,35 @@ async fn main() {
        .await
        .expect("Failed to connect to the database");

    let mut manager = TaskManager::new(pool);
    // setup redis/valkey
    let redis_url = match std::env::var("REDIS_URL").unwrap().as_str() {
        // TODO: fix the unwrap ^
        "" => "redis://localhost:6379".to_string(),
        x => x.to_string(),
    };

    let pool_size = 8;
    let config = cache::Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<

    let redis_pool = cache::Builder::from_config(config)
        .with_performance_config(|config| {
            config.default_command_timeout = Duration::from_secs(60);
        })
        .set_policy(cache::ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
        .build_pool(pool_size)
        .expect("Failed to create cache pool");

    if std::env::var("REDIS_URL").unwrap() != "" {
        // TODO: fix the unwrap ^
        redis_pool.init().await.expect("Failed to connect to cache");
        let _ = redis_pool.flushall::<i32>(false).await;
    }

    // setup storage
    let s3_client_config = aws::S3ClientConfig::from_env().unwrap();
    let s3_client = aws::S3Client::new(&s3_client_config);

    let mut manager = TaskManager::new(pool, redis_pool, s3_client);
    manager.register_jobs().await.unwrap();

    loop {
@@ -67,9 +102,11 @@ async fn main() {
}

impl<'a> TaskManager<'a> {
    fn new(pool: Pool<Postgres>) -> Self {
    fn new(pool: Pool<Postgres>, cache: cache::Pool, s3_client: aws::S3Client) -> Self {
        TaskManager {
            pool,
            cache,
            s3_client,
            jobs: Vec::new(),
            last_activated: None,
            last_job: None,
@@ -99,11 +136,15 @@ impl<'a> TaskManager<'a> {
            }
            2 => {
                let pool = Arc::new(self.pool.clone());
                Box::new(move || upload_rss::register(&pool))
                let cache = Arc::new(self.cache.clone());
                let s3_client = Arc::new(self.s3_client.clone());
                Box::new(move || upload_rss::register(&pool, &cache, &s3_client))
            }
            3 => {
                let pool = Arc::new(self.pool.clone());
                Box::new(move || upload_sitemap::register(&pool))
                let cache = Arc::new(self.cache.clone());
                let s3_client = Arc::new(self.s3_client.clone());
                Box::new(move || upload_sitemap::register(&pool, &cache, &s3_client))
            }
            id => return Err(format!("Unknown task_id: {}", id).into()),
        };

@@ -2,12 +2,13 @@ use std::fs;
use std::io::Read;

use crate::utils::task_log;
use chrono::{DateTime, FixedOffset, Utc};
use serde::{Deserialize, Deserializer};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let p = pool.clone();
    tokio::spawn(async move {
        let _ = import_posts("app/", &p).await;
        let _ = import_posts("/app/posts", &p).await;
    });
}

@@ -19,21 +20,15 @@ async fn import_posts(

    // Start task logging
    let task = task_log::start(1, pool).await?;

    // Setup markdown options
    let options = MarkdownOptions {
        options: markdown::Constructs::gfm(),
    };

    // Read directory contents
    let entries = fs::read_dir(dir_path)?;

    // Process each file
    for entry_result in entries {
        let file = entry_result?;
        let file_path = file.path();

        // Skip non-file entries
        if !file_path.is_file() {
            continue;
        }
@@ -71,22 +66,23 @@ async fn import_posts(

        let content =
            markdown::to_html_with_options(&document.content, &markdown::Options::default());
        println!("{:?}", content);

        let title = document.metadata.title;
        let pub_date =
            DateTime::parse_from_str(document.metadata.date.as_ref(), "%Y-%m-%d %H:%M:%S %z")?;
        let content_final = content.unwrap();

        // Insert into database
        let results = sqlx::query_as::<_, InsertPosts>(
            "INSERT INTO posts (title, body, filename, author_id) VALUES ($1, $2, $3, $4) RETURNING title, body, filename, author_id"
            "INSERT INTO posts (title, body, filename, publish_date, author_id) VALUES ($1, $2, $3, $4, $5) RETURNING title, body, filename, author_id"
        )
        .bind(title)
        .bind(content_final)
        .bind(file_name_str)
        .bind(pub_date)
        .bind(1) // Consider making author_id a parameter
        .fetch_one(pool)
        .await?;
        println!("{:?}", results);

        println!("Successfully imported: {}", file_name_str);
    } else {

@@ -1,19 +1,25 @@
use sqlx::{Pool, Postgres};

use crate::utils::{
    request::{Request, Response},
    task_log,
    {upload::S3ClientConfig, *},
};
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
    let p = pool.clone();
    let c = cache.clone();
    let s3 = s3_client.to_owned();

    tokio::spawn(async move {
        let _ = upload_rss(&p).await;
        let _ = upload_rss(&p, &c, s3).await;
    });
}

async fn upload_rss(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<(), Box<dyn std::error::Error>> {
async fn upload_rss(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cache: &cache::Pool,
    s3_client: S3Client,
) -> Result<(), Box<dyn std::error::Error>> {
    // start task logging
    task_log::start(2, pool).await?;

@@ -24,15 +30,37 @@ async fn upload_rss(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<(), Box<dyn std

    // upload the sucker to obj storage
    if let Response::Xml(rss) = rss_result {
        let client_config = S3ClientConfig::from_env().unwrap();
        let s3_client = upload::create_s3_client(&client_config).await.unwrap();
        let _ = upload::upload(
            &s3_client,
            client_config.bucket.as_str(),
        let cached: &Option<String> = &cache.get(String::from("rss")).await.unwrap_or(None);
        let cache_clone = cache.clone();
        if let Some(cached_value) = cached {
            if *cached_value == rss {
                println!("Response is the same in the cache, exiting");
                return Ok(());
            }
        }
        let r = rss.clone();

        let _ = s3_client
            .put_object(
                s3_client.client_config.bucket.as_str(),
                "feed.xml",
                rss.as_str(),
                rss.as_bytes().to_vec(),
            )
            .await;
            .await?;

        tokio::spawn(async move {
            cache_clone
                .set::<String, String, &String>(
                    String::from("rss"),
                    &r,
                    Some(cache::Expiration::EX(3600)),
                    None,
                    false,
                )
                .await
                .unwrap();
        });

        println!("Finished uploading RSS feed");
    }

@@ -1,38 +1,65 @@
use crate::utils::{
    request::{Request, Response},
    task_log,
    {upload::S3ClientConfig, *},
};
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
    let p = pool.clone();
    let c = cache.clone();
    let s3 = s3_client.to_owned();

    tokio::spawn(async move {
        let _ = upload_sitemap(&p).await;
        let _ = upload_sitemap(&p, &c, s3).await;
    });
}

async fn upload_sitemap(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cache: &cache::Pool,
    s3_client: S3Client,
) -> Result<(), Box<dyn std::error::Error>> {
    // TODO:: get sitemap and upload it to bucket??
    // start task logging
    task_log::start(3, pool).await?;

    // get request and request the things
    let request = Request::new();
    let sitemap_url = format!("{}/posts/sitemap", request.base_url);
    let sitemap_result = request.request_url::<String>(&sitemap_url).await;
    let sitemap_result = request.request_url::<String>(&sitemap_url).await.unwrap();

    // upload the sucker to obj storage
    if let Response::Xml(sitemap) = sitemap_result {
        let client_config = S3ClientConfig::from_env().unwrap();
        let s3_client = upload::create_s3_client(&client_config).await.unwrap();
        let _ = upload::upload(
            &s3_client,
            client_config.bucket.as_str(),
        let cached: &Option<String> = &cache.get(String::from("sitemap")).await.unwrap_or(None);
        let cache_clone = cache.clone();
        if let Some(cached_value) = cached {
            if *cached_value == sitemap {
                println!("Response is the same in the cache, exiting");
                return Ok(());
            }
        }
        let s = sitemap.clone();

        let _ = s3_client
            .put_object(
                s3_client.client_config.bucket.as_str(),
                "sitemap.xml",
                sitemap.as_str(),
                sitemap.as_bytes().to_vec(),
            )
            .await;
            .await?;

        tokio::spawn(async move {
            cache_clone
                .set::<String, String, &String>(
                    String::from("sitemap"),
                    &s,
                    Some(cache::Expiration::EX(3600)),
                    None,
                    false,
                )
                .await
                .unwrap();
        });
        println!("Finished uploading sitemap!");
    }

@@ -1,73 +0,0 @@
use aws_config::{BehaviorVersion, Region};
use aws_sdk_s3::{config::Credentials, Client, Config};
use std::env;

#[derive(Debug)]
pub struct S3ClientConfig {
    pub access_key: String,
    secret_key: String,
    endpoint: String,
    pub bucket: String,
    region: String,
}

impl S3ClientConfig {
    pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(S3ClientConfig {
            access_key: env::var("LINODE_ACCESS_KEY")
                .map_err(|_| "LINODE_ACCESS_KEY environment variable not set")?,
            secret_key: env::var("LINODE_SECRET_KEY")
                .map_err(|_| "LINODE_SECRET_KEY environment variable not set")?,
            endpoint: env::var("LINODE_ENDPOINT")
                .unwrap_or_else(|_| "us-ord-1.linodeobjects.com".to_string()),
            bucket: env::var("LINODE_BUCKET")
                .map_err(|_| "LINODE_BUCKET environment variable not set")?,
            region: env::var("LINODE_REGION").unwrap_or_else(|_| "us-ord".to_string()),
        })
    }
}

pub async fn create_s3_client(
    config: &S3ClientConfig,
) -> Result<Client, Box<dyn std::error::Error>> {
    let credentials = Credentials::new(
        &config.access_key,
        &config.secret_key,
        None,
        None,
        "linode-object-storage",
    );

    let s3_config = Config::builder()
        .behavior_version(BehaviorVersion::latest())
        .region(Region::new(config.region.clone()))
        .endpoint_url(format!("https://{}", config.endpoint))
        .credentials_provider(credentials)
        .force_path_style(false)
        .build();

    Ok(Client::from_conf(s3_config))
}

pub async fn upload(
    client: &Client,
    bucket: &str,
    key: &str,
    content: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("Uploading to Linode Object Storage...");
    println!("Bucket: {}", bucket);

    let put_object_req = client
        .put_object()
        .bucket(bucket)
        .key(key)
        .body(content.as_bytes().to_vec().into())
        .acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
        .content_type("application/rss+xml")
        .send()
        .await?;

    println!("Upload successful! ETag: {:?}", put_object_req.e_tag());
    Ok(())
}

137 docker-compose.yaml Normal file
@@ -0,0 +1,137 @@
version: "3.8"

services:
  valkey-mywebsite:
    image: valkey/valkey:8.0.2
    container_name: valkey-mywebsite
    ports:
      - "6379:6379"
    volumes:
      - valkey_mywebsite_data:/data
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "valkey-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  postgres-mywebsite:
    image: postgres:16
    container_name: postgres-mywebsite
    # fill these in with postgres env vars
    environment:
      POSTGRES_USER: wyatt
      POSTGRES_PASSWORD: test # <<< replace this
    ports:
      - "5432:5432"
    volumes:
      - postgres_mywebsite_data:/var/lib/postgresql/data
      - ./init-db:/docker-entrypoint-initdb.d
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U wyatt -d postgres"]
      interval: 30s
      timeout: 10s
      retries: 3

  frontend:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_frontend:master
    container_name: frontend
    ports:
      - "8000:8000"
    # fill these in the frontend env vars for prod
    environment:
      - BASE_URI_API=
      - BASE_URI_WEB=
      - EMAIL_FORM=
      - RSS_URI=
      - SITEMAP_URI=
      - VIRTUAL_HOST=wyattjmiller.com
      - VIRTUAL_PORT=80
      - LETSENCRYPT_HOST=wyattjmiller.com
      - LETSENCRYPT_EMAIL=wjmiller2016@gmail.com
    volumes:
      - ./deno-fresh-app:/app
      - /app/node_modules
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  public-mywebsite:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_public:master
    container_name: public-mywebsite
    ports:
      - "3000:3000"
    # fill these in with public env vars for prod
    environment:
      - DATABASE_URL=
      - REDIS_URL=
      - BASE_URI_WEB=
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - app_network
    # make sure to change the url too
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  task-mywebsite:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_task:master
    container_name: task-mywebsite
    # fill these in with task env vars for prod
    environment:
      - DATABASE_URL=
      - BASE_URI_API=
      - S3_ACCESS_KEY=
      - S3_SECRET_KEY=
      - S3_BUCKET=
      - REDIS_URL=
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    volumes:
      - ./backend/task/app:/app/posts # <<< place all markdown files here
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "pgrep", "-f", "task-mywebsite"]
      interval: 30s
      timeout: 10s
      retries: 3

networks:
  app_network:
    driver: bridge
    ipam:
      config:
        - subnet: 172.20.0.0/16

volumes:
  valkey_mywebsite_data:
    driver: local
  postgres_mywebsite_data:
    driver: local

22 flake.lock generated
@@ -21,11 +21,11 @@
        ]
      },
      "locked": {
        "lastModified": 1729525729,
        "narHash": "sha256-YiooFGeR7+sXSkHNfSzT8GQf+xtzbDwUbfbwkCCyuUs=",
        "lastModified": 1748076591,
        "narHash": "sha256-zfcYlOBYGfp4uxPC9ctaWf37bjZagbQ0pw7mqgTqfBI=",
        "owner": "nekowinston",
        "repo": "nix-deno",
        "rev": "e92687492a4faec48ab1eb45adbdba30c876b0e5",
        "rev": "0b22de7dd34c7d7c7cd46cedee0b65592dc57d3e",
        "type": "github"
      },
      "original": {
@@ -36,12 +36,12 @@
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1741862977,
        "narHash": "sha256-prZ0M8vE/ghRGGZcflvxCu40ObKaB+ikn74/xQoNrGQ=",
        "rev": "cdd2ef009676ac92b715ff26630164bb88fec4e0",
        "revCount": 715614,
        "lastModified": 1752436162,
        "narHash": "sha256-Kt1UIPi7kZqkSc5HVj6UY5YLHHEzPBkgpNUByuyxtlw=",
        "rev": "dfcd5b901dbab46c9c6e80b265648481aafb01f8",
        "revCount": 806304,
        "type": "tarball",
        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2411.715614%2Brev-cdd2ef009676ac92b715ff26630164bb88fec4e0/019590d8-bf83-7849-9c87-9e373480fc07/source.tar.gz"
        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2505.806304%2Brev-dfcd5b901dbab46c9c6e80b265648481aafb01f8/01980f2c-e7f3-7efc-b369-7ebec7be6e59/source.tar.gz"
      },
      "original": {
        "type": "tarball",
@@ -63,11 +63,11 @@
        ]
      },
      "locked": {
        "lastModified": 1742005800,
        "narHash": "sha256-6wuOGWkyW6R4A6Th9NMi6WK2jjddvZt7V2+rLPk6L3o=",
        "lastModified": 1752633862,
        "narHash": "sha256-Bj7ozT1+5P7NmvDcuAXJvj56txcXuAhk3Vd9FdWFQzk=",
        "owner": "oxalica",
        "repo": "rust-overlay",
        "rev": "028cd247a6375f83b94adc33d83676480fc9c294",
        "rev": "8668ca94858206ac3db0860a9dec471de0d995f8",
        "type": "github"
      },
      "original": {

17 frontend/Dockerfile Normal file
@@ -0,0 +1,17 @@
FROM denoland/deno:alpine

RUN apk add bash

# USER deno

RUN deno cache --reload deno.json

COPY . .

RUN bash -c 'deno cache main.ts'

RUN bash -c 'deno task build'

EXPOSE 8000

CMD ["deno", "run", "-A", "main.ts"]

@@ -2,40 +2,42 @@ import * as hi from "jsr:@preact-icons/hi2";

export default function Footer() {
  return (
    <footer class="bg-[#313244] text-[#cdd6f4] py-8">
    <footer class="bg-[#313244] text-[#cdd6f4] py-8 mt-auto">
      <div class="container mx-auto px-4">
        {/* Grid layout that switches from 2 to 1 column on small screens */}
        <div class="grid grid-cols-1 md:grid-cols-2 gap-8">
          <div class="space-y-2">
            {/* 2x2 grid on mobile, horizontal row on desktop - all centered */}
            <div class="grid grid-cols-2 place-items-center md:flex md:flex-row items-center justify-center md:gap-8">
              <a
                class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href={`${Deno.env.get("BASE_URI_API")}/posts/rss`}
                class="text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="/rss"
              >
                <div class="flex items-center gap-2">
                  <hi.HiOutlineRss />
                  RSS
                </div>
              </a>

              <a
                class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="/sitemap.xml"
                class="text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="/sitemap"
              >
                <div class="flex items-center gap-2">
                  <hi.HiOutlineMap />
                  Sitemap
                </div>
              </a>
            </div>

            <div>
              <a class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]">
              <a
                class="text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="/resume.pdf"
              >
                <div class="flex items-center gap-2">
                  <hi.HiOutlineBriefcase />
                  Resume
                </div>
              </a>

              <a
                class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                class="text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="mailto:wjmiller2016@gmail.com"
              >
                <div class="flex items-center gap-2">
@@ -43,13 +45,6 @@ export default function Footer() {
                  Email me
                </div>
              </a>
              <a
                class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
                href="https://github.com/wymillerlinux"
              >
                <div class="flex items-center gap-2">GitHub</div>
              </a>
            </div>
          </div>

          <div class="border-t border-gray-700 mt-8 pt-4 text-center">

@@ -35,14 +35,14 @@ export default function Header() {
  return (
    <header>
      <nav>
        <div class="bg-[#313244] flex flex-wrap justify-center space-x-6 p-4">
        <div class="grid grid-cols-2 mt-4 place-items-center md:flex md:flex-row items-center justify-center md:gap-8">
          {headerLinks.map((l) => {
            const newTab = l.newTab ? "_blank" : "_self";
            return (
              <a
                href={l.linkTo}
                target={newTab}
                class="text-[#cdd6f4] text-lg font-medium transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer"
                class="text-[#cdd6f4] text-md sm:text-lg font-medium transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer"
              >
                <div class="flex items-center gap-2">
                  {l.icon} {l.name}

89 frontend/components/PaginationControl.tsx Normal file
@@ -0,0 +1,89 @@
import * as hi from "jsr:@preact-icons/hi2";

export function PaginationControl({
  paginatedData,
  currentUrl,
  authorId,
}: {
  paginatedData: PaginatedPosts;
  currentUrl: URL;
  authorId: number;
}) {
  const buildUrl = (page: number, limit?: number) => {
    const params = new URLSearchParams(currentUrl.searchParams);
    params.set("page", page.toString());
    if (limit) params.set("limit", limit.toString());
    return `${currentUrl.pathname}?${params.toString()}`;
  };

  if (paginatedData.totalPages <= 1) return null;

  return (
    <div class="mt-8 space-y-4">
      {/* Pagination info and controls */}
      <div class="flex flex-col sm:flex-row justify-center items-center gap-4">
        <div class="flex items-center gap-2">
          {paginatedData.hasPrevPage && (
            <a
              href={buildUrl(paginatedData.currentPage - 1)}
              class="px-4 py-2 bg-[#45475a] text-[#cdd6f4] shadow-sm rounded hover:bg-[#6A6B7A] transition-colors"
            >
              <div class="flex items-center gap-2">
                <hi.HiChevronDoubleLeft />
                Previous
              </div>
            </a>
          )}

          {/* Page numbers */}
          <div class="flex gap-1">
            {Array.from(
              { length: Math.min(paginatedData.totalPages, 7) },
              (_, i) => {
                let pageNum;
                if (paginatedData.totalPages <= 7) {
                  pageNum = i + 1;
                } else {
                  const start = Math.max(1, paginatedData.currentPage - 3);
                  const end = Math.min(paginatedData.totalPages, start + 6);
                  pageNum = start + i;
                  if (pageNum > end) return null;
                }

                const isCurrentPage = pageNum === paginatedData.currentPage;

                return (
                  <a
                    key={pageNum}
                    href={buildUrl(pageNum)}
                    class={`px-3 py-1 rounded text-sm shadow-sm ${
                      isCurrentPage
                        ? "bg-[#6A6B7A] text-[#cdd6f4]"
                        : "bg-[#45475a] text-[#cdd6f4] hover:bg-[#6A6B7A]"
                    }`}
                  >
                    {pageNum}
                  </a>
                );
              },
            )}
          </div>

          {paginatedData.hasNextPage && (
            <a
              href={buildUrl(paginatedData.currentPage + 1)}
              class="px-4 py-2 bg-[#45475a] text-[#cdd6f4] shadow-sm rounded hover:bg-[#6A6B7A] transition-colors"
            >
              <div class="flex items-center gap-2">
                Next
                <hi.HiChevronDoubleRight />
              </div>
            </a>
          )}
        </div>

        {/* Quick jump to page */}
      </div>
    </div>
  );
}

@@ -2,10 +2,13 @@ import { Post } from "../types/index.ts";

export const PostBody = function PostBody({ post }: PostBodyOpts) {
  return (
    <div class="mx-auto max-w-4xl p-4 bg-[#313244]">
      <div
        class="p-6 bg-[#313244] shadow-md text-[#f5e0dc] post-content"
        class="p-6 bg-[#484659] shadow-md rounded-lg text-[#f5e0dc] post-content overflow-hidden break-words hyphens-auto max-w-full
          [&>*]:max-w-5xl [&>*]:overflow-wrap-anywhere"
        dangerouslySetInnerHTML={{ __html: post.body }}
      ></div>
    </div>
  );
};

@@ -2,22 +2,31 @@ import { convertUtc } from "../lib/convertUtc.ts";
import { truncateString } from "../lib/truncate.ts";
import { Post } from "../types/index.ts";

export const PostCard = function PostCard({ post }: { post: Post }) {
export const PostCard = function PostCard({
  post,
  colorValue,
}: {
  post: Post;
  colorValue: string;
}) {
  return (
    <div class="p-6 bg-[#45475a] rounded-lg shadow-md transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105">
    <div
      class={`p-6 bg-[#484659] rounded-lg shadow-xl transition-all duration-300 ease-in-out border-b-4 hover:shadow-xl hover:scale-105`}
      style={{ borderBottomColor: colorValue }}
    >
      <a href={`${Deno.env.get("BASE_URI_WEB")}/posts/${post.post_id}`}>
        <h2 class="text-white text-lg font-bold mb-2">{post.title}</h2>
        <p class="text-white">
          Written by{" "}
          <a
            class="text-white transition-all duration-300 ease-in-out hover:text-[#74c7ec] hover:drop-shadow-[0_0_10px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer"
            class="text-white transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_10px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer"
            href={`${Deno.env.get("BASE_URI_WEB")}/authors/${post.author_id}`}
          >
            {post.first_name} {post.last_name}
          </a>{" "}
          at {convertUtc(post.created_at)}
          at {convertUtc(post.publish_date)}
        </p>
        <p class="text-gray-400">{truncateString(post.body, 15)}</p>
        <p class="text-gray-400">{truncateString(post.body, 45)}</p>
      </a>
    </div>
  );

@@ -3,15 +3,19 @@ import { Post } from "../types/index.ts";

interface PostOpts {
  posts: Post[];
  colorValue: string;
}

export const PostCarousel = function PostCarousel({ posts }: PostOpts) {
export const PostCarousel = function PostCarousel({
  posts,
  colorValue,
}: PostOpts) {
  return (
    <div className="flex w-full justify-start items-start bg-[#313244] p-8">
      <div className="max-w-7xl mx-auto">
        <div className="flex flex-wrap justify-center gap-3">
          {posts.map((post: Post) => (
            <PostCard key={post.post_id} post={post} />
            <PostCard key={post.post_id} post={post} colorValue={colorValue} />
          ))}
        </div>
      </div>

@@ -8,8 +8,8 @@ export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
      <Head>
        <title>Wyatt J. Miller | {post.title}</title>
      </Head>
      <div class="p-6 bg-[#313244] shadow-md">
        <div class="min-w-screen flex flex-col items-center justify-between bg-[#45475a] rounded-lg shadow-md">
      <div class="p-4 bg-[#313244]">
        <div class="min-w-screen flex flex-col items-center justify-between bg-[#484659] rounded-lg shadow-md">
          <div class="sm:mt-14 sm:mb-14 mt-8 mb-8 flex flex-col items-center gap-y-5 gap-x-10 md:flex-row">
            <div class="space-y-2 text-center md:text-left">
              <p class="text-2xl text-[#f5e0dc] font-bold sm:text-4xl">
@@ -17,7 +17,7 @@ export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
              </p>
              <p class="text-md font-medium text-[#E39A9C] sm:text-xl italic">
                by {post.first_name} {post.last_name} posted on{" "}
                {convertUtc(post.created_at)}
                {convertUtc(post.publish_date)}
              </p>
            </div>
          </div>

@@ -1,14 +1,13 @@
export const ShareLinkButton = function ShareLinkButton({ props }) {
  const [text. setText] = useState("Share");
  const [text, setText] = useState("Share");

  const onClickHandler = () => {
    navigator.clipboard.writeText(location.href);
    setText("Copied to clipboard!");
    setTimeout(() => {
      setText("Share");
    }, 1000);
  };

  return (
    <button onClick={onClickHandler}>
      {text}
    </button>
  )
}
  return <button onClick={onClickHandler}>{text}</button>;
};

@@ -12,8 +12,12 @@ import * as $index from "./routes/index.tsx";
import * as $posts_id_ from "./routes/posts/[id].tsx";
import * as $posts_index from "./routes/posts/index.tsx";
import * as $projects_index from "./routes/projects/index.tsx";
import * as $rss_index from "./routes/rss/index.tsx";
import * as $sitemap_index from "./routes/sitemap/index.tsx";
import * as $Counter from "./islands/Counter.tsx";
import * as $ProjectCard from "./islands/ProjectCard.tsx";
import * as $modal from "./islands/modal.tsx";
import * as $portal from "./islands/portal.tsx";
import { type Manifest } from "$fresh/server.ts";

const manifest = {
@@ -28,10 +32,14 @@ const manifest = {
  "./routes/posts/[id].tsx": $posts_id_,
  "./routes/posts/index.tsx": $posts_index,
  "./routes/projects/index.tsx": $projects_index,
  "./routes/rss/index.tsx": $rss_index,
  "./routes/sitemap/index.tsx": $sitemap_index,
  },
  islands: {
    "./islands/Counter.tsx": $Counter,
    "./islands/ProjectCard.tsx": $ProjectCard,
    "./islands/modal.tsx": $modal,
    "./islands/portal.tsx": $portal,
  },
  baseUrl: import.meta.url,
} satisfies Manifest;

@@ -1,28 +1,45 @@
import { useState } from "preact/hooks";
import { Portal } from "./portal.tsx";
import { Modal } from "./modal.tsx";

export const ProjectCard = function ProjectCard(props: ProjectProps) {
  const [open, setOpen] = useState(false);

  return (
    <div
      class={`group space-y-1 rounded-md ${
        props.wip ? "border-2 border-dashed" : "cursor-pointer"
      } bg-[#45475a] px-3 py-2 m-10 shadow-md transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105`}
      onClick={() => props.repo && open(props.repo, "_blank")}
      class={`md:m-8 group space-y-1 rounded-md ${
        props.wip ? "border-2" : "cursor-pointer"
      } bg-[#44485b] px-3 py-2 m-4 shadow-md transition-all duration-300 ease-in-out border-b-4 border-b-[#94e2d5] hover:shadow-xl hover:scale-105`}
      style={
        props.wip
          ? {
              borderTopStyle: "dashed",
              borderRightStyle: "dashed",
              borderLeftStyle: "dashed",
            }
          : {}
      }
      onClick={() => {
        // clicking the card (not the link) opens the modal
        console.log("opened portal");
        setOpen(true);
      }}
    >
      <div class="flex items-center justify-between">
        <h2 class="text-lg text-white font-black uppercase">
          <a href={props.repo} target="_blank">
          <a
            href={props.repo}
            target="_blank"
            onClick={(e) => {
              // clicking the link should not open the modal
              e.stopPropagation();
            }}
          >
            {props.title}
          </a>
        </h2>
        <div class="bg-[#585b70] text-[#a6adc8] text-xs font-bold uppercase px-2.5 py-0.5 rounded-full">
          {props.repo && (
            <a
              class="hover:underline"
              href={props.repo}
              target="_blank"
              onClick={(e) => e.stopPropagation()}
            >
              Active
            </a>
          )}
          {props.repo && <span>Active</span>}
          {!props.repo && !props.wip && <span>Dead</span>}
          {props.wip && <span>WIP</span>}
        </div>
@@ -33,6 +50,38 @@ export const ProjectCard = function ProjectCard(props: ProjectProps) {
      <p class="whitespace-pre-wrap text-sm font-semibold text-[#a6adc8]">
        {props.tech}
      </p>

      {open && !props.wip ? (
        <Portal into="body">
          <Modal
            title={props.title}
            onClose={() => setOpen(false)}
            actions={[
              {
                label: "Open repository",
                onClick: () => {
                  if (props.repo) window.open(props.repo, "_blank");
                },
                variant: "primary",
              },
              {
                label: "Close",
                onClick: () => setOpen(false),
                variant: "secondary",
              },
            ]}
          >
            <div class="space-y-3">
              <p class="text-sm text-gray-800 dark:text-gray-200">
                {props.summary}
              </p>
              <p class="text-xs font-mono text-gray-600 dark:text-gray-300">
                Technologies used: {props.tech}
              </p>
            </div>
          </Modal>
        </Portal>
      ) : null}
    </div>
  );
};

158 frontend/islands/modal.tsx Normal file
@@ -0,0 +1,158 @@
import { useEffect, useRef, useState } from "preact/hooks";
import type { ComponentChildren } from "preact";

type ModalAction = {
  label: string;
  onClick: () => void;
  variant?: "primary" | "secondary" | "link";
};

type ModalProps = {
  title?: string;
  /**
   * Called after the modal has finished its exit animation.
   * The Modal will run the exit animation internally and then call onClose().
   */
  onClose: () => void;
  children: ComponentChildren;
  ariaLabel?: string;
  actions?: ModalAction[]; // rendered in footer; each button will be given flex-1 so buttons fill the width together
  /**
   * Optional: duration (ms) of enter/exit animation. Defaults to 200.
   * Keep this in sync with the CSS transition-duration classes used below.
   */
  animationDurationMs?: number;
};

export function Modal({
  title,
  onClose,
  children,
  ariaLabel,
  actions,
  animationDurationMs = 200,
}: ModalProps) {
  // Controls the entrance/exit animation state. true => visible (enter), false => hidden (exit)
  const [isVisible, setIsVisible] = useState(false);
  // Prevent double-triggering the close flow
  const closingRef = useRef(false);
  // Hold the timeout id for cleanup
  const timeoutRef = useRef<number | null>(null);

  useEffect(() => {
    // Defer to next frame so initial "hidden" styles are applied before animating to visible.
    const raf = requestAnimationFrame(() => setIsVisible(true));

    function onKey(e: KeyboardEvent) {
      if (e.key === "Escape") {
        startCloseFlow();
      }
    }
    document.addEventListener("keydown", onKey);

    return () => {
      cancelAnimationFrame(raf);
      document.removeEventListener("keydown", onKey);
      if (timeoutRef.current !== null) {
        clearTimeout(timeoutRef.current);
      }
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // If no actions provided, render a single Close button
  const footerActions: ModalAction[] =
    actions && actions.length > 0
      ? actions
      : [{ label: "Close", onClick: startCloseFlow, variant: "primary" }];

  // Start exit animation and call onClose after animationDurationMs
  function startCloseFlow() {
    if (closingRef.current) return;
    closingRef.current = true;
    setIsVisible(false);
    // Wait for the CSS transition to finish before signalling parent to actually unmount
    timeoutRef.current = window.setTimeout(() => {
      timeoutRef.current = null;
      onClose();
    }, animationDurationMs);
  }

  // Animation classes (enter & exit):
  // - panel: transitions opacity + transform for a subtle fade + pop
  // - backdrop: transitions opacity for fade
  const panelBase =
    "relative z-10 max-w-lg w-full bg-white dark:bg-[#1f2937] rounded-lg shadow-xl p-6 mx-4 transform transition-all";
  // We explicitly set the CSS transition duration inline to keep class + timeout in sync.
  const panelVisible = "opacity-100 translate-y-0 scale-100";
  const panelHidden = "opacity-0 translate-y-2 scale-95";

  const backdropBase =
    "absolute inset-0 bg-black/50 backdrop-blur-sm transition-opacity";
  const backdropVisible = "opacity-100";
  const backdropHidden = "opacity-0";

  // Footer button class generator
  const renderActionButton = (act: ModalAction) => {
    const base =
      "flex-1 w-full px-4 py-2 rounded-md font-semibold focus:outline-none";
    const styles =
      act.variant === "primary"
        ? "bg-[#94e2d5] text-black hover:brightness-95"
        : act.variant === "link"
          ? "bg-transparent text-[#075985] underline"
          : "bg-gray-200 dark:bg-gray-700 text-gray-800 dark:text-gray-200 hover:brightness-95";

    return (
      <button key={act.label} onClick={act.onClick} class={`${base} ${styles}`}>
        {act.label}
      </button>
    );
  };

  return (
    <div
      class="fixed inset-0 z-50 flex items-center justify-center"
      aria-modal="true"
      role="dialog"
      aria-label={ariaLabel ?? title ?? "Modal dialog"}
    >
      {/* Backdrop */}
      <div
        // inline style for transitionDuration to keep JS timeout and CSS synced
        style={{ transitionDuration: `${animationDurationMs}ms` }}
        class={`${backdropBase} ${isVisible ? backdropVisible : backdropHidden}`}
        onClick={startCloseFlow}
      />
      {/* Modal panel */}
      <div
        style={{ transitionDuration: `${animationDurationMs}ms` }}
        class={`${panelBase} ${isVisible ? panelVisible : panelHidden}`}
        onClick={(e) => e.stopPropagation()}
      >
        <div class="flex items-start justify-between">
          <h3 class="text-lg font-semibold text-gray-900 dark:text-white">
            {title}
          </h3>
          <button
            onClick={startCloseFlow}
            aria-label="Close modal"
            class="ml-4 rounded-md text-gray-700 dark:text-gray-300 hover:bg-gray-100 dark:hover:bg-gray-700 p-1"
          >
            ✕
          </button>
        </div>

        <div class="mt-4 text-sm text-gray-700 dark:text-gray-300">
          {children}
        </div>

        <div class="mt-6">
          <div class="flex gap-3 w-full">
            {footerActions.map(renderActionButton)}
          </div>
        </div>
      </div>
    </div>
  );
}

38 frontend/islands/portal.tsx Normal file
@@ -0,0 +1,38 @@
import { useEffect, useState } from "preact/hooks";
import { createPortal } from "preact/compat";
import type { ComponentChildren } from "preact";

type PortalProps = {
  into?: string | HTMLElement;
  children: ComponentChildren;
};

export function Portal({ into = "body", children }: PortalProps) {
  const [host, setHost] = useState<HTMLElement | null>(null);

  useEffect(() => {
    if (typeof document === "undefined") return;

    let target: HTMLElement | null = null;
    if (typeof into === "string") {
      target = into === "body" ? document.body : document.querySelector(into);
    } else {
      target = into;
    }

    if (!target) target = document.body;

    const wrapper = document.createElement("div");
    wrapper.className = "preact-portal-root";
    target.appendChild(wrapper);
    setHost(wrapper);

    return () => {
      if (wrapper.parentNode) wrapper.parentNode.removeChild(wrapper);
      setHost(null);
    };
  }, [into]);

  if (!host) return null;
  return createPortal(children, host);
}

@@ -1,3 +1,4 @@
export const truncateString = (str: string, maxLength: number) => {
  str = str.replace(/<[^>]*>/g, "");
  return str.length > maxLength ? `${str.slice(0, maxLength)}...` : str;
};

@@ -8,7 +8,7 @@ export default function App({ Component }: PageProps) {
        <title>frontend</title>
        <link rel="stylesheet" href="/styles.css" />
      </head>
      <body>
      <body class="bg-[#313244]">
        <Component />
      </body>
    </html>

@@ -2,13 +2,20 @@ import { FreshContext, Handlers, PageProps } from "$fresh/server.ts";
 import AuthorCard from "../../components/AuthorCard.tsx";
 import { Post } from "../../types/index.ts";
+import { PostCarousel } from "../../components/PostCarousel.tsx";
+import { PaginationControl } from "../../components/PaginationControl.tsx";

 export const handler: Handlers<PageData> = {
-  async GET(_req: Request, ctx: FreshContext) {
+  async GET(req: Request, ctx: FreshContext) {
     try {
+      const url = new URL(req.url);
+      const page = parseInt(url.searchParams.get("page") || "1");
+      const limit = parseInt(url.searchParams.get("limit") || "12");
+
       const [authorResponse, authorPostResponse] = await Promise.all([
         fetch(`${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}`),
-        fetch(`${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}/posts`),
+        fetch(
+          `${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}/posts?page=${page}&limit=${limit}`,
+        ),
       ]);

       const [authorData, authorPostData] = await Promise.all([
@@ -16,9 +23,37 @@ export const handler: Handlers<PageData> = {
         authorPostResponse.json(),
       ]);

+      let paginatedData: PaginatedPosts;
+
+      if (authorPostData.posts && authorPostData.total_posts !== undefined) {
+        const totalPages = Math.ceil(authorPostData.total_posts / limit);
+        paginatedData = {
+          posts: authorPostData.posts,
+          currentPage: page,
+          totalPages,
+          hasNextPage: page < totalPages,
+          hasPrevPage: page > 1,
+          totalPosts: authorPostData.total_posts,
+        };
+      } else {
+        const allPosts = Array.isArray(authorPostData) ? authorPostData : [];
+        const totalPages = Math.ceil(allPosts.length / limit);
+        const startIndex = (page - 1) * limit;
+        const endIndex = startIndex + limit;
+
+        paginatedData = {
+          posts: allPosts.slice(startIndex, endIndex),
+          currentPage: page,
+          totalPages,
+          hasNextPage: page < totalPages,
+          hasPrevPage: page > 1,
+          totalPosts: allPosts.length,
+        };
+      }
+
       return ctx.render({
         authorData,
-        authorPostData,
+        authorPostData: paginatedData,
       });
     } catch (error) {
       return ctx.render({
@@ -30,7 +65,7 @@ export const handler: Handlers<PageData> = {
   },
 };

-export default function AuthorIdentifier({ data }: PageProps<PageData>) {
+export default function AuthorIdentifier({ data, url }: PageProps<PageData>) {
   const { authorData, authorPostData, error } = data;

   if (error) {
@@ -52,7 +87,12 @@ export default function AuthorIdentifier({ data }: PageProps<PageData>) {
         <AuthorCard author={authorData} isIdentified={true} />
       </div>
       <div>
-        <PostCarousel posts={authorPostData} />
+        <PostCarousel posts={authorPostData.posts} />
+        <PaginationControl
+          paginatedData={authorPostData}
+          currentUrl={url}
+          authorId={authorData.author_id}
+        />
       </div>
     </>
   );
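The handler now normalizes two possible API responses (a paginated envelope with posts/total_posts, or a bare array that it paginates itself) into a single PaginatedPosts value. That type's declaration is not shown in this diff; a plausible shape, inferred from the fields assigned above:

// Inferred from the handler above; the real declaration may differ.
type PaginatedPosts = {
  posts: Post[];
  currentPage: number;
  totalPages: number;
  hasNextPage: boolean;
  hasPrevPage: boolean;
  totalPosts: number;
};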
@@ -26,7 +26,6 @@ export const handler: Handlers = {
       message: formData.get("message")?.toString(),
     };

-    // Validation logic
     const errors: FormState["errors"] = {};

     if (!state.name || state.name.trim() === "") {
@@ -44,7 +43,6 @@ export const handler: Handlers = {
       errors.message = "Message is required";
     }

-    // If there are errors, return the form with error messages
     if (Object.keys(errors).length > 0) {
       return ctx.render({
         ...state,
@@ -56,7 +54,6 @@ export const handler: Handlers = {
       method: "POST",
       body: formData,
     });
-    console.log(res);

     if (!res.ok || res.status !== 200) {
       return ctx.render({
@@ -77,11 +74,15 @@ export default function Contact({ data }: PageProps<FormState>) {
-    <div class="bg-[#313244] min-h-screen">
+    <div class="px-4 py-8 mx-auto p-6 flex flex-col bg-[#313244] min-h-screen w-full md:max-w-md">
       <Head>
-        <title>Contact</title>
+        <title>Wyatt J. Miller | Contact</title>
       </Head>
       <h1 class="text-3xl text-white font-bold uppercase text-center">
         Contact
       </h1>
+      <p class="md:text-lg sm:text-md text-white mt-5 mb-5">
+        Got a question? Here to yell at me? Send me something!
+      </p>
       <br />
       {data?.submitted && (
         <div
           class="bg-[#a6e3a1] text-[#313244] px-4 py-3 rounded relative"
@@ -114,7 +115,7 @@ export default function Contact({ data }: PageProps<FormState>) {
           required
           placeholder="Your Name"
           value={data?.name || ""}
-          class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
+          class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
           ${data?.errors?.name ? "border-[#f38ba8]" : "border-[#313244]"}`}
         />
         {data?.errors?.name && (
@@ -137,7 +138,7 @@ export default function Contact({ data }: PageProps<FormState>) {
           required
           placeholder="your@email.com"
           value={data?.email || ""}
-          class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
+          class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
           ${data?.errors?.email ? "border-[#f38ba8]" : "border-[#313244]"}`}
         />
         {data?.errors?.email && (
@@ -159,7 +160,7 @@ export default function Contact({ data }: PageProps<FormState>) {
           required
           placeholder="Write your message here..."
           rows={4}
-          class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
+          class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
           ${data?.errors?.message ? "border-red-500" : "border-gray-300"}`}
         >
           {data?.message || ""}
@@ -174,7 +175,7 @@ export default function Contact({ data }: PageProps<FormState>) {
         <div>
           <button
             type="submit"
-            class="w-full bg-[#89b4fa] text-[#313244] py-2 px-4 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
+            class="w-full bg-[#44475b] text-[#cdd6f4] py-2 px-4 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 shadow-md"
           >
             Send Message
           </button>
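FormState is likewise not declared in these hunks; judging by how data is read in the handler and the JSX, it plausibly looks like:

// Inferred from usage (data?.name, data?.errors?.email, data?.submitted, ...);
// the real declaration may differ.
type FormState = {
  name?: string;
  email?: string;
  message?: string;
  submitted?: boolean;
  errors?: { name?: string; email?: string; message?: string };
};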
@@ -2,8 +2,8 @@ import { PhotoCircle } from "../components/PhotoCircle.tsx";

 export default function Home() {
   return (
-    <body>
-      <div class="min-w-screen flex flex-col items-center justify-between bg-[#313244] sm:min-h-screen">
+    <body class="bg-[#313244]">
+      <div class="flex flex-col items-center justify-between min-h-screen">
       <div class="sm:mt-14 sm:mb-14 mt-12 mb-4 flex flex-col items-center gap-y-5 gap-x-10 md:flex-row">
         <PhotoCircle
           src="https://wyattjmiller.us-ord-1.linodeobjects.com/IMG_1480-min.png"
@@ -52,10 +52,11 @@ export default function PostPage({ data }: PageProps<PageData>) {
           Featured Posts
         </h2>
       </div>
-      <div className="text-lg font-thin italic text-white mb-4 text-center flex">
+      <div className="text-lg font-thin italic text-white mb-4 text-center flex underline decoration-[#89b4fa] decoration-2">
         Ignite the impossible
       </div>
-      <PostCarousel posts={featuredPosts} />
+      <PostCarousel posts={featuredPosts} colorValue="#89b4fa" />
     </section>
     <section>
       <div class="flex items-center gap-2 text-2xl text-white md:justify-start">
@@ -64,22 +65,10 @@ export default function PostPage({ data }: PageProps<PageData>) {
           Recent Posts
         </h2>
       </div>
-      <div className="text-lg font-thin italic mb-4 text-white text-center flex">
+      <div className="text-lg font-thin italic mb-4 text-white text-center flex underline decoration-[#89dceb] decoration-2">
         Now with 100% fresh perspective
       </div>
-      <PostCarousel posts={recentPosts} />
-    </section>
-    <section>
-      <div class="flex items-center gap-2 text-2xl text-white md:justify-start">
-        <hi.HiOutlineFire />
-        <h2 class="text-2xl font-bold text-white text-center lg:text-left">
-          Hot Posts
-        </h2>
-      </div>
-      <div className="text-lg font-thin italic mb-4 text-white text-center flex">
-        Making chaos look cool since forever
-      </div>
-      <PostCarousel posts={hotPosts} />
+      <PostCarousel posts={recentPosts} colorValue="#89dceb" />
     </section>
     <section>
       <div class="flex items-center gap-2 text-2xl text-white md:justify-start">
@@ -88,10 +77,10 @@ export default function PostPage({ data }: PageProps<PageData>) {
           Popular Posts
         </h2>
       </div>
-      <div className="text-lg font-thin italic mb-4 text-white text-center flex">
+      <div className="text-lg font-thin italic mb-4 text-white text-center flex underline decoration-[#b4befe] decoration-2">
         Content may cause uncontrollable reading
       </div>
-      <PostCarousel posts={popularPosts} />
+      <PostCarousel posts={popularPosts} colorValue="#b4befe" />
     </section>
   </div>
 );
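Each remaining carousel now receives a colorValue accent matching its heading's underline color. PostCarousel's props are not shown in this diff; a plausible signature, inferred from the call sites:

// Hypothetical; inferred from <PostCarousel posts={...} colorValue="#89b4fa" />.
type PostCarouselProps = {
  posts: Post[];
  colorValue?: string; // optional accent hex, since older call sites omit it
};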
@@ -31,11 +31,17 @@ export default function Projects({ data }: PageProps<ProjectData>) {
     <div class="space-y-12 px-10 py-8 sm:min-h-screen bg-[#313244]">
       <section
         id="projects"
-        class="lg:grid-cols-desktop grid scroll-mt-16 grid-cols-1 gap-x-10 gap-y-4 bg-[#313244] "
+        class="lg:grid-cols-desktop grid scroll-mt-8 grid-cols-1 gap-x-4 gap-y-2 bg-[#313244] "
       >
         <h1 class="text-3xl text-white font-bold uppercase text-center">
           Projects
         </h1>
+        <p class="md:text-lg sm:text-md text-white">
+          Here's a collection of software and electronics projects I've been
+          tinkering with during my free time - some are ongoing adventures,
+          others are finished experiments, but they've all been exciting
+          challenges that keep me busy when I'm not doing "real work" stuff!
+        </p>
         <div class="grid grid-cols-1 sm:grid-cols-2">
           {projects.map((project: any) => {
             return (
3
frontend/routes/rss/index.tsx
Normal File
@@ -0,0 +1,3 @@
export function handler(req: Request): Response {
  return Response.redirect(`${Deno.env.get("RSS_URI")}`, 307);
}

3
frontend/routes/sitemap/index.tsx
Normal File
@@ -0,0 +1,3 @@
export function handler(req: Request): Response {
  return Response.redirect(`${Deno.env.get("SITEMAP_URI")}`, 307);
}
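Both new routes answer with a 307, which keeps the redirect temporary and method-preserving; note that Response.redirect throws if the env var is unset or not an absolute URL. A quick smoke-test sketch, assuming a dev server on localhost:8000 (hypothetical host and port):

// Hypothetical check; run with: deno run --allow-net --allow-env smoke.ts
const res = await fetch("http://localhost:8000/rss", { redirect: "manual" });
console.assert(res.status === 307, `expected 307, got ${res.status}`);
console.assert(res.headers.get("location") === Deno.env.get("RSS_URI"));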
Binary file not shown (image changed; size before: 22 KiB, after: 4.2 KiB).
@@ -6,6 +6,7 @@ export type Post = {
   title: string;
   body: string;
   created_at: string;
+  publish_date: string;
 };

 export type Author = {