Compare commits
master...message-qu (2 commits)

Author | SHA1 | Date
---|---|---
 | 7ada37f005 |
 | 0a12cfcd57 |
.github/workflows/build.yaml (vendored, 104 lines deleted)
@@ -1,104 +0,0 @@
-name: Build and Release Docker Images
-
-on:
-  push:
-    branches: [master]
-  pull_request:
-    branches: [master]
-  workflow_dispatch:
-
-env:
-  REGISTRY: scm.wyattjmiller.com
-  USERNAME: wymiller # Define username here to use consistently
-
-jobs:
-  build-and-push:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-    strategy:
-      matrix:
-        include:
-          - dockerfile: public/Dockerfile
-            image: my-website-v2_public
-            context: ./backend
-          - dockerfile: task/Dockerfile
-            image: my-website-v2_task
-            context: ./backend
-          - dockerfile: Dockerfile
-            image: my-website-v2_frontend
-            context: ./frontend
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Log in to Container Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ secrets.GH_ACTION_USERNAME }}
-          password: ${{ secrets.GH_ACTION_TOKEN }}
-
-      - name: Extract metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: scm.wyattjmiller.com/wymiller/${{ matrix.image }}
-          tags: |
-            type=ref,event=branch
-            type=ref,event=pr
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}.{{minor}}
-            type=semver,pattern={{major}}
-
-      - name: Build and push Docker image
-        uses: docker/build-push-action@v5
-        with:
-          context: ${{ matrix.context }}
-          file: ${{ matrix.context }}/${{ matrix.dockerfile }}
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-          platforms: linux/amd64
-
-  create-release:
-    if: startsWith(github.ref, 'refs/tags/')
-    needs: build-and-push
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Create Release
-        uses: actions/create-release@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GH_ACTION_TOKEN }}
-        with:
-          tag_name: ${{ github.ref_name }}
-          release_name: Release ${{ github.ref_name }}
-          body: |
-            ## Docker Images Released
-
-            The following Docker images have been built and pushed to the container registry:
-
-            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_public:${{ github.ref_name }}`
-            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_task:${{ github.ref_name }}`
-            - `${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_frontend:${{ github.ref_name }}`
-
-            ### Usage
-
-            ```bash
-            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_public:${{ github.ref_name }}
-            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_task:${{ github.ref_name }}
-            docker pull ${{ env.REGISTRY }}/${{ env.USERNAME }}/my-website-v2_frontend:${{ github.ref_name }}
-            ```
-          draft: false
-          prerelease: false
backend/README.md
@@ -8,5 +8,3 @@ This is just an orginizational way of keeping the backend services together (so
 
 - [`public`](./public/README.md) - a RESTful API service
 - [`task`](./task/README.md) - a task scheduler service
-- [`storage`](./storage/README.md) - an internal storage library
-- [`cache`](./cache/README.md) - an internal caching library
backend/cache/Cargo.lock (generated, vendored, 1014 lines)
File diff suppressed because it is too large.
backend/cache/Cargo.toml (vendored, 9 lines deleted)
@@ -1,9 +0,0 @@
-[package]
-name = "cache"
-version = "0.1.0"
-edition = "2024"
-
-[dependencies]
-fred = "10.1.0"
-serde = "1.0.219"
-serde_json = "1.0.140"
backend/cache/README.md (vendored, 7 lines deleted)
@@ -1,7 +0,0 @@
-# Caching library
-
-also known as `cache`
-
-## What is this?
-
-An internal caching library that houses functionality needed for a key-value database like Redis or Valkey. This was turned into a library because both `public` and `task` needed functionality within.
backend/cache/src/lib.rs (vendored, 70 lines deleted)
@@ -1,70 +0,0 @@
-pub use fred::{
-    clients::Pool,
-    interfaces::{ClientLike, KeysInterface},
-    prelude::*,
-    types::{Expiration, SetOptions},
-};
-
-pub struct Cache {
-    pub inmem: Pool,
-}
-
-impl Cache {
-    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de>,
-    {
-        self.is_connected()?;
-        let value: Option<String> = self.inmem.get(&key).await?;
-
-        match value {
-            Some(json_str) => match serde_json::from_str::<T>(&json_str) {
-                Ok(deserialized) => Ok(Some(deserialized)),
-                Err(_) => Ok(None),
-            },
-            None => Ok(None),
-        }
-    }
-
-    pub async fn set<T>(
-        &mut self,
-        key: String,
-        contents: &T,
-        expiration: Option<Expiration>,
-        set_opts: Option<SetOptions>,
-        get: bool,
-    ) -> Result<(), Box<dyn std::error::Error>>
-    where
-        T: for<'de> serde::Deserialize<'de> + serde::Serialize,
-    {
-        self.is_connected()?;
-        let json_string = match serde_json::to_string::<T>(contents) {
-            Ok(s) => s,
-            Err(_) => {
-                return Err(Box::new(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    "Unable to deserialize contents passed to cache".to_string(),
-                )));
-            }
-        };
-
-        Ok(self
-            .inmem
-            .set(key, json_string, expiration, set_opts, get)
-            .await?)
-    }
-
-    pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
-        Ok(self.inmem.del(key).await?)
-    }
-
-    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
-        match self.inmem.is_connected() {
-            true => Ok(()),
-            false => Err(Box::new(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Not connected to cache".to_string(),
-            ))),
-        }
-    }
-}
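For context, the `Cache` wrapper above was the entire public surface of the deleted crate. A minimal sketch of how a consumer used it before this change, assuming a connected `fred` pool like the one built in `public/src/main.rs`:

```rust
use cache::{Cache, Expiration};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Greeting {
    message: String,
}

async fn demo(mut cache: Cache) -> Result<(), Box<dyn std::error::Error>> {
    let value = Greeting { message: "hello".to_string() };

    // set() serializes to JSON before writing; EX is a TTL in seconds.
    cache
        .set(String::from("greeting"), &value, Some(Expiration::EX(3600)), None, false)
        .await?;

    // get() returns Ok(None) on a miss or when deserialization fails.
    let cached: Option<Greeting> = cache.get(String::from("greeting")).await?;
    if let Some(g) = cached {
        println!("{}", g.message);
    }
    Ok(())
}
```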
backend/public/.sqlx/query-053f5b53a743065aa0105903cdd0ec803861a2477c38a02754d2d350a34aaa68.json
@@ -1,62 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, true, false, false, false, false, true, true]
-  },
-  "hash": "053f5b53a743065aa0105903cdd0ec803861a2477c38a02754d2d350a34aaa68"
-}

backend/public/.sqlx/query-0891ec97ff1d5d5ab7fbc848ceb4e7ea4f46e2f6282170dfdf90ebc6ab6d5fd9.json
@@ -1,70 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" },
-      { "ordinal": 8, "name": "is_featured", "type_info": "Bool" }
-    ],
-    "parameters": { "Left": ["Int4"] },
-    "nullable": [false, true, false, false, false, false, true, true, false]
-  },
-  "hash": "0891ec97ff1d5d5ab7fbc848ceb4e7ea4f46e2f6282170dfdf90ebc6ab6d5fd9"
-}

backend/public/.sqlx/query-0ec6c9d94fceba56112e78c82acc56ae01bc3c641e28ee21e331c06e2fd9e551.json
@@ -1,62 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, true, false, false, false, false, true, true]
-  },
-  "hash": "0ec6c9d94fceba56112e78c82acc56ae01bc3c641e28ee21e331c06e2fd9e551"
-}

backend/public/.sqlx/query-1f5f18ecc0f1fe0ea93ca61e3f167640a56fee610379de45017f2608094867f0.json
@@ -1,42 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "INSERT INTO comments (post_id, name, body) VALUES ($1, $2, $3) RETURNING comment_id, name, body, created_at",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": ["Int4", "Varchar", "Varchar"] },
-    "nullable": [false, false, false, true]
-  },
-  "hash": "1f5f18ecc0f1fe0ea93ca61e3f167640a56fee610379de45017f2608094867f0"
-}

backend/public/.sqlx/query-3831b52c2db3d1114c4b01a761c74168b66904bacff847844d463454b7fcde43.json
@@ -1,66 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "title", "type_info": "Text" },
-      { "ordinal": 4, "name": "body", "type_info": "Text" },
-      { "ordinal": 5, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 6, "name": "publish_date", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "author_id", "type_info": "Int4" }
-    ],
-    "parameters": { "Left": ["Int4", "Int8", "Int8"] },
-    "nullable": [false, false, false, false, false, true, true, false]
-  },
-  "hash": "3831b52c2db3d1114c4b01a761c74168b66904bacff847844d463454b7fcde43"
-}

backend/public/.sqlx/query-49768c8b986078bdfaad191b3ea1f07ca033b2a734162a3f8fcf0ef0a44c1e7f.json
@@ -1,62 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, true, false, false, false, false, true, true]
-  },
-  "hash": "49768c8b986078bdfaad191b3ea1f07ca033b2a734162a3f8fcf0ef0a44c1e7f"
-}

backend/public/.sqlx/query-4e39696c45b7533e519452425b5a69d607fd8b99a526002ece8978ccb41f2c69.json
@@ -1,41 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": ["Int8", "Int8"] },
-    "nullable": [false, false, false, true]
-  },
-  "hash": "4e39696c45b7533e519452425b5a69d607fd8b99a526002ece8978ccb41f2c69"
-}

backend/public/.sqlx/query-51fff32b503c65e62320071ff3ec44060b5fb45049b4f489c9a9d92e592ab5a7.json
@@ -1,22 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT COUNT(*) FROM posts p WHERE p.deleted_at IS NULL AND p.author_id = $1",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "count", "type_info": "Int8" }
-    ],
-    "parameters": { "Left": ["Int4"] },
-    "nullable": [null]
-  },
-  "hash": "51fff32b503c65e62320071ff3ec44060b5fb45049b4f489c9a9d92e592ab5a7"
-}

backend/public/.sqlx/query-9c0f74750e0f90916b3d2f85d0264e27523c14dff7b7adccd5b4cfbb36918901.json
@@ -1,46 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT author_id, first_name, last_name, bio, image FROM authors WHERE author_id = $1",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "bio", "type_info": "Text" },
-      { "ordinal": 4, "name": "image", "type_info": "Text" }
-    ],
-    "parameters": { "Left": ["Int4"] },
-    "nullable": [false, false, false, true, true]
-  },
-  "hash": "9c0f74750e0f90916b3d2f85d0264e27523c14dff7b7adccd5b4cfbb36918901"
-}

backend/public/.sqlx/query-9d93a8a7c0a2442a511108af36d4adfb1ef8a2fac82448205654742f43dc4e75.json
@@ -1,62 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, true, false, false, false, false, true, true]
-  },
-  "hash": "9d93a8a7c0a2442a511108af36d4adfb1ef8a2fac82448205654742f43dc4e75"
-}

backend/public/.sqlx/query-ad39df8c37105f13b620f8898e570cdbc54d4bd4e402aac65a28c9aa81803831.json
@@ -1,62 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "post_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 4, "name": "title", "type_info": "Text" },
-      { "ordinal": 5, "name": "body", "type_info": "Text" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 7, "name": "publish_date", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, true, false, false, false, false, true, true]
-  },
-  "hash": "ad39df8c37105f13b620f8898e570cdbc54d4bd4e402aac65a28c9aa81803831"
-}

backend/public/.sqlx/query-ae5c1527389fd823f46d3b23e5ab3b8211a6323ceff845487abae26096b3fa01.json
@@ -1,40 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT c.comment_id, c.name, c.body, c.created_at FROM comments c LEFT JOIN posts p ON p.post_id = c.post_id WHERE p.post_id = $1 AND c.deleted_at IS NULL ORDER BY created_at DESC LIMIT 20",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "comment_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "body", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "created_at", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": ["Int4"] },
-    "nullable": [false, false, false, true]
-  },
-  "hash": "ae5c1527389fd823f46d3b23e5ab3b8211a6323ceff845487abae26096b3fa01"
-}

backend/public/.sqlx/query-e6764f22ac7966bdb64386aedffb9edb89aefb248a1f980d2d4e2e20b1c3ca50.json
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT author_id, first_name, last_name, bio, image FROM authors ORDER BY created_at DESC LIMIT $1 OFFSET $2",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "author_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "first_name", "type_info": "Varchar" },
-      { "ordinal": 2, "name": "last_name", "type_info": "Varchar" },
-      { "ordinal": 3, "name": "bio", "type_info": "Text" },
-      { "ordinal": 4, "name": "image", "type_info": "Text" }
-    ],
-    "parameters": { "Left": ["Int8", "Int8"] },
-    "nullable": [false, false, false, true, true]
-  },
-  "hash": "e6764f22ac7966bdb64386aedffb9edb89aefb248a1f980d2d4e2e20b1c3ca50"
-}

backend/public/.sqlx/query-ed764b77d39df0583dc05c3ca721176b8c38e5df5fb078a53b808080c865e64d.json
@@ -1,56 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT project_id, title, repo, summary, tech, wip, created_at FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "project_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "title", "type_info": "Text" },
-      { "ordinal": 2, "name": "repo", "type_info": "Text" },
-      { "ordinal": 3, "name": "summary", "type_info": "Text" },
-      { "ordinal": 4, "name": "tech", "type_info": "Text" },
-      { "ordinal": 5, "name": "wip", "type_info": "Bool" },
-      { "ordinal": 6, "name": "created_at", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": [] },
-    "nullable": [false, false, true, false, false, true, true]
-  },
-  "hash": "ed764b77d39df0583dc05c3ca721176b8c38e5df5fb078a53b808080c865e64d"
-}
backend/public/Cargo.lock (generated, 1061 lines)
File diff suppressed because it is too large.
backend/public/Cargo.toml
@@ -25,4 +25,3 @@ serde_json = "1.0.128"
 chrono = "0.4.38"
 xml = "0.8.20"
 fred = "10.1.0"
-cache = { version = "*", path = "../cache" }
backend/public/Dockerfile (12 lines deleted)
@@ -1,12 +0,0 @@
-FROM rust:1.88.0
-
-WORKDIR /app
-
-COPY ./public ./public
-COPY ./cache ./cache
-
-RUN cargo build --release --manifest-path ./public/Cargo.toml
-
-EXPOSE 3000
-
-CMD ["/app/public/target/release/public"]
backend/public/src/datasources/authors.rs
@@ -1,9 +1,6 @@
 use sqlx::{Pool, Postgres};
 
-use crate::{
-    routes::{authors::Author, posts::Post},
-    utils::pagination::Pagination,
-};
+use crate::routes::{authors::Author, comments::Pagination, posts::Post};
 
 pub struct AuthorsDatasource;
 impl AuthorsDatasource {
@@ -11,11 +8,11 @@
         pool: &Pool<Postgres>,
         pagination: Pagination,
     ) -> Result<Vec<Author>, sqlx::Error> {
-        let offset: i64 = (pagination.page - 1) * pagination.limit;
+        let offset: i64 = (pagination.page_number - 1) * pagination.page_size;
         sqlx::query_as!(
             Author,
             "SELECT author_id, first_name, last_name, bio, image FROM authors ORDER BY created_at DESC LIMIT $1 OFFSET $2",
-            pagination.page,
+            pagination.page_size,
             offset,
         )
        .fetch_all(pool)
@@ -35,32 +32,13 @@
     pub async fn get_authors_posts(
         pool: &Pool<Postgres>,
         author_id: i32,
-        pagination: Pagination,
-    ) -> Result<(Vec<Post>, i64), sqlx::Error> {
-        let offset: i64 = (pagination.page - 1) * pagination.limit;
-        println!(
-            "Author ID: {}, Page: {}, Size: {}, Offset: {}",
-            author_id, pagination.page, pagination.limit, offset
-        );
-
-        let total_count = sqlx::query_scalar!(
-            "SELECT COUNT(*) FROM posts p WHERE p.deleted_at IS NULL AND p.author_id = $1",
-            author_id
-        )
-        .fetch_one(pool)
-        .await?
-        .unwrap_or(0);
-
-        let posts_query = sqlx::query_as!(
-            Post,
-            "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
-            author_id,
-            pagination.limit,
-            offset,
-        )
-        .fetch_all(pool)
-        .await?;
-
-        Ok((posts_query, total_count))
+    ) -> Result<Vec<Post>, sqlx::Error> {
+        sqlx::query_as!(
+            Post,
+            "SELECT p.post_id, a.first_name, a.last_name, p.title, p.body, p.created_at, a.author_id FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.author_id = $1 ORDER BY created_at DESC",
+            author_id
+        )
+        .fetch_all(pool)
+        .await
     }
 }
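The pagination rework here swaps the deleted `utils::pagination` types (`page`/`limit`) for the `Pagination` struct now defined in `routes::comments` (`page_number`/`page_size`). The offset arithmetic is unchanged; a standalone sketch:

```rust
// Mirror of the struct now living in routes::comments.
pub struct Pagination {
    pub page_number: i64,
    pub page_size: i64,
}

// One-based pages: page 1 starts at row 0.
fn offset(p: &Pagination) -> i64 {
    (p.page_number - 1) * p.page_size
}

fn main() {
    let p = Pagination { page_number: 3, page_size: 12 };
    assert_eq!(offset(&p), 24); // LIMIT 12 OFFSET 24 covers rows 24..36
}
```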
backend/public/src/datasources/comments.rs
@@ -1,7 +1,4 @@
-use crate::{
-    routes::comments::{Comment, CommentInputPayload},
-    utils::pagination::Pagination,
-};
+use crate::routes::comments::{Comment, CommentInputPayload, Pagination};
 use sqlx::{Pool, Postgres};
 
 pub struct CommentsDatasource;
@@ -28,8 +25,8 @@
         pool: &Pool<Postgres>,
         pagination: Pagination,
     ) -> Result<Vec<Comment>, sqlx::Error> {
-        let offset: i64 = (pagination.page - 1) * pagination.limit;
-        sqlx::query_as!(Comment, "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2", pagination.page, offset)
+        let offset: i64 = (pagination.page_number - 1) * pagination.page_size;
+        sqlx::query_as!(Comment, "SELECT comment_id, name, body, created_at FROM comments ORDER BY created_at DESC LIMIT $1 OFFSET $2", pagination.page_size, offset)
         .fetch_all(pool)
         .await
     }
backend/public/src/datasources/posts.rs
@@ -5,7 +5,7 @@ use crate::routes::posts::{Post, PostFeaturedVariant};
 pub struct PostsDatasource;
 impl PostsDatasource {
     pub async fn get_all(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.created_at DESC LIMIT 10")
         .fetch_all(pool)
         .await
     }
@@ -19,31 +19,31 @@ impl PostsDatasource {
         .execute(pool)
         .await;
 
-        sqlx::query_as!(PostFeaturedVariant, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC", post_id)
+        sqlx::query_as!(PostFeaturedVariant, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.is_featured FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.post_id = $1 ORDER BY p.created_at DESC", post_id)
         .fetch_one(pool)
         .await
     }
 
     pub async fn get_recent(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_popular(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id LEFT JOIN comments c ON p.post_id = c.post_id WHERE p.deleted_at IS NULL GROUP BY p.post_id, a.first_name, a.last_name ORDER BY COUNT(c.comment_id) DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_hot(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL ORDER BY p.view_count DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
 
     pub async fn get_featured(pool: &Pool<Postgres>) -> Result<Vec<Post>, sqlx::Error> {
-        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at, p.publish_date FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
+        sqlx::query_as!(Post, "SELECT p.post_id, p.author_id, a.first_name, a.last_name, p.title, p.body, p.created_at FROM posts p LEFT JOIN authors a ON a.author_id = p.author_id WHERE p.deleted_at IS NULL AND p.is_featured IS true GROUP BY p.post_id, a.first_name, a.last_name ORDER BY p.created_at DESC LIMIT 3")
         .fetch_all(pool)
         .await
     }
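These query edits track the struct edits in `routes/posts.rs` further down: `sqlx::query_as!` checks the SQL against the database (or the `.sqlx` offline cache deleted above) at compile time and maps columns to struct fields by name, so `publish_date` has to leave the queries and the structs together. A minimal illustration of that pairing, using a hypothetical `items` table rather than this repo's schema:

```rust
use sqlx::{Pool, Postgres};

// Fields must line up one-for-one with the selected columns.
#[derive(Debug)]
struct Row {
    id: i32,
    title: Option<String>,
}

// Compile-time verified: building this requires DATABASE_URL (or an offline
// .sqlx cache) so the macro can check "SELECT id, title FROM items".
async fn fetch(pool: &Pool<Postgres>) -> Result<Vec<Row>, sqlx::Error> {
    sqlx::query_as!(Row, "SELECT id, title FROM items")
        .fetch_all(pool)
        .await
}
```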
backend/public/src/main.rs
@@ -1,6 +1,6 @@
 use axum::Router;
-use cache::ClientLike;
 use config::config;
+use fred::prelude::*;
 use sqlx::postgres::PgPoolOptions;
 use std::fs::File;
 use std::sync::Arc;
@@ -8,9 +8,9 @@ use std::time::Duration;
 use tokio::net::TcpListener;
 use tokio::signal;
 use tokio::sync::Mutex;
-// use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
+use tower_governor::{governor::GovernorConfigBuilder, GovernorLayer};
 use tower_http::{
-    cors::CorsLayer,
+    cors::{Any, CorsLayer},
     trace::{self, TraceLayer},
 };
 use tracing_subscriber::{filter, layer::SubscriberExt, prelude::*, util::SubscriberInitExt};
@@ -58,6 +58,11 @@ async fn main() {
     )
     .init();
 
+    let cors = CorsLayer::new()
+        .allow_methods(Any)
+        .allow_headers(Any)
+        .allow_origin(Any);
+
     // if std::env::var("RUST_ENV").unwrap_or_else(|_| "development".to_string()) != "development" {
     //println!("we're not in development, starting up the rate limiter");
     //let governor_conf = Arc::new(
@@ -96,13 +101,13 @@ async fn main() {
         .expect("Failed to connect to database");
 
     let pool_size = 8;
-    let config = cache::Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
+    let config = Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
 
-    let redis_pool = cache::Builder::from_config(config)
+    let redis_pool = Builder::from_config(config)
         .with_performance_config(|config| {
             config.default_command_timeout = Duration::from_secs(60);
         })
-        .set_policy(cache::ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
+        .set_policy(ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
         .build_pool(pool_size)
         .expect("Failed to create cache pool");
 
@@ -128,6 +133,7 @@ async fn main() {
         .on_response(trace::DefaultOnResponse::new().level(tracing::Level::INFO)),
         )
         .fallback(routes::root::RootRoute::not_found);
+        // .layer(cors);
         //.layer(GovernorLayer {
         //    config: governor_conf,
         //});
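The new `CorsLayer` built in `main()` is permissive (any origin, method, and header), but the `.layer(cors)` call on the router is still commented out, so it is not yet applied. A sketch of what enabling it would look like; the router here is illustrative, not the file's actual route tree:

```rust
use axum::{routing::get, Router};
use tower_http::cors::{Any, CorsLayer};

fn app() -> Router {
    // Same wide-open configuration as the one built in main().
    let cors = CorsLayer::new()
        .allow_methods(Any)
        .allow_headers(Any)
        .allow_origin(Any);

    Router::new()
        .route("/health", get(|| async { "ok" }))
        .layer(cors) // the step still commented out upstream
}
```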
backend/public/src/routes/authors.rs
@@ -1,19 +1,16 @@
 use axum::{
-    extract::{Path, Query, State},
+    extract::{Path, State},
     http::StatusCode,
     response::IntoResponse,
     routing::get,
     Json,
 };
-use cache::Expiration;
+use fred::types::Expiration;
 use serde::{Deserialize, Serialize};
 
-use crate::{
-    datasources::authors::AuthorsDatasource,
-    routes::posts::Post,
-    state::AppState,
-    utils::pagination::{Pagination, PaginationQuery},
-};
+use crate::{datasources::authors::AuthorsDatasource, state::AppState};
+
+use super::comments::Pagination;
 
 #[derive(Deserialize, Serialize, Clone)]
 pub struct Author {
@@ -29,12 +26,6 @@ pub struct AuthorGetOneParams {
     pub id: i32,
 }
 
-#[derive(Deserialize, Serialize)]
-pub struct AuthorPostsResponse {
-    posts: Vec<Post>,
-    total_posts: i64,
-}
-
 pub struct AuthorsRoute;
 impl AuthorsRoute {
     pub fn routes(app_state: &AppState) -> axum::Router {
@@ -47,13 +38,8 @@
 
     async fn get_all(
         State(app_state): State<AppState>,
-        Query(query): Query<PaginationQuery>,
+        Json(pagination): Json<Pagination>,
     ) -> impl IntoResponse {
-        let pagination = Pagination {
-            page: query.page.unwrap_or(1),
-            limit: query.limit.unwrap_or(12),
-        };
-
         let mut state = app_state.lock().await;
         let cached: Option<Vec<Author>> = state
             .cache
@@ -118,7 +104,6 @@
         let state = app_state.clone();
 
         tracing::info!("storing database data in cache");
-
         tokio::spawn(async move {
             let mut s = state.lock().await;
             let _ = s
@@ -142,20 +127,12 @@
     async fn get_authors_posts(
         State(app_state): State<AppState>,
         Path(params): Path<AuthorGetOneParams>,
-        Query(pagination): Query<PaginationQuery>,
     ) -> impl IntoResponse {
-        let pagination = Pagination {
-            page: pagination.page.unwrap_or(1),
-            limit: pagination.limit.unwrap_or(12),
-        };
-
         let state = app_state.lock().await;
 
-        match AuthorsDatasource::get_authors_posts(&state.database, params.id, pagination).await {
-            Ok((posts, total_posts)) => Ok(Json(AuthorPostsResponse { posts, total_posts })),
+        match AuthorsDatasource::get_authors_posts(&state.database, params.id).await {
+            Ok(p) => Ok(Json(p)),
             Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
         }
     }
 }
backend/public/src/routes/comments.rs
@@ -1,20 +1,13 @@
-use crate::{
-    datasources::comments::CommentsDatasource,
-    state::AppState,
-    utils::{
-        datetime::*,
-        pagination::{Pagination, PaginationQuery},
-    },
-};
+use crate::{datasources::comments::CommentsDatasource, state::AppState, utils::datetime::*};
 use axum::{
-    extract::{Path, Query, State},
+    extract::{Path, State},
     http::StatusCode,
     response::IntoResponse,
     routing::{get, post},
     Json,
 };
-use cache::{Expiration, SetOptions};
 use chrono::Utc;
+use fred::types::{Expiration, SetOptions};
 use serde::{Deserialize, Serialize};
@@ -28,6 +21,13 @@ pub struct CommentInputPayload {
 pub struct CommentPathParams {
     id: i32,
 }
 
+#[derive(Deserialize, Serialize)]
+pub struct Pagination {
+    pub page_number: i64,
+    pub page_size: i64,
+}
+
 #[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
 pub struct Comment {
     pub comment_id: i32,
@@ -96,13 +96,8 @@
 
     async fn get_comments_index(
         State(app_state): State<AppState>,
-        Query(query): Query<PaginationQuery>,
+        Json(pagination): Json<Pagination>,
     ) -> impl IntoResponse {
-        let pagination = Pagination {
-            page: query.page.unwrap_or(1),
-            limit: query.limit.unwrap_or(12),
-        };
-
         let state = app_state.lock().await;
 
         match CommentsDatasource::get_index_comments(&state.database, pagination).await {
@@ -111,5 +106,3 @@
         }
     }
 }
-
-
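Switching the extractor from `Query(query): Query<PaginationQuery>` to `Json(pagination): Json<Pagination>` also changes the contract for callers: the page selection now travels in a JSON request body instead of the query string, and with the `unwrap_or` defaults gone, both fields are required. A hypothetical client call (the endpoint path is a guess):

```rust
use serde_json::json;

async fn fetch_comments() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let res = client
        .get("http://localhost:3000/comments") // assumed mount point
        .json(&json!({ "page_number": 1, "page_size": 12 })) // matches the new Pagination struct
        .send()
        .await?;
    println!("{}", res.text().await?);
    Ok(())
}
```

Worth noting: a JSON body on a GET request is unusual, and some clients and proxies drop it, which is a trade-off of this change.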
|
@@ -15,8 +15,8 @@ use axum::{
|
||||
routing::get,
|
||||
Json, Router,
|
||||
};
|
||||
use cache::Expiration;
|
||||
use chrono::Utc;
|
||||
use fred::types::Expiration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
@@ -31,9 +31,6 @@ pub struct Post {
|
||||
#[serde(serialize_with = "serialize_datetime")]
|
||||
#[serde(deserialize_with = "deserialize_datetime")]
|
||||
pub created_at: Option<chrono::DateTime<Utc>>,
|
||||
#[serde(serialize_with = "serialize_datetime")]
|
||||
#[serde(deserialize_with = "deserialize_datetime")]
|
||||
pub publish_date: Option<chrono::DateTime<Utc>>,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
|
||||
@@ -47,9 +44,6 @@ pub struct PostFeaturedVariant {
|
||||
#[serde(serialize_with = "serialize_datetime")]
|
||||
#[serde(deserialize_with = "deserialize_datetime")]
|
||||
pub created_at: Option<chrono::DateTime<Utc>>,
|
||||
#[serde(serialize_with = "serialize_datetime")]
|
||||
#[serde(deserialize_with = "deserialize_datetime")]
|
||||
pub publish_date: Option<chrono::DateTime<Utc>>,
|
||||
pub is_featured: Option<bool>,
|
||||
}
|
||||
|
||||
|
@@ -1,6 +1,7 @@
|
||||
use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
|
||||
use axum::http::{HeaderMap, HeaderValue};
|
||||
use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
|
||||
use cache::Expiration;
|
||||
use fred::types::Expiration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
|
||||
@@ -66,5 +67,3 @@ impl ProjectsRoute {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
backend/public/src/routes/root.rs
@@ -1,10 +1,15 @@
 use axum::{
+    extract::State,
     http::StatusCode,
     response::{Html, IntoResponse},
     routing::get,
     Router,
 };
 
+use crate::{datasources::posts::PostsDatasource, state::AppState};
+
+use super::posts::Post;
+
 pub struct RootRoute;
 impl RootRoute {
     pub fn routes() -> Router {
backend/public/src/state.rs
@@ -1,17 +1,83 @@
+use fred::interfaces::KeysInterface;
+use fred::{clients::Pool, prelude::*};
 use sqlx::PgPool;
 
 pub type AppState = std::sync::Arc<tokio::sync::Mutex<AppInternalState>>;
 
 pub struct AppInternalState {
     pub database: sqlx::postgres::PgPool,
-    pub cache: cache::Cache,
+    pub cache: Cache,
 }
 
+pub struct Cache {
+    pub inmem: Pool,
+}
+
 impl AppInternalState {
-    pub fn new(database: PgPool, cache: cache::Pool) -> Self {
+    pub fn new(database: PgPool, cache: Pool) -> Self {
         AppInternalState {
             database,
-            cache: cache::Cache { inmem: cache },
+            cache: Cache { inmem: cache },
         }
     }
 }
+
+impl Cache {
+    pub async fn get<T>(&mut self, key: String) -> Result<Option<T>, Box<dyn std::error::Error>>
+    where
+        T: for<'de> serde::Deserialize<'de>,
+    {
+        self.is_connected()?;
+        let value: Option<String> = self.inmem.get(&key).await?;
+
+        match value {
+            Some(json_str) => match serde_json::from_str::<T>(&json_str) {
+                Ok(deserialized) => Ok(Some(deserialized)),
+                Err(_) => Ok(None),
+            },
+            None => Ok(None),
+        }
+    }
+
+    pub async fn set<T>(
+        &mut self,
+        key: String,
+        contents: &T,
+        expiration: Option<Expiration>,
+        set_opts: Option<SetOptions>,
+        get: bool,
+    ) -> Result<(), Box<dyn std::error::Error>>
+    where
+        T: for<'de> serde::Deserialize<'de> + serde::Serialize,
+    {
+        self.is_connected()?;
+        let json_string = match serde_json::to_string::<T>(contents) {
+            Ok(s) => s,
+            Err(_) => {
+                return Err(Box::new(std::io::Error::new(
+                    std::io::ErrorKind::Other,
+                    "Unable to deserialize contents passed to cache".to_string(),
+                )))
+            }
+        };
+
+        Ok(self
+            .inmem
+            .set(key, json_string, expiration, set_opts, get)
+            .await?)
+    }
+
+    pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
+        Ok(self.inmem.del(key).await?)
+    }
+
+    fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
+        match self.inmem.is_connected() {
+            true => Ok(()),
+            false => Err(Box::new(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "Not connected to cache".to_string(),
+            ))),
+        }
+    }
+}
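`AppState` remains an `Arc<tokio::sync::Mutex<AppInternalState>>`, so handlers take one shared async lock before touching the pool or the now-inlined `Cache`. A stripped-down sketch of that access pattern:

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

// Stand-in for AppInternalState's database and cache fields.
struct Inner {
    counter: u64,
}

type AppState = Arc<Mutex<Inner>>;

async fn handler(app_state: AppState) {
    // Mirrors the `let mut state = app_state.lock().await;` seen in the routes.
    let mut state = app_state.lock().await;
    state.counter += 1;
}
```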
backend/public/src/utils/mod.rs
@@ -1,4 +1,3 @@
 pub mod datetime;
-pub mod pagination;
 pub mod rss;
 pub mod sitemap;
backend/public/src/utils/pagination.rs (13 lines deleted)
@@ -1,13 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Deserialize, Serialize)]
-pub struct PaginationQuery {
-    pub page: Option<i64>,
-    pub limit: Option<i64>,
-}
-
-#[derive(Deserialize, Serialize)]
-pub struct Pagination {
-    pub page: i64,
-    pub limit: i64,
-}
backend/public/src/utils/rss.rs
@@ -73,18 +73,17 @@ pub fn generate_rss(
     format!(
         r#"<?xml version="1.0" encoding="UTF-8"?>
 <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
 <channel>
 <title>{safe_title}</title>
 <description>{safe_description}</description>
 <link>{link}</link>
 <language>en-us</language>
 <ttl>60</ttl>
 <generator>Kyouma 1.0.0-SE</generator>
 <atom:link href="https://wyattjmiller.com/posts.xml" rel="self" type="application/rss+xml" />
 {}
 </channel>
-</rss>
-"#,
+</rss>"#,
         rss_entries
     )
 }
backend/public/src/utils/sitemap.rs
@@ -23,6 +23,7 @@ impl SitemapEntry {
 pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
     let urls = entries
         .values()
+        .into_iter()
         .map(|entry| entry.to_item())
         .collect::<String>();
     format!(
@@ -38,21 +39,21 @@ pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
 
 pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
     entries.insert(
-        "10000".to_string(),
+        (entries.len() + 1).to_string(),
         SitemapEntry {
             location: web_url.clone(),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        "10001".to_string(),
+        (entries.len() + 1).to_string(),
         SitemapEntry {
             location: format!("{}/posts", web_url),
             lastmod: chrono::Utc::now(),
         },
     );
     entries.insert(
-        "10002".to_string(),
+        (entries.len() + 1).to_string(),
         SitemapEntry {
             location: format!("{}/projects", web_url),
             lastmod: chrono::Utc::now(),
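A subtlety in the key change above: `(entries.len() + 1).to_string()` is re-evaluated before each insert, so each call sees the map grown by the previous one. A self-contained sketch of the resulting keys, assuming the map starts empty:

```rust
use std::collections::HashMap;

fn main() {
    let mut entries: HashMap<String, String> = HashMap::new();
    // Keys come out as "1", "2", "3" because len() grows between inserts.
    entries.insert((entries.len() + 1).to_string(), "https://example.com".into());
    entries.insert((entries.len() + 1).to_string(), "https://example.com/posts".into());
    entries.insert((entries.len() + 1).to_string(), "https://example.com/projects".into());
    assert!(entries.contains_key("1") && entries.contains_key("2") && entries.contains_key("3"));
    // Unlike the old fixed "10000".."10002" keys, these small numeric keys can
    // collide with entries already keyed by id if the map is pre-populated.
}
```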
backend/queue/Cargo.lock (generated, new file, 7 lines)
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "queue"
+version = "0.1.0"
backend/queue/Cargo.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
+[package]
+name = "queue"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
backend/queue/src/lib.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
+pub fn add(left: u64, right: u64) -> u64 {
+    left + right
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn it_works() {
+        let result = add(2, 2);
+        assert_eq!(result, 4);
+    }
+}
backend/storage/README.md (7 lines deleted)
@@ -1,7 +0,0 @@
-# Storage library
-
-also known as `storage`
-
-## What is this?
-
-An internal storage library. This was needed because both `public` and `task` needed storage functionality. Additionally, this helps maintainability and avoids duplicate code.
backend/storage/src/services/aws.rs
@@ -4,7 +4,7 @@ use aws_config::{BehaviorVersion, Region};
 use aws_sdk_s3::{Client, Config, config::Credentials};
 use std::env;
 
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct S3ClientConfig {
     pub access_key: String,
     secret_key: String,
@@ -13,29 +13,11 @@
     region: String,
 }
 
-#[derive(Clone)]
 pub struct S3Client {
     client: Client,
-    pub client_config: S3ClientConfig,
 }
 
 impl S3ClientConfig {
-    pub fn new(
-        access_key: &str,
-        secret_key: &str,
-        endpoint: &str,
-        bucket: &str,
-        region: &str,
-    ) -> Result<Self, Box<dyn std::error::Error>> {
-        Ok(S3ClientConfig {
-            access_key: access_key.to_owned(),
-            secret_key: secret_key.to_owned(),
-            endpoint: endpoint.to_owned(),
-            bucket: bucket.to_owned(),
-            region: region.to_owned(),
-        })
-    }
-
     pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
         Ok(S3ClientConfig {
             access_key: env::var("S3_ACCESS_KEY")
@@ -70,7 +52,6 @@
 
         Self {
             client: Client::from_conf(s3_config),
-            client_config: config.clone(),
         }
     }
 }
@@ -118,9 +99,3 @@
         todo!("not impl")
     }
 }
-
-impl Default for S3ClientConfig {
-    fn default() -> Self {
-        S3ClientConfig::from_env().unwrap()
-    }
-}
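With the five-argument `new(...)` constructor and the `Default` impl gone, `from_env` is the remaining way to build an `S3ClientConfig`. A minimal sketch of constructing the client after this change (the environment variable names appear in the task README further down):

```rust
use storage::services::aws::{S3Client, S3ClientConfig};

fn build_client() -> Result<S3Client, Box<dyn std::error::Error>> {
    // Reads S3_ACCESS_KEY, S3_SECRET_KEY, S3_BUCKET and friends from the environment.
    let config = S3ClientConfig::from_env()?;
    Ok(S3Client::new(&config))
}
```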
backend/task/.sqlx/query-364c58ab7678af9d36003af9858e69b876be3939a4d9f34a95950ab7cc166778.json
@@ -1,46 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "INSERT INTO logs (task_id, created_at, task_status) VALUES ($1, now(), 'pending') RETURNING task_id, log_id, created_at, task_status, finished_at",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "task_id", "type_info": "Int4" },
-      { "ordinal": 1, "name": "log_id", "type_info": "Int4" },
-      { "ordinal": 2, "name": "created_at", "type_info": "Timestamptz" },
-      { "ordinal": 3, "name": "task_status", "type_info": "Text" },
-      { "ordinal": 4, "name": "finished_at", "type_info": "Timestamptz" }
-    ],
-    "parameters": { "Left": ["Int4"] },
-    "nullable": [false, false, false, false, true]
-  },
-  "hash": "364c58ab7678af9d36003af9858e69b876be3939a4d9f34a95950ab7cc166778"
-}

backend/task/.sqlx/query-723a24f681b1b7866e4a2636ddda2bb8ed78d60540158ffa0fbebba4bdbfa2b9.json
@@ -1,22 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "SELECT EXISTS(SELECT 1 FROM posts p WHERE p.filename = $1) as filename",
-  "describe": {
-    "columns": [
-      { "ordinal": 0, "name": "filename", "type_info": "Bool" }
-    ],
-    "parameters": { "Left": ["Text"] },
-    "nullable": [null]
-  },
-  "hash": "723a24f681b1b7866e4a2636ddda2bb8ed78d60540158ffa0fbebba4bdbfa2b9"
-}

backend/task/.sqlx/query-e3f9cdc6fede1a8601c3775e829f04eef5b00cf7bc5a087b5ba5c70f99e76763.json
@@ -1,15 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "UPDATE logs SET task_status = $1 WHERE task_id = $2",
-  "describe": {
-    "columns": [],
-    "parameters": { "Left": ["Text", "Int4"] },
-    "nullable": []
-  },
-  "hash": "e3f9cdc6fede1a8601c3775e829f04eef5b00cf7bc5a087b5ba5c70f99e76763"
-}
backend/task/Cargo.lock (generated, 959 lines)
File diff suppressed because it is too large.
backend/task/Cargo.toml
@@ -7,7 +7,6 @@ edition = "2021"
 
 [dependencies]
 storage = { version = "0.1.0", path = "../storage" }
-cache = { version = "0.1.0", path = "../cache" }
 tokio = { version = "1.19.2", features = ["full"] }
 reqwest = { version = "0.12.20", features = ["json", "rustls-tls"] }
 job_scheduler = "1.2.1"
backend/task/Dockerfile (14 lines deleted)
@@ -1,14 +0,0 @@
-FROM rust:1.88.0
-
-WORKDIR /app
-
-COPY ./task ./task
-COPY ./cache ./cache
-COPY ./storage ./storage
-
-RUN mkdir /app/posts
-RUN cargo build --release --manifest-path ./task/Cargo.toml
-
-EXPOSE 3000
-
-CMD ["/app/task/target/release/task"]
backend/task/README.md
@@ -12,9 +12,4 @@ This is a task runner/scheduler programs that will fire off various tasks. These
 
 For `task` to work properly, please make sure to first create the `.env` file, then fill out the following environment variables:
 
 - `BASE_URI_API` - needed for communicating with `public`
 - `DATABASE_URL` - needed for communicating to Postgres
-- `REDIS_URL` - needed for communicating with the cache (Redis or Valkey)
-- `S3_ACCESS_KEY` - needed for Amazon S3 (or compatible services) storage
-- `S3_SECRET_KEY` - needed for Amazon S3 (or compatible services) storage
-- `S3_BUCKET` - needed for Amazon S3 (or compatible services) storage
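A small sketch of how `task` consumes the remaining variables at startup, matching the `dotenvy` call that replaces the `config` module in `main.rs` below; the panic messages are illustrative:

```rust
use std::env;

fn load_settings() -> (String, String) {
    // Load .env into the process environment; the unwrap mirrors task's main().
    dotenvy::dotenv().unwrap();

    let base_uri = env::var("BASE_URI_API").expect("BASE_URI_API is not set");
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL is not set");
    (base_uri, database_url)
}
```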
backend/task/src/config.rs (11 lines deleted)
@@ -1,11 +0,0 @@
-use std::path::PathBuf;
-
-pub fn config() -> Configuration {
-    Configuration {
-        env: dotenvy::dotenv(),
-    }
-}
-
-pub struct Configuration {
-    env: Result<PathBuf, dotenvy::Error>,
-}
backend/task/src/main.rs
@@ -1,20 +1,16 @@
-use cache::ClientLike;
 use chrono::Utc;
 use sqlx::{postgres::PgPoolOptions, Pool, Postgres};
 use std::env;
 use std::sync::Arc;
-use std::time::Duration;
 use storage::services::aws;
 use tasks::*;
 
-mod config;
+//mod config;
 mod tasks;
 mod utils;
 
 pub struct TaskManager<'a> {
     pool: Pool<Postgres>,
-    cache: cache::Pool,
-    s3_client: aws::S3Client,
     jobs: Vec<TaskJob>,
     last_activated: Option<chrono::DateTime<Utc>>,
     last_job: Option<TaskJob>,
@@ -52,9 +48,7 @@ pub struct TaskJob {
 async fn main() {
     println!("Hello, world!");
 
-    let _ = config::config();
-
-    // setup database
+    dotenvy::dotenv().unwrap();
     let database_url =
         env::var("DATABASE_URL").expect("Environment variable DATABASE_URL is not found");
     let pool = PgPoolOptions::new()
@@ -64,35 +58,7 @@
     .await
     .expect("Failed to connect to the database");
 
-    // setup redis/valkey
-    let redis_url = match std::env::var("REDIS_URL").unwrap().as_str() {
-        // TODO: fix the unwrap ^
-        "" => "redis://localhost:6379".to_string(),
-        x => x.to_string(),
-    };
-
-    let pool_size = 8;
-    let config = cache::Config::from_url(&redis_url).unwrap(); // TODO: fix the unwrap <<<
-
-    let redis_pool = cache::Builder::from_config(config)
-        .with_performance_config(|config| {
-            config.default_command_timeout = Duration::from_secs(60);
-        })
-        .set_policy(cache::ReconnectPolicy::new_exponential(0, 100, 30_000, 2))
-        .build_pool(pool_size)
-        .expect("Failed to create cache pool");
-
-    if std::env::var("REDIS_URL").unwrap() != "" {
-        // TODO: fix the unwrap ^
-        redis_pool.init().await.expect("Failed to connect to cache");
-        let _ = redis_pool.flushall::<i32>(false).await;
-    }
-
-    // setup storage
-    let s3_client_config = aws::S3ClientConfig::from_env().unwrap();
-    let s3_client = aws::S3Client::new(&s3_client_config);
-
-    let mut manager = TaskManager::new(pool, redis_pool, s3_client);
+    let mut manager = TaskManager::new(pool);
     manager.register_jobs().await.unwrap();
 
     loop {
@@ -102,11 +68,9 @@
 }
 
 impl<'a> TaskManager<'a> {
-    fn new(pool: Pool<Postgres>, cache: cache::Pool, s3_client: aws::S3Client) -> Self {
+    fn new(pool: Pool<Postgres>) -> Self {
         TaskManager {
             pool,
-            cache,
-            s3_client,
             jobs: Vec::new(),
             last_activated: None,
             last_job: None,
@@ -136,15 +100,11 @@
             }
             2 => {
                 let pool = Arc::new(self.pool.clone());
-                let cache = Arc::new(self.cache.clone());
-                let s3_client = Arc::new(self.s3_client.clone());
-                Box::new(move || upload_rss::register(&pool, &cache, &s3_client))
+                Box::new(move || upload_rss::register(&pool))
             }
             3 => {
                 let pool = Arc::new(self.pool.clone());
-                let cache = Arc::new(self.cache.clone());
-                let s3_client = Arc::new(self.s3_client.clone());
-                Box::new(move || upload_sitemap::register(&pool, &cache, &s3_client))
+                Box::new(move || upload_sitemap::register(&pool))
             }
             id => return Err(format!("Unknown task_id: {}", id).into()),
         };
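Each match arm above packages a job as a boxed closure owning an `Arc` clone of whatever it needs; after this change that is just the pool. A stripped-down sketch of the pattern with a stand-in handle:

```rust
use std::sync::Arc;

#[derive(Clone)]
struct PoolHandle; // stand-in for sqlx::Pool<Postgres>

fn make_job(task_id: i32, pool: &PoolHandle) -> Result<Box<dyn Fn() + Send>, String> {
    Ok(match task_id {
        2 | 3 => {
            // Clone once into an Arc; the closure captures it by move and can
            // be invoked repeatedly by the scheduler.
            let pool = Arc::new(pool.clone());
            Box::new(move || {
                let _pool = Arc::clone(&pool); // hand to the spawned task body
            })
        }
        id => return Err(format!("Unknown task_id: {}", id)),
    })
}
```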
@@ -2,13 +2,12 @@ use std::fs;
use std::io::Read;

use crate::utils::task_log;
use chrono::{DateTime, FixedOffset, Utc};
use serde::{Deserialize, Deserializer};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let p = pool.clone();
    tokio::spawn(async move {
        let _ = import_posts("/app/posts", &p).await;
        let _ = import_posts("app/", &p).await;
    });
}

@@ -20,15 +19,21 @@ async fn import_posts(

    // Start task logging
    let task = task_log::start(1, pool).await?;

    // Setup markdown options
    let options = MarkdownOptions {
        options: markdown::Constructs::gfm(),
    };

    // Read directory contents
    let entries = fs::read_dir(dir_path)?;

    // Process each file
    for entry_result in entries {
        let file = entry_result?;
        let file_path = file.path();

        // Skip non-file entries
        if !file_path.is_file() {
            continue;
        }
@@ -66,23 +71,22 @@ async fn import_posts(

        let content =
            markdown::to_html_with_options(&document.content, &markdown::Options::default());
        println!("{:?}", content);

        let title = document.metadata.title;
        let pub_date =
            DateTime::parse_from_str(document.metadata.date.as_ref(), "%Y-%m-%d %H:%M:%S %z")?;
        let content_final = content.unwrap();

        // Insert into database
        let results = sqlx::query_as::<_, InsertPosts>(
            "INSERT INTO posts (title, body, filename, publish_date, author_id) VALUES ($1, $2, $3, $4, $5) RETURNING title, body, filename, author_id"
            "INSERT INTO posts (title, body, filename, author_id) VALUES ($1, $2, $3, $4) RETURNING title, body, filename, author_id"
        )
        .bind(title)
        .bind(content_final)
        .bind(file_name_str)
        .bind(pub_date)
        .bind(1) // Consider making author_id a parameter
        .fetch_one(pool)
        .await?;
        println!("{:?}", results);

        println!("Successfully imported: {}", file_name_str);
    } else {
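One subtlety in the import hunk above: `DateTime::parse_from_str` with the `%Y-%m-%d %H:%M:%S %z` format only succeeds when the frontmatter date carries an explicit numeric UTC offset, and the trailing `?` aborts the import of any file that omits it. A small illustrative check (the sample date strings are hypothetical):

```rust
use chrono::DateTime;

fn main() {
    let fmt = "%Y-%m-%d %H:%M:%S %z";
    // Parses: date, time, and a numeric offset, matching the format above.
    assert!(DateTime::parse_from_str("2024-01-15 10:30:00 +0000", fmt).is_ok());
    // Fails: no offset, so import_posts would return early on this file.
    assert!(DateTime::parse_from_str("2024-01-15 10:30:00", fmt).is_err());
}
```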
@@ -2,24 +2,19 @@ use crate::utils::{
    request::{Request, Response},
    task_log,
};
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};
use storage::services::{
    aws::{S3Client, S3ClientConfig},
    ObjectStorageClient,
};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let p = pool.clone();
    let c = cache.clone();
    let s3 = s3_client.to_owned();

    tokio::spawn(async move {
        let _ = upload_rss(&p, &c, s3).await;
        let _ = upload_rss(&p).await;
    });
}

async fn upload_rss(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cache: &cache::Pool,
    s3_client: S3Client,
) -> Result<(), Box<dyn std::error::Error>> {
async fn upload_rss(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<(), Box<dyn std::error::Error>> {
    // start task logging
    task_log::start(2, pool).await?;

@@ -30,37 +25,15 @@ async fn upload_rss(

    // upload the sucker to obj storage
    if let Response::Xml(rss) = rss_result {
        let cached: &Option<String> = &cache.get(String::from("rss")).await.unwrap_or(None);
        let cache_clone = cache.clone();
        if let Some(cached_value) = cached {
            if *cached_value == rss {
                println!("Response is the same in the cache, exiting");
                return Ok(());
            }
        }
        let r = rss.clone();

        let client_config = S3ClientConfig::from_env().unwrap();
        let s3_client = S3Client::new(&client_config);
        let _ = s3_client
            .put_object(
                s3_client.client_config.bucket.as_str(),
                client_config.bucket.as_str(),
                "feed.xml",
                rss.as_bytes().to_vec(),
            )
            .await?;

        tokio::spawn(async move {
            cache_clone
                .set::<String, String, &String>(
                    String::from("rss"),
                    &r,
                    Some(cache::Expiration::EX(3600)),
                    None,
                    false,
                )
                .await
                .unwrap();
        });

            .await;
        println!("Finished uploading RSS feed");
    }
@@ -2,23 +2,20 @@ use crate::utils::{
    request::{Request, Response},
    task_log,
};
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};
use storage::services::{
    aws::{S3Client, S3ClientConfig},
    ObjectStorageClient,
};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
    let p = pool.clone();
    let c = cache.clone();
    let s3 = s3_client.to_owned();

    tokio::spawn(async move {
        let _ = upload_sitemap(&p, &c, s3).await;
        let _ = upload_sitemap(&p).await;
    });
}

async fn upload_sitemap(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cache: &cache::Pool,
    s3_client: S3Client,
) -> Result<(), Box<dyn std::error::Error>> {
    // start task logging
    task_log::start(3, pool).await?;
@@ -30,36 +27,15 @@ async fn upload_sitemap(

    // upload the sucker to obj storage
    if let Response::Xml(sitemap) = sitemap_result {
        let cached: &Option<String> = &cache.get(String::from("sitemap")).await.unwrap_or(None);
        let cache_clone = cache.clone();
        if let Some(cached_value) = cached {
            if *cached_value == sitemap {
                println!("Response is the same in the cache, exiting");
                return Ok(());
            }
        }
        let s = sitemap.clone();

        let client_config = S3ClientConfig::from_env().unwrap();
        let s3_client = S3Client::new(&client_config);
        let _ = s3_client
            .put_object(
                s3_client.client_config.bucket.as_str(),
                client_config.bucket.as_str(),
                "sitemap.xml",
                sitemap.as_bytes().to_vec(),
            )
            .await?;

        tokio::spawn(async move {
            cache_clone
                .set::<String, String, &String>(
                    String::from("sitemap"),
                    &s,
                    Some(cache::Expiration::EX(3600)),
                    None,
                    false,
                )
                .await
                .unwrap();
        });
            .await;
        println!("Finished uploading sitemap!");
    }
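Aside from the cache key and the object name ("rss"/"feed.xml" versus "sitemap"/"sitemap.xml"), the `upload_rss` and `upload_sitemap` hunks above are the same flow: compare the freshly rendered XML against the cached copy, skip the upload when nothing changed, otherwise `put_object` and refresh the cache. A hypothetical consolidation, built only from calls that already appear in the hunks (`KeysInterface::get`/`set`, `put_object`, `cache::Expiration::EX`); `upload_xml_if_changed` is a name introduced here:

```rust
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};

/// Hypothetical shared helper for the rss/sitemap tasks above.
async fn upload_xml_if_changed(
    cache: &cache::Pool,
    s3_client: &S3Client,
    key: &str,         // "rss" or "sitemap"
    object_name: &str, // "feed.xml" or "sitemap.xml"
    xml: String,
) -> Result<(), Box<dyn std::error::Error>> {
    // Skip the upload when the cached copy matches the fresh render.
    let cached: Option<String> = cache.get(key.to_string()).await.unwrap_or(None);
    if cached.as_deref() == Some(xml.as_str()) {
        println!("Response is the same in the cache, exiting");
        return Ok(());
    }

    s3_client
        .put_object(
            s3_client.client_config.bucket.as_str(),
            object_name,
            xml.as_bytes().to_vec(),
        )
        .await?;

    // Refresh the cache with the same one-hour expiry used above.
    cache
        .set::<String, String, &String>(
            key.to_string(),
            &xml,
            Some(cache::Expiration::EX(3600)),
            None,
            false,
        )
        .await?;

    Ok(())
}
```

Each `register` function would then spawn `upload_xml_if_changed` with its own key and object name, leaving `task_log::start` and the XML rendering as the only task-specific steps.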
@@ -1,137 +0,0 @@
version: "3.8"

services:
  valkey-mywebsite:
    image: valkey/valkey:8.0.2
    container_name: valkey-mywebsite
    ports:
      - "6379:6379"
    volumes:
      - valkey_mywebsite_data:/data
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "valkey-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  postgres-mywebsite:
    image: postgres:16
    container_name: postgres-mywebsite
    # fill these in with postgres env vars
    environment:
      POSTGRES_USER: wyatt
      POSTGRES_PASSWORD: test # <<< replace this
    ports:
      - "5432:5432"
    volumes:
      - postgres_mywebsite_data:/var/lib/postgresql/data
      - ./init-db:/docker-entrypoint-initdb.d
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U wyatt -d postgres"]
      interval: 30s
      timeout: 10s
      retries: 3

  frontend:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_frontend:master
    container_name: frontend
    ports:
      - "8000:8000"
    # fill these in the frontend env vars for prod
    environment:
      - BASE_URI_API=
      - BASE_URI_WEB=
      - EMAIL_FORM=
      - RSS_URI=
      - SITEMAP_URI=
      - VIRTUAL_HOST=wyattjmiller.com
      - VIRTUAL_PORT=80
      - LETSENCRYPT_HOST=wyattjmiller.com
      - LETSENCRYPT_EMAIL=wjmiller2016@gmail.com
    volumes:
      - ./deno-fresh-app:/app
      - /app/node_modules
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  public-mywebsite:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_public:master
    container_name: public-mywebsite
    ports:
      - "3000:3000"
    # fill these in with public env vars for prod
    environment:
      - DATABASE_URL=
      - REDIS_URL=
      - BASE_URI_WEB=
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    restart: unless-stopped
    networks:
      - app_network
    # healthcheck URL matches the container port exposed above
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  task-mywebsite:
    image: scm.wyattjmiller.com/wymiller/my-website-v2_task:master
    container_name: task-mywebsite
    # fill these in with task env vars for prod
    environment:
      - DATABASE_URL=
      - BASE_URI_API=
      - S3_ACCESS_KEY=
      - S3_SECRET_KEY=
      - S3_BUCKET=
      - REDIS_URL=
    depends_on:
      postgres-mywebsite:
        condition: service_healthy
      valkey-mywebsite:
        condition: service_healthy
    volumes:
      - ./backend/task/app:/app/posts # <<< place all markdown files here
    restart: unless-stopped
    networks:
      - app_network
    healthcheck:
      test: ["CMD", "pgrep", "-f", "task-mywebsite"]
      interval: 30s
      timeout: 10s
      retries: 3

networks:
  app_network:
    driver: bridge
    ipam:
      config:
        - subnet: 172.20.0.0/16

volumes:
  valkey_mywebsite_data:
    driver: local
  postgres_mywebsite_data:
    driver: local
flake.lock (generated, 22 lines changed)
@@ -21,11 +21,11 @@
      ]
    },
    "locked": {
      "lastModified": 1748076591,
      "narHash": "sha256-zfcYlOBYGfp4uxPC9ctaWf37bjZagbQ0pw7mqgTqfBI=",
      "lastModified": 1729525729,
      "narHash": "sha256-YiooFGeR7+sXSkHNfSzT8GQf+xtzbDwUbfbwkCCyuUs=",
      "owner": "nekowinston",
      "repo": "nix-deno",
      "rev": "0b22de7dd34c7d7c7cd46cedee0b65592dc57d3e",
      "rev": "e92687492a4faec48ab1eb45adbdba30c876b0e5",
      "type": "github"
    },
    "original": {
@@ -36,12 +36,12 @@
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1752436162,
        "narHash": "sha256-Kt1UIPi7kZqkSc5HVj6UY5YLHHEzPBkgpNUByuyxtlw=",
        "rev": "dfcd5b901dbab46c9c6e80b265648481aafb01f8",
        "revCount": 806304,
        "lastModified": 1741862977,
        "narHash": "sha256-prZ0M8vE/ghRGGZcflvxCu40ObKaB+ikn74/xQoNrGQ=",
        "rev": "cdd2ef009676ac92b715ff26630164bb88fec4e0",
        "revCount": 715614,
        "type": "tarball",
        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2505.806304%2Brev-dfcd5b901dbab46c9c6e80b265648481aafb01f8/01980f2c-e7f3-7efc-b369-7ebec7be6e59/source.tar.gz"
        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2411.715614%2Brev-cdd2ef009676ac92b715ff26630164bb88fec4e0/019590d8-bf83-7849-9c87-9e373480fc07/source.tar.gz"
      },
      "original": {
        "type": "tarball",
@@ -63,11 +63,11 @@
      ]
    },
    "locked": {
      "lastModified": 1752633862,
      "narHash": "sha256-Bj7ozT1+5P7NmvDcuAXJvj56txcXuAhk3Vd9FdWFQzk=",
      "lastModified": 1742005800,
      "narHash": "sha256-6wuOGWkyW6R4A6Th9NMi6WK2jjddvZt7V2+rLPk6L3o=",
      "owner": "oxalica",
      "repo": "rust-overlay",
      "rev": "8668ca94858206ac3db0860a9dec471de0d995f8",
      "rev": "028cd247a6375f83b94adc33d83676480fc9c294",
      "type": "github"
    },
    "original": {
@@ -1,17 +0,0 @@
FROM denoland/deno:alpine

RUN apk add bash

# USER deno

RUN deno cache --reload deno.json

COPY . .

RUN bash -c 'deno cache main.ts'

RUN bash -c 'deno task build'

EXPOSE 8000

CMD ["deno", "run", "-A", "main.ts"]
@@ -2,14 +2,14 @@ import * as hi from "jsr:@preact-icons/hi2";

export default function Footer() {
  return (
    <footer class="bg-[#313244] text-[#cdd6f4] py-8 mt-auto">
    <footer class="bg-[#313244] text-[#cdd6f4] py-8">
      <div class="container mx-auto px-4">
        {/* Grid layout that switches from 2 to 1 column on small screens */}
        <div class="grid grid-cols-1 md:grid-cols-2 gap-8">
          <div class="space-y-2">
            <a
              class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
              href="/rss"
              href={Deno.env.get("BASE_URI_RSS")}
            >
              <div class="flex items-center gap-2">
                <hi.HiOutlineRss />
@@ -18,7 +18,7 @@ export default function Footer() {
            </a>
            <a
              class="mb-8 text-[#cdd6f4] transition-all duration-300 ease-in-out hover:text-[#cba6f7] hover:drop-shadow-[0_0_20px_rgba(96,165,250,0.7)] hover:scale-110 cursor-pointer visited:text-[#bac2de]"
              href="/sitemap"
              href={Deno.env.get("BASE_URI_SITEMAP")}
            >
              <div class="flex items-center gap-2">
                <hi.HiOutlineMap />
@@ -1,89 +0,0 @@
import * as hi from "jsr:@preact-icons/hi2";

export function PaginationControl({
  paginatedData,
  currentUrl,
  authorId,
}: {
  paginatedData: PaginatedPosts;
  currentUrl: URL;
  authorId: number;
}) {
  const buildUrl = (page: number, limit?: number) => {
    const params = new URLSearchParams(currentUrl.searchParams);
    params.set("page", page.toString());
    if (limit) params.set("limit", limit.toString());
    return `${currentUrl.pathname}?${params.toString()}`;
  };

  if (paginatedData.totalPages <= 1) return null;

  return (
    <div class="mt-8 space-y-4">
      {/* Pagination info and controls */}
      <div class="flex flex-col sm:flex-row justify-center items-center gap-4">
        <div class="flex items-center gap-2">
          {paginatedData.hasPrevPage && (
            <a
              href={buildUrl(paginatedData.currentPage - 1)}
              class="px-4 py-2 bg-[#45475a] text-[#cdd6f4] shadow-sm rounded hover:bg-[#6A6B7A] transition-colors"
            >
              <div class="flex items-center gap-2">
                <hi.HiChevronDoubleLeft />
                Previous
              </div>
            </a>
          )}

          {/* Page numbers */}
          <div class="flex gap-1">
            {Array.from(
              { length: Math.min(paginatedData.totalPages, 7) },
              (_, i) => {
                let pageNum;
                if (paginatedData.totalPages <= 7) {
                  pageNum = i + 1;
                } else {
                  const start = Math.max(1, paginatedData.currentPage - 3);
                  const end = Math.min(paginatedData.totalPages, start + 6);
                  pageNum = start + i;
                  if (pageNum > end) return null;
                }

                const isCurrentPage = pageNum === paginatedData.currentPage;

                return (
                  <a
                    key={pageNum}
                    href={buildUrl(pageNum)}
                    class={`px-3 py-1 rounded text-sm shadow-sm ${
                      isCurrentPage
                        ? "bg-[#6A6B7A] text-[#cdd6f4]"
                        : "bg-[#45475a] text-[#cdd6f4] hover:bg-[#6A6B7A]"
                    }`}
                  >
                    {pageNum}
                  </a>
                );
              },
            )}
          </div>

          {paginatedData.hasNextPage && (
            <a
              href={buildUrl(paginatedData.currentPage + 1)}
              class="px-4 py-2 bg-[#45475a] text-[#cdd6f4] shadow-sm rounded hover:bg-[#6A6B7A] transition-colors"
            >
              <div class="flex items-center gap-2">
                Next
                <hi.HiChevronDoubleRight />
              </div>
            </a>
          )}
        </div>

        {/* Quick jump to page */}
      </div>
    </div>
  );
}
@@ -4,7 +4,7 @@ import { Post } from "../types/index.ts";

export const PostCard = function PostCard({ post }: { post: Post }) {
  return (
    <div class="p-6 bg-[#45475a] rounded-lg shadow-xl transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105">
    <div class="p-6 bg-[#45475a] rounded-lg shadow-md transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105">
      <a href={`${Deno.env.get("BASE_URI_WEB")}/posts/${post.post_id}`}>
        <h2 class="text-white text-lg font-bold mb-2">{post.title}</h2>
        <p class="text-white">
@@ -15,9 +15,9 @@ export const PostCard = function PostCard({ post }: { post: Post }) {
          >
            {post.first_name} {post.last_name}
          </a>{" "}
          at {convertUtc(post.publish_date)}
          at {convertUtc(post.created_at)}
        </p>
        <p class="text-gray-400">{truncateString(post.body, 45)}</p>
        <p class="text-gray-400">{truncateString(post.body, 15)}</p>
      </a>
    </div>
  );
@@ -8,7 +8,7 @@ export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
    <Head>
      <title>Wyatt J. Miller | {post.title}</title>
    </Head>
    <div class="p-4 bg-[#313244]">
    <div class="p-6 bg-[#313244]">
      <div class="min-w-screen flex flex-col items-center justify-between bg-[#45475a] rounded-lg shadow-md">
        <div class="sm:mt-14 sm:mb-14 mt-8 mb-8 flex flex-col items-center gap-y-5 gap-x-10 md:flex-row">
          <div class="space-y-2 text-center md:text-left">
@@ -17,7 +17,7 @@ export const PostHeader = function PostHeader({ post }: PostHeaderOpts) {
            </p>
            <p class="text-md font-medium text-[#E39A9C] sm:text-xl italic">
              by {post.first_name} {post.last_name} posted on{" "}
              {convertUtc(post.publish_date)}
              {convertUtc(post.created_at)}
            </p>
          </div>
        </div>
@@ -1,13 +1,14 @@
export const ShareLinkButton = function ShareLinkButton({ props }) {
  const [text, setText] = useState("Share");
export const ShareLinkButton = function ShareLinkButton({props}) {
  const [text, setText] = useState("Share");

  const onClickHandler = () => {
    navigator.clipboard.writeText(location.href);
    setText("Copied to clipboard!");
    setTimeout(() => {
      setText("Share");
    }, 1000);
  };

  return <button onClick={onClickHandler}>{text}</button>;
};
  return (
    <button onClick={onClickHandler}>
      {text}
    </button>
  )
}
@@ -12,8 +12,6 @@ import * as $index from "./routes/index.tsx";
import * as $posts_id_ from "./routes/posts/[id].tsx";
import * as $posts_index from "./routes/posts/index.tsx";
import * as $projects_index from "./routes/projects/index.tsx";
import * as $rss_index from "./routes/rss/index.tsx";
import * as $sitemap_index from "./routes/sitemap/index.tsx";
import * as $Counter from "./islands/Counter.tsx";
import * as $ProjectCard from "./islands/ProjectCard.tsx";
import { type Manifest } from "$fresh/server.ts";
@@ -30,8 +28,6 @@ const manifest = {
    "./routes/posts/[id].tsx": $posts_id_,
    "./routes/posts/index.tsx": $posts_index,
    "./routes/projects/index.tsx": $projects_index,
    "./routes/rss/index.tsx": $rss_index,
    "./routes/sitemap/index.tsx": $sitemap_index,
  },
  islands: {
    "./islands/Counter.tsx": $Counter,
@@ -1,9 +1,9 @@
export const ProjectCard = function ProjectCard(props: ProjectProps) {
  return (
    <div
      class={`md:m-8 group space-y-1 rounded-md ${
      class={`group space-y-1 rounded-md ${
        props.wip ? "border-2 border-dashed" : "cursor-pointer"
      } bg-[#45475a] px-3 py-2 m-4 shadow-md transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105`}
      } bg-[#45475a] px-3 py-2 m-10 shadow-md transition-all duration-300 ease-in-out hover:shadow-xl hover:scale-105`}
      onClick={() => props.repo && open(props.repo, "_blank")}
    >
      <div class="flex items-center justify-between">
@@ -1,4 +1,3 @@
export const truncateString = (str: string, maxLength: number) => {
  str = str.replace(/<[^>]*>/g, "");
  return str.length > maxLength ? `${str.slice(0, maxLength)}...` : str;
};
@@ -2,20 +2,13 @@ import { FreshContext, Handlers, PageProps } from "$fresh/server.ts";
import AuthorCard from "../../components/AuthorCard.tsx";
import { Post } from "../../types/index.ts";
import { PostCarousel } from "../../components/PostCarousel.tsx";
import { PaginationControl } from "../../components/PaginationControl.tsx";

export const handler: Handlers<PageData> = {
  async GET(req: Request, ctx: FreshContext) {
  async GET(_req: Request, ctx: FreshContext) {
    try {
      const url = new URL(req.url);
      const page = parseInt(url.searchParams.get("page") || "1");
      const limit = parseInt(url.searchParams.get("limit") || "12");

      const [authorResponse, authorPostResponse] = await Promise.all([
        fetch(`${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}`),
        fetch(
          `${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}/posts?page=${page}&limit=${limit}`,
        ),
        fetch(`${Deno.env.get("BASE_URI_API")}/authors/${ctx.params.id}/posts`),
      ]);

      const [authorData, authorPostData] = await Promise.all([
@@ -23,37 +16,9 @@ export const handler: Handlers<PageData> = {
        authorPostResponse.json(),
      ]);

      let paginatedData: PaginatedPosts;

      if (authorPostData.posts && authorPostData.total_posts !== undefined) {
        const totalPages = Math.ceil(authorPostData.total_posts / limit);
        paginatedData = {
          posts: authorPostData.posts,
          currentPage: page,
          totalPages,
          hasNextPage: page < totalPages,
          hasPrevPage: page > 1,
          totalPosts: authorPostData.total_posts,
        };
      } else {
        const allPosts = Array.isArray(authorPostData) ? authorPostData : [];
        const totalPages = Math.ceil(allPosts.length / limit);
        const startIndex = (page - 1) * limit;
        const endIndex = startIndex + limit;

        paginatedData = {
          posts: allPosts.slice(startIndex, endIndex),
          currentPage: page,
          totalPages,
          hasNextPage: page < totalPages,
          hasPrevPage: page > 1,
          totalPosts: allPosts.length,
        };
      }

      return ctx.render({
        authorData,
        authorPostData: paginatedData,
        authorPostData,
      });
    } catch (error) {
      return ctx.render({
@@ -65,7 +30,7 @@ export const handler: Handlers<PageData> = {
  },
};

export default function AuthorIdentifier({ data, url }: PageProps<PageData>) {
export default function AuthorIdentifier({ data }: PageProps<PageData>) {
  const { authorData, authorPostData, error } = data;

  if (error) {
@@ -87,12 +52,7 @@ export default function AuthorIdentifier({ data, url }: PageProps<PageData>) {
        <AuthorCard author={authorData} isIdentified={true} />
      </div>
      <div>
        <PostCarousel posts={authorPostData.posts} />
        <PaginationControl
          paginatedData={authorPostData}
          currentUrl={url}
          authorId={authorData.author_id}
        />
        <PostCarousel posts={authorPostData} />
      </div>
    </>
  );
@@ -26,6 +26,7 @@ export const handler: Handlers = {
      message: formData.get("message")?.toString(),
    };

    // Validation logic
    const errors: FormState["errors"] = {};

    if (!state.name || state.name.trim() === "") {
@@ -43,6 +44,7 @@ export const handler: Handlers = {
      errors.message = "Message is required";
    }

    // If there are errors, return the form with error messages
    if (Object.keys(errors).length > 0) {
      return ctx.render({
        ...state,
@@ -54,6 +56,7 @@ export const handler: Handlers = {
      method: "POST",
      body: formData,
    });
    console.log(res);

    if (!res.ok || res.status !== 200) {
      return ctx.render({
@@ -74,15 +77,11 @@ export default function Contact({ data }: PageProps<FormState>) {
    <div class="bg-[#313244] min-h-screen">
      <div class="px-4 py-8 mx-auto p-6 flex flex-col bg-[#313244] min-h-screen w-full md:max-w-md">
        <Head>
          <title>Wyatt J. Miller | Contact</title>
          <title>Contact</title>
        </Head>
        <h1 class="text-3xl text-white font-bold uppercase text-center">
          Contact
        </h1>
        <p class="md:text-lg sm:text-md text-white mt-5 mb-5">
          Got a question? Here to yell at me? Send me something!
        </p>
        <br />
        {data?.submitted && (
          <div
            class="bg-[#a6e3a1] text-[#313244] px-4 py-3 rounded relative"
@@ -115,7 +114,7 @@ export default function Contact({ data }: PageProps<FormState>) {
            required
            placeholder="Your Name"
            value={data?.name || ""}
            class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
            class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
              ${data?.errors?.name ? "border-[#f38ba8]" : "border-[#313244]"}`}
          />
          {data?.errors?.name && (
@@ -138,7 +137,7 @@ export default function Contact({ data }: PageProps<FormState>) {
            required
            placeholder="your@email.com"
            value={data?.email || ""}
            class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
            class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
              ${data?.errors?.email ? "border-[#f38ba8]" : "border-[#313244]"}`}
          />
          {data?.errors?.email && (
@@ -160,7 +159,7 @@ export default function Contact({ data }: PageProps<FormState>) {
            required
            placeholder="Write your message here..."
            rows={4}
            class={`w-full px-3 py-2 bg-[#ECECEE] border rounded-md focus:outline-transparent
            class={`w-full px-3 py-2 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500
              ${data?.errors?.message ? "border-red-500" : "border-gray-300"}`}
          >
            {data?.message || ""}
@@ -3,7 +3,7 @@ import { PhotoCircle } from "../components/PhotoCircle.tsx";
export default function Home() {
  return (
    <body>
      <div class="flex flex-col items-center justify-between bg-[#313244] min-h-screen">
      <div class="min-w-screen flex flex-col items-center justify-between bg-[#313244] sm:min-h-screen">
        <div class="sm:mt-14 sm:mb-14 mt-12 mb-4 flex flex-col items-center gap-y-5 gap-x-10 md:flex-row">
          <PhotoCircle
            src="https://wyattjmiller.us-ord-1.linodeobjects.com/IMG_1480-min.png"
@@ -69,6 +69,18 @@ export default function PostPage({ data }: PageProps<PageData>) {
        </div>
        <PostCarousel posts={recentPosts} />
      </section>
      <section>
        <div class="flex items-center gap-2 text-2xl text-white md:justify-start">
          <hi.HiOutlineFire />
          <h2 class="text-2xl font-bold text-white text-center lg:text-left">
            Hot Posts
          </h2>
        </div>
        <div className="text-lg font-thin italic mb-4 text-white text-center flex">
          Making chaos look cool since forever
        </div>
        <PostCarousel posts={hotPosts} />
      </section>
      <section>
        <div class="flex items-center gap-2 text-2xl text-white md:justify-start">
          <hi.HiOutlineBolt />
@@ -31,17 +31,11 @@ export default function Projects({ data }: PageProps<ProjectData>) {
    <div class="space-y-12 px-10 py-8 sm:min-h-screen bg-[#313244]">
      <section
        id="projects"
        class="lg:grid-cols-desktop grid scroll-mt-8 grid-cols-1 gap-x-4 gap-y-2 bg-[#313244] "
        class="lg:grid-cols-desktop grid scroll-mt-16 grid-cols-1 gap-x-10 gap-y-4 bg-[#313244] "
      >
        <h1 class="text-3xl text-white font-bold uppercase text-center">
          Projects
        </h1>
        <p class="md:text-lg sm:text-md text-white">
          Here's a collection of software and electronics projects I've been
          tinkering with during my free time - some are ongoing adventures,
          others are finished experiments, but they've all been exciting
          challenges that keep me busy when I'm not doing the "real work" stuff!
        </p>
        <div class="grid grid-cols-1 sm:grid-cols-2">
          {projects.map((project: any) => {
            return (
@@ -1,3 +0,0 @@
export function handler(req: Request): Response {
  return Response.redirect(`${Deno.env.get("RSS_URI")}`, 307);
}
@@ -1,3 +0,0 @@
export function handler(req: Request): Response {
  return Response.redirect(`${Deno.env.get("SITEMAP_URI")}`, 307);
}
Binary file not shown (image changed: 4.2 KiB before, 22 KiB after).
@@ -6,7 +6,6 @@ export type Post = {
  title: string;
  body: string;
  created_at: string;
  publish_date: string;
};

export type Author = {