Add /projects endpoints, sitemap generation, and RSS/sitemap upload tasks
This commit is contained in:
@ -1,3 +1,4 @@
|
||||
pub mod authors;
|
||||
pub mod comments;
|
||||
pub mod posts;
|
||||
pub mod projects;
|
||||
|
15
backend/public/src/datasources/projects.rs
Normal file
15
backend/public/src/datasources/projects.rs
Normal file
@ -0,0 +1,15 @@
|
||||
use sqlx::{FromRow, Pool, Postgres, Row};
|
||||
|
||||
use crate::routes::projects::Project;
|
||||
|
||||
/// Data-access layer for the `projects` table.
pub struct ProjectsDatasource;

impl ProjectsDatasource {
    /// Fetches every non-deleted project, newest first.
    ///
    /// Selects all rows from `projects` where `deleted_at IS NULL`,
    /// ordered by `created_at` descending. The `query_as!` macro checks
    /// the SQL against the schema at compile time; failures surface as
    /// `sqlx::Error` (connection, row decode, etc.).
    pub async fn get_all(pool: &Pool<Postgres>) -> Result<Vec<Project>, sqlx::Error> {
        sqlx::query_as!(
            Project,
            "SELECT project_id, title, repo, summary, tech, wip, created_at FROM projects p WHERE deleted_at IS NULL ORDER BY p.created_at DESC"
        )
        .fetch_all(pool)
        .await
    }
}
|
@ -131,6 +131,10 @@ async fn main() {
|
||||
"/authors",
|
||||
routes::authors::AuthorsRoute::routes(&app_state),
|
||||
)
|
||||
.nest(
|
||||
"/projects",
|
||||
routes::projects::ProjectsRoute::routes(&app_state),
|
||||
)
|
||||
.layer(CorsLayer::permissive())
|
||||
.layer(
|
||||
TraceLayer::new_for_http()
|
||||
|
@ -1,4 +1,5 @@
|
||||
pub mod authors;
|
||||
pub mod comments;
|
||||
pub mod posts;
|
||||
pub mod projects;
|
||||
pub mod root;
|
||||
|
@ -1,7 +1,11 @@
|
||||
use crate::{
|
||||
datasources::posts::PostsDatasource,
|
||||
state::AppState,
|
||||
utils::{datetime::*, rss},
|
||||
utils::{
|
||||
datetime::*,
|
||||
rss,
|
||||
sitemap::{self, SitemapEntry},
|
||||
},
|
||||
};
|
||||
use axum::http::{HeaderMap, HeaderValue};
|
||||
use axum::{
|
||||
@ -60,6 +64,7 @@ impl PostsRoute {
|
||||
.route("/hot", get(PostsRoute::get_hot_posts))
|
||||
.route("/featured", get(PostsRoute::get_featured_posts))
|
||||
.route("/rss", get(PostsRoute::get_rss_posts))
|
||||
.route("/sitemap", get(PostsRoute::get_sitemap))
|
||||
.with_state(app_state.clone())
|
||||
}
|
||||
|
||||
@ -330,7 +335,8 @@ impl PostsRoute {
|
||||
|
||||
match PostsDatasource::get_all(&state.database).await {
|
||||
Ok(posts) => {
|
||||
let web_url = std::env::var("BASE_URI_WEB").expect("No environment variable found");
|
||||
let web_url =
|
||||
std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
|
||||
let mapped_posts: HashMap<String, Post> = posts
|
||||
.into_iter()
|
||||
.map(|post| (post.post_id.to_string(), post))
|
||||
@ -343,9 +349,42 @@ impl PostsRoute {
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_DISPOSITION,
|
||||
HeaderValue::from_str(r#"attachment; filename="posts.xml""#).unwrap(),
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("application/xml"),
|
||||
);
|
||||
(headers, xml)
|
||||
}
|
||||
Err(e) => {
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert("Content-Type", HeaderValue::from_static("text/plain"));
|
||||
(headers, e.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_sitemap(State(app_state): State<AppState>) -> impl IntoResponse {
|
||||
let state = app_state.lock().await;
|
||||
// let cached: Option<Vec<Post>> = None; // TODO: maybe implement cache, later??
|
||||
|
||||
match PostsDatasource::get_all(&state.database).await {
|
||||
Ok(posts) => {
|
||||
let web_url =
|
||||
std::env::var("BASE_URI_WEB").expect("Environment BASE_URI_WEB variable found");
|
||||
let mut entries: HashMap<String, SitemapEntry> = posts
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
(
|
||||
p.post_id.to_string(),
|
||||
SitemapEntry {
|
||||
location: format!("{}/posts/{}", web_url, p.post_id.to_string()),
|
||||
lastmod: p.created_at.unwrap_or_else(|| chrono::Utc::now()),
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
sitemap::get_static_pages(&mut entries, &web_url);
|
||||
let xml: String = sitemap::generate_sitemap(&entries);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("application/xml"),
|
||||
|
69
backend/public/src/routes/projects.rs
Normal file
69
backend/public/src/routes/projects.rs
Normal file
@ -0,0 +1,69 @@
|
||||
use crate::{datasources::projects::ProjectsDatasource, state::AppState, utils::datetime::*};
|
||||
use axum::http::{HeaderMap, HeaderValue};
|
||||
use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::get, Json, Router};
|
||||
use fred::types::Expiration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A portfolio project row as served by the `/projects` endpoints.
#[derive(sqlx::FromRow, Deserialize, Serialize, Debug, Clone)]
pub struct Project {
    pub project_id: i32,
    pub title: String,
    /// Optional link to the source repository.
    pub repo: Option<String>,
    pub summary: String,
    // NOTE(review): free-form string — assumed to be a delimited tech list;
    // confirm the format against the DB schema.
    pub tech: String,
    /// Whether the project is still a work in progress.
    pub wip: Option<bool>,
    // Round-tripped through the shared datetime helpers so the wire
    // format stays consistent with the other route payloads.
    #[serde(serialize_with = "serialize_datetime")]
    #[serde(deserialize_with = "deserialize_datetime")]
    pub created_at: Option<chrono::DateTime<chrono::Utc>>,
}
|
||||
|
||||
pub struct ProjectsRoute;
|
||||
impl ProjectsRoute {
|
||||
pub fn routes(app_state: &AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/", get(ProjectsRoute::get_all))
|
||||
.with_state(app_state.clone())
|
||||
}
|
||||
|
||||
async fn get_all(State(app_state): State<AppState>) -> impl IntoResponse {
|
||||
let mut state = app_state.lock().await;
|
||||
let cached: Option<Vec<Project>> = state
|
||||
.cache
|
||||
.get(String::from("projects:all"))
|
||||
.await
|
||||
.unwrap_or(None);
|
||||
|
||||
if let Some(projects) = cached {
|
||||
tracing::info!("grabbing all projects from cache");
|
||||
return Ok(Json(projects));
|
||||
};
|
||||
|
||||
match ProjectsDatasource::get_all(&state.database).await {
|
||||
Ok(projects) => {
|
||||
tracing::info!("grabbing all projects from database");
|
||||
if let p = &projects {
|
||||
let projects = p.clone();
|
||||
let state = app_state.clone();
|
||||
|
||||
tracing::info!("storing database data in cache");
|
||||
tokio::spawn(async move {
|
||||
let mut s = state.lock().await;
|
||||
let _ = s
|
||||
.cache
|
||||
.set(
|
||||
String::from("projects:all"),
|
||||
&projects,
|
||||
Some(Expiration::EX(10)),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.await;
|
||||
});
|
||||
};
|
||||
|
||||
Ok(Json(projects))
|
||||
}
|
||||
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
}
|
||||
}
|
||||
}
|
@ -1,10 +1,15 @@
|
||||
use axum::{
|
||||
extract::State,
|
||||
http::StatusCode,
|
||||
response::{Html, IntoResponse},
|
||||
routing::get,
|
||||
Router,
|
||||
};
|
||||
|
||||
use crate::{datasources::posts::PostsDatasource, state::AppState};
|
||||
|
||||
use super::posts::Post;
|
||||
|
||||
pub struct RootRoute;
|
||||
impl RootRoute {
|
||||
pub fn routes() -> Router {
|
||||
|
@ -27,13 +27,7 @@ impl Cache {
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de>,
|
||||
{
|
||||
if !self.inmem.is_connected() {
|
||||
return Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Not connected to cache".to_string(),
|
||||
)));
|
||||
}
|
||||
|
||||
self.is_connected()?;
|
||||
let value: Option<String> = self.inmem.get(&key).await?;
|
||||
|
||||
match value {
|
||||
@ -56,23 +50,34 @@ impl Cache {
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de> + serde::Serialize,
|
||||
{
|
||||
if !self.inmem.is_connected() {
|
||||
return Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Not connected to cache".to_string(),
|
||||
)));
|
||||
}
|
||||
self.is_connected()?;
|
||||
let json_string = match serde_json::to_string::<T>(contents) {
|
||||
Ok(s) => s,
|
||||
Err(_) => {
|
||||
return Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Unable to deserialize contents passed to cache".to_string(),
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
||||
let json_string = serde_json::to_string(contents)?;
|
||||
self.inmem
|
||||
Ok(self
|
||||
.inmem
|
||||
.set(key, json_string, expiration, set_opts, get)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn del(&mut self, key: String) -> Result<(), Box<dyn std::error::Error>> {
|
||||
self.inmem.del(key).await?;
|
||||
Ok(())
|
||||
Ok(self.inmem.del(key).await?)
|
||||
}
|
||||
|
||||
fn is_connected(&mut self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
match self.inmem.is_connected() {
|
||||
true => Ok(()),
|
||||
false => Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Not connected to cache".to_string(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,2 +1,3 @@
|
||||
pub mod datetime;
|
||||
pub mod rss;
|
||||
pub mod sitemap;
|
||||
|
@ -13,7 +13,8 @@ pub struct RssEntry {
|
||||
|
||||
impl From<posts::Post> for RssEntry {
|
||||
fn from(post: posts::Post) -> Self {
|
||||
let web_url = std::env::var("BASE_URI_WEB").expect("Environment variable not found");
|
||||
let web_url =
|
||||
std::env::var("BASE_URI_WEB").expect("Environment variable BASE_URI_WEB not found");
|
||||
let post_url = format!("{}{}{}", web_url, "/posts/", post.post_id.to_string());
|
||||
let author_full_name = format!("{} {}", post.first_name.unwrap(), post.last_name.unwrap());
|
||||
|
||||
@ -58,10 +59,7 @@ pub fn generate_rss(
|
||||
link: &str,
|
||||
posts: &HashMap<String, posts::Post>,
|
||||
) -> String {
|
||||
println!("{:?}", posts);
|
||||
let values = posts.clone().into_values();
|
||||
println!("{:?}", values);
|
||||
|
||||
let rss_entries = values
|
||||
.map(|p| p.into())
|
||||
.map(|r: RssEntry| r.to_item())
|
||||
@ -69,8 +67,9 @@ pub fn generate_rss(
|
||||
|
||||
let safe_title = escape_str_pcdata(title);
|
||||
let safe_description = escape_str_pcdata(description);
|
||||
println!("{:?}", rss_entries);
|
||||
|
||||
// TODO: change the atom link in this string - it's not correct
|
||||
// change it when we know the URL
|
||||
format!(
|
||||
r#"<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
|
||||
|
62
backend/public/src/utils/sitemap.rs
Normal file
62
backend/public/src/utils/sitemap.rs
Normal file
@ -0,0 +1,62 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// One `<url>` element of the generated sitemap.
pub struct SitemapEntry {
    pub location: String,
    pub lastmod: chrono::DateTime<chrono::Utc>,
}

impl SitemapEntry {
    // Renders this entry as a sitemap `<url>` fragment. `lastmod` is
    // emitted as RFC 3339, which the sitemap protocol accepts as its
    // W3C datetime format.
    fn to_item(&self) -> String {
        format!(
            r#"
        <url>
            <loc>{}</loc>
            <lastmod>{}</lastmod>
        </url>
    "#,
            self.location,
            self.lastmod.to_rfc3339(),
        )
    }
}
|
||||
|
||||
pub fn generate_sitemap(entries: &HashMap<String, SitemapEntry>) -> String {
|
||||
let urls = entries
|
||||
.values()
|
||||
.into_iter()
|
||||
.map(|entry| entry.to_item())
|
||||
.collect::<String>();
|
||||
format!(
|
||||
r#"
|
||||
<!-- Generated by Kyouma 1.0.0-SE -->
|
||||
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
|
||||
{}
|
||||
</urlset>
|
||||
"#,
|
||||
urls
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_static_pages(entries: &mut HashMap<String, SitemapEntry>, web_url: &String) {
|
||||
entries.insert(
|
||||
(entries.len() + 1).to_string(),
|
||||
SitemapEntry {
|
||||
location: web_url.clone(),
|
||||
lastmod: chrono::Utc::now(),
|
||||
},
|
||||
);
|
||||
entries.insert(
|
||||
(entries.len() + 1).to_string(),
|
||||
SitemapEntry {
|
||||
location: format!("{}/posts", web_url),
|
||||
lastmod: chrono::Utc::now(),
|
||||
},
|
||||
);
|
||||
entries.insert(
|
||||
(entries.len() + 1).to_string(),
|
||||
SitemapEntry {
|
||||
location: format!("{}/projects", web_url),
|
||||
lastmod: chrono::Utc::now(),
|
||||
},
|
||||
);
|
||||
}
|
1428
backend/task/Cargo.lock
generated
1428
backend/task/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@ -7,6 +7,7 @@ edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.19.2", features = ["full"] }
|
||||
reqwest = { version = "0.12.20", features = ["json", "rustls-tls"] }
|
||||
job_scheduler = "1.2.1"
|
||||
sqlx = { version = "0.8.2", features = [
|
||||
"postgres",
|
||||
@ -20,6 +21,7 @@ futures = "0.3.30"
|
||||
markdown = "1.0.0-alpha.20"
|
||||
serde = { version = "*", features = ["derive"] }
|
||||
serde_yml = "*"
|
||||
aws-sdk-s3 = "1.77.0"
|
||||
aws-sdk-s3 = "1.94.0"
|
||||
aws-config = "1.8"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
||||
|
@ -4,4 +4,12 @@ also known as `task`
|
||||
|
||||
## What is this?
|
||||
|
||||
I don't know yet - hopefully this will be filled out soon.
|
||||
This is a task runner/scheduler program that will fire off various tasks. These tasks can be anything from a blog post import task to an RSS generator task. Additionally, there are task logs inside the database so that you can keep track of tasks when something goes wrong.
|
||||
|
||||
## Things you should know
|
||||
|
||||
`task` uses a `.env` file at the root of the project. The file takes standard environment variables (like environment variables you would put into a `.bashrc` or ad-hoc into your shell).
|
||||
|
||||
For `task` to work properly, please make sure to first create the `.env` file, then fill out the following environment variables:
|
||||
|
||||
- `DATABASE_URL` - needed for communicating to Postgres
|
||||
|
@ -3,7 +3,7 @@ use sqlx::{postgres::PgPoolOptions, Pool, Postgres};
|
||||
use std::env;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tasks::import_posts;
|
||||
use tasks::*;
|
||||
|
||||
//mod config;
|
||||
mod tasks;
|
||||
@ -87,14 +87,24 @@ impl<'a> TaskManager<'a> {
|
||||
for job in &results {
|
||||
tracing::info!("Registering job: {}", job.task_name);
|
||||
|
||||
let pool = Arc::new(self.pool.clone());
|
||||
let schedule = job
|
||||
.schedule
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse schedule '{}': {}", job.schedule, e))?;
|
||||
|
||||
let task = match job.task_id {
|
||||
1 => Box::new(move || import_posts::register(&pool)),
|
||||
let task: Box<dyn Fn() + Send + Sync> = match job.task_id {
|
||||
1 => {
|
||||
let pool = Arc::new(self.pool.clone());
|
||||
Box::new(move || import_posts::register(&pool))
|
||||
}
|
||||
2 => {
|
||||
let pool = Arc::new(self.pool.clone());
|
||||
Box::new(move || upload_rss::register(&pool))
|
||||
}
|
||||
3 => {
|
||||
let pool = Arc::new(self.pool.clone());
|
||||
Box::new(move || upload_sitemap::register(&pool))
|
||||
}
|
||||
id => return Err(format!("Unknown task_id: {}", id).into()),
|
||||
};
|
||||
|
||||
|
@ -64,7 +64,6 @@ async fn import_posts(
|
||||
|
||||
// Process file contents
|
||||
let file_md_contents = process_read_file(&file_path)?;
|
||||
// println!("{:?}", file_md_contents);
|
||||
// Extract metadata
|
||||
let document = crate::utils::front_matter::YamlFrontMatter::parse::<MarkdownMetadata>(
|
||||
&file_md_contents,
|
||||
@ -74,10 +73,8 @@ async fn import_posts(
|
||||
markdown::to_html_with_options(&document.content, &markdown::Options::default());
|
||||
println!("{:?}", content);
|
||||
|
||||
// println!("{:?}", document);
|
||||
let title = document.metadata.title;
|
||||
let content_final = content.unwrap();
|
||||
// println!("{:?}", title);
|
||||
|
||||
// Insert into database
|
||||
let results = sqlx::query_as::<_, InsertPosts>(
|
||||
|
@ -1 +1,3 @@
|
||||
pub mod import_posts;
|
||||
pub mod upload_rss;
|
||||
pub mod upload_sitemap;
|
||||
|
40
backend/task/src/tasks/upload_rss.rs
Normal file
40
backend/task/src/tasks/upload_rss.rs
Normal file
@ -0,0 +1,40 @@
|
||||
use sqlx::{Pool, Postgres};
|
||||
|
||||
use crate::utils::{
|
||||
request::{Request, Response},
|
||||
task_log,
|
||||
{upload::S3ClientConfig, *},
|
||||
};
|
||||
|
||||
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
|
||||
let p = pool.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = upload_rss(&p).await;
|
||||
});
|
||||
}
|
||||
|
||||
async fn upload_rss(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// start task logging
|
||||
task_log::start(2, pool).await?;
|
||||
|
||||
// get request and request the things
|
||||
let request = Request::new();
|
||||
let rss_url = format!("{}/posts/rss", request.base_url);
|
||||
let rss_result = request.request_url::<String>(&rss_url).await.unwrap();
|
||||
|
||||
// upload the sucker to obj storage
|
||||
if let Response::Xml(rss) = rss_result {
|
||||
let client_config = S3ClientConfig::from_env().unwrap();
|
||||
let s3_client = upload::create_s3_client(&client_config).await.unwrap();
|
||||
let _ = upload::upload(
|
||||
&s3_client,
|
||||
client_config.bucket.as_str(),
|
||||
"feed.xml",
|
||||
rss.as_str(),
|
||||
)
|
||||
.await;
|
||||
println!("Finished uploading RSS feed");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
40
backend/task/src/tasks/upload_sitemap.rs
Normal file
40
backend/task/src/tasks/upload_sitemap.rs
Normal file
@ -0,0 +1,40 @@
|
||||
use crate::utils::{
|
||||
request::{Request, Response},
|
||||
task_log,
|
||||
{upload::S3ClientConfig, *},
|
||||
};
|
||||
|
||||
pub fn register(pool: &sqlx::Pool<sqlx::Postgres>) {
|
||||
let p = pool.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = upload_sitemap(&p).await;
|
||||
});
|
||||
}
|
||||
|
||||
async fn upload_sitemap(
|
||||
pool: &sqlx::Pool<sqlx::Postgres>,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO:: get sitemap and upload it to bucket??
|
||||
task_log::start(3, pool).await?;
|
||||
|
||||
// get request and request the things
|
||||
let request = Request::new();
|
||||
let sitemap_url = format!("{}/posts/sitemap", request.base_url);
|
||||
let sitemap_result = request.request_url::<String>(&sitemap_url).await;
|
||||
|
||||
// upload the sucker to obj storage
|
||||
if let Response::Xml(sitemap) = sitemap_result {
|
||||
let client_config = S3ClientConfig::from_env().unwrap();
|
||||
let s3_client = upload::create_s3_client(&client_config).await.unwrap();
|
||||
let _ = upload::upload(
|
||||
&s3_client,
|
||||
client_config.bucket.as_str(),
|
||||
"sitemap.xml",
|
||||
sitemap.as_str(),
|
||||
)
|
||||
.await;
|
||||
println!("Finished uploading sitemap!");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
@ -21,10 +21,7 @@ impl YamlFrontMatter {
|
||||
markdown: &str,
|
||||
) -> Result<Document, Box<dyn std::error::Error>> {
|
||||
let yaml = YamlFrontMatter::extract(markdown)?;
|
||||
println!("File front matter metadata (raw): {:?}", yaml.0);
|
||||
// println!("File content: {:?}", yaml.1);
|
||||
let clean_yaml = YamlFrontMatter::unescape_str(&yaml.0);
|
||||
println!("File front matter metadata (clean): {:?}", clean_yaml);
|
||||
let metadata = match YamlFrontMatter::from_yaml_str(clean_yaml.as_str()) {
|
||||
Ok(m) => m,
|
||||
Err(e) => {
|
||||
|
@ -1,2 +1,4 @@
|
||||
pub mod front_matter;
|
||||
pub mod request;
|
||||
pub mod task_log;
|
||||
pub mod upload;
|
||||
|
85
backend/task/src/utils/request.rs
Normal file
85
backend/task/src/utils/request.rs
Normal file
@ -0,0 +1,85 @@
|
||||
use reqwest::StatusCode;
|
||||
use std::env;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Thin wrapper around a configured `reqwest::Client` plus the API base URL.
#[derive(Debug)]
pub struct Request<'a> {
    pub client: reqwest::Client,
    // Base API URL, read from BASE_URI_API when `new()` runs.
    pub base_url: Box<str>,
    // NOTE(review): always set to None in `new()` and never written in
    // this file — looks like an unused field; candidate for removal.
    pub full_url: Option<&'a str>,
}
|
||||
|
||||
/// Decoded HTTP response body, selected by the `content-type` header.
#[derive(Debug)]
pub enum Response<T> {
    /// `application/json`, deserialized into `T`.
    Json(T),
    /// `application/xml`, kept as the raw body string.
    Xml(String),
    /// Any `text/*` body.
    Text(String),
    /// Fallback for every other content type: raw bytes.
    Bytes(Vec<u8>),
}
|
||||
|
||||
impl<'a> Request<'a> {
|
||||
pub fn new() -> Self {
|
||||
Request {
|
||||
client: reqwest::ClientBuilder::new()
|
||||
.use_rustls_tls()
|
||||
.timeout(Duration::from_secs(30))
|
||||
.build()
|
||||
.unwrap(),
|
||||
base_url: env::var("BASE_URI_API")
|
||||
.expect("Environment variable BASE_URI_API is not found")
|
||||
.into_boxed_str(),
|
||||
full_url: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn request_url<T>(
|
||||
&self,
|
||||
url: &String,
|
||||
) -> Result<Response<T>, Box<dyn std::error::Error>>
|
||||
where
|
||||
T: for<'de> serde::Deserialize<'de>,
|
||||
{
|
||||
println!("{}", url);
|
||||
let api_result = match self.client.get(url).send().await {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Err(Box::new(e)),
|
||||
};
|
||||
|
||||
match api_result.status() {
|
||||
StatusCode::OK => {
|
||||
// TODO: handle errors here
|
||||
let content_type = api_result
|
||||
.headers()
|
||||
.get("content-type")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap();
|
||||
|
||||
if content_type.contains("application/json") {
|
||||
match api_result.json::<T>().await {
|
||||
Ok(j) => Ok(Response::Json(j)),
|
||||
Err(e) => return Err(Box::new(e)),
|
||||
}
|
||||
} else if content_type.contains("application/xml") {
|
||||
match api_result.text().await {
|
||||
Ok(x) => Ok(Response::Xml(x)),
|
||||
Err(e) => return Err(Box::new(e)),
|
||||
}
|
||||
} else if content_type.starts_with("text/") {
|
||||
match api_result.text().await {
|
||||
Ok(t) => Ok(Response::Text(t)),
|
||||
Err(e) => return Err(Box::new(e)),
|
||||
}
|
||||
} else {
|
||||
match api_result.bytes().await {
|
||||
Ok(b) => Ok(Response::Bytes(b.to_vec())),
|
||||
Err(e) => Err(Box::new(e)),
|
||||
}
|
||||
}
|
||||
}
|
||||
status => Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("Unexpected status code: {}", status),
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
73
backend/task/src/utils/upload.rs
Normal file
73
backend/task/src/utils/upload.rs
Normal file
@ -0,0 +1,73 @@
|
||||
use aws_config::{BehaviorVersion, Region};
|
||||
use aws_sdk_s3::{config::Credentials, Client, Config};
|
||||
use std::env;
|
||||
|
||||
/// Connection settings for Linode Object Storage (S3-compatible).
#[derive(Debug)]
pub struct S3ClientConfig {
    pub access_key: String,
    secret_key: String,
    endpoint: String,
    pub bucket: String,
    region: String,
}

impl S3ClientConfig {
    /// Loads the configuration from environment variables.
    ///
    /// `LINODE_ACCESS_KEY`, `LINODE_SECRET_KEY`, and `LINODE_BUCKET` are
    /// required and produce a descriptive error when missing;
    /// `LINODE_ENDPOINT` and `LINODE_REGION` fall back to the us-ord
    /// defaults.
    pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
        Ok(S3ClientConfig {
            access_key: env::var("LINODE_ACCESS_KEY")
                .map_err(|_| "LINODE_ACCESS_KEY environment variable not set")?,
            secret_key: env::var("LINODE_SECRET_KEY")
                .map_err(|_| "LINODE_SECRET_KEY environment variable not set")?,
            endpoint: env::var("LINODE_ENDPOINT")
                .unwrap_or_else(|_| "us-ord-1.linodeobjects.com".to_string()),
            bucket: env::var("LINODE_BUCKET")
                .map_err(|_| "LINODE_BUCKET environment variable not set")?,
            region: env::var("LINODE_REGION").unwrap_or_else(|_| "us-ord".to_string()),
        })
    }
}
|
||||
|
||||
pub async fn create_s3_client(
|
||||
config: &S3ClientConfig,
|
||||
) -> Result<Client, Box<dyn std::error::Error>> {
|
||||
let credentials = Credentials::new(
|
||||
&config.access_key,
|
||||
&config.secret_key,
|
||||
None,
|
||||
None,
|
||||
"linode-object-storage",
|
||||
);
|
||||
|
||||
let s3_config = Config::builder()
|
||||
.behavior_version(BehaviorVersion::latest())
|
||||
.region(Region::new(config.region.clone()))
|
||||
.endpoint_url(format!("https://{}", config.endpoint))
|
||||
.credentials_provider(credentials)
|
||||
.force_path_style(false)
|
||||
.build();
|
||||
|
||||
Ok(Client::from_conf(s3_config))
|
||||
}
|
||||
|
||||
pub async fn upload(
|
||||
client: &Client,
|
||||
bucket: &str,
|
||||
key: &str,
|
||||
content: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("Uploading to Linode Object Storage...");
|
||||
println!("Bucket: {}", bucket);
|
||||
|
||||
let put_object_req = client
|
||||
.put_object()
|
||||
.bucket(bucket)
|
||||
.key(key)
|
||||
.body(content.as_bytes().to_vec().into())
|
||||
.acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead)
|
||||
.content_type("application/rss+xml")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
println!("Upload successful! ETag: {:?}", put_object_req.e_tag());
|
||||
Ok(())
|
||||
}
|
Reference in New Issue
Block a user