my-website-v2/backend/task/src/tasks/upload_rss.rs

use crate::utils::{
    request::{Request, Response},
    task_log,
};
use cache::KeysInterface;
use storage::services::{aws::S3Client, ObjectStorageClient};

pub fn register(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
    let p = pool.clone();
    let c = cache.clone();
    let s3 = s3_client.to_owned();
    tokio::spawn(async move {
        let _ = upload_rss(&p, &c, s3).await;
    });
}

async fn upload_rss(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cache: &cache::Pool,
    s3_client: S3Client,
) -> Result<(), Box<dyn std::error::Error>> {
    // record that this task has started
    task_log::start(2, pool).await?;

    // fetch the current RSS feed from the site
    let request = Request::new();
    let rss_url = format!("{}/posts/rss", request.base_url);
    let rss_result = request.request_url::<String>(&rss_url).await.unwrap();

    // upload the feed to object storage, skipping the upload if the cached copy is unchanged
    if let Response::Xml(rss) = rss_result {
        let cached: &Option<String> = &cache.get(String::from("rss")).await.unwrap_or(None);
        let cache_clone = cache.clone();
        if let Some(cached_value) = cached {
            if *cached_value == rss {
                println!("Response is the same in the cache, exiting");
                return Ok(());
            }
        }
        let r = rss.clone();
        let _ = s3_client
            .put_object(
                s3_client.client_config.bucket.as_str(),
                "feed.xml",
                rss.as_bytes().to_vec(),
            )
            .await?;

        // cache the uploaded feed for an hour so an unchanged response can be skipped next run
        tokio::spawn(async move {
            cache_clone
                .set::<String, String, &String>(
                    String::from("rss"),
                    &r,
                    Some(cache::Expiration::EX(3600)),
                    None,
                    false,
                )
                .await
                .unwrap();
        });
        println!("Finished uploading RSS feed");
    }
    Ok(())
}
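
For context, a minimal sketch of how this task might be wired up at startup. Only the `register` signature above is taken from this file; the `register_all` bootstrap function and the `tasks::upload_rss` module path are assumptions, not part of the source.

use crate::tasks;
use storage::services::aws::S3Client;

// Hypothetical bootstrap: register background tasks once the shared Postgres
// pool, cache pool, and S3 client have been constructed elsewhere.
pub fn register_all(pool: &sqlx::Pool<sqlx::Postgres>, cache: &cache::Pool, s3_client: &S3Client) {
    tasks::upload_rss::register(pool, cache, s3_client);
}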