From 91b18785c3c19812946a2001b44d88bbb6ea784f Mon Sep 17 00:00:00 2001
From: itsscb
Date: Mon, 23 Dec 2024 00:04:43 +0100
Subject: [PATCH] challenge: 19

---
 ...ce37058ce783b0f269a5d65a7f8f7caa966b8.json |  47 +++++
 ...559fe37d45100170b9b81f1014906b95a09fd.json |  20 ++
 ...2260f9549a3ae31818a58a1b6b68ebef2d1c1.json |  14 ++
 ...7ea855ef6918b1bc38c84d5448111dbd37407.json |  46 +++++
 ...ddbbfc3baaf7c6721d8bb26d95f53141500af.json |  48 +++++
 ...363d3c53e699c393ce0927c54aca5eac39e77.json |  47 +++++
 Cargo.toml                                    |  15 +-
 docker-compose.yml                            |  10 +
 flake.nix                                     |   3 +
 migrations/20241222182819_creation.down.sql   |   2 +
 migrations/20241222182819_creation.up.sql     |   8 +
 src/lib.rs                                    |  19 +-
 src/main.rs                                   |  14 ++
 src/routes/mod.rs                             |   5 +-
 src/routes/task_nineteen/cite.rs              |  27 +++
 src/routes/task_nineteen/db.rs                |  99 +++++++++
 src/routes/task_nineteen/draft.rs             |  30 +++
 src/routes/task_nineteen/list.rs              |  96 +++++++++
 src/routes/task_nineteen/mod.rs               |  39 ++++
 src/routes/task_nineteen/remove.rs            |  27 +++
 src/routes/task_nineteen/reset.rs             |  16 ++
 src/routes/task_nineteen/undo.rs              |  41 ++++
 tests/task_nineteen/main.rs                   | 192 +++++++++++++++++-
 23 files changed, 857 insertions(+), 8 deletions(-)
 create mode 100644 .sqlx/query-3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8.json
 create mode 100644 .sqlx/query-445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd.json
 create mode 100644 .sqlx/query-aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1.json
 create mode 100644 .sqlx/query-b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407.json
 create mode 100644 .sqlx/query-c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af.json
 create mode 100644 .sqlx/query-c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77.json
 create mode 100644 docker-compose.yml
 create mode 100644 migrations/20241222182819_creation.down.sql
 create mode 100644 migrations/20241222182819_creation.up.sql
 create mode 100644 src/routes/task_nineteen/cite.rs
 create mode 100644 src/routes/task_nineteen/db.rs
 create mode 100644 src/routes/task_nineteen/draft.rs
 create mode 100644 src/routes/task_nineteen/list.rs
 create mode 100644 src/routes/task_nineteen/mod.rs
 create mode 100644 src/routes/task_nineteen/remove.rs
 create mode 100644 src/routes/task_nineteen/reset.rs
 create mode 100644 src/routes/task_nineteen/undo.rs

diff --git a/.sqlx/query-3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8.json b/.sqlx/query-3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8.json
new file mode 100644
index 0000000..c97fd2a
--- /dev/null
+++ b/.sqlx/query-3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8.json
@@ -0,0 +1,47 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "INSERT INTO quotes (author, quote) VALUES ($1, $2) RETURNING *",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "author",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "quote",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 4,
+        "name": "version",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      false,
+      false
+    ]
+  },
+  "hash": "3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8"
+}
diff --git a/.sqlx/query-445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd.json b/.sqlx/query-445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd.json
new file mode 100644
index 0000000..2438083
--- /dev/null
+++ b/.sqlx/query-445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd.json
@@ -0,0 +1,20 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT COUNT(*) FROM quotes",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "count",
+        "type_info": "Int8"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      null
+    ]
+  },
+  "hash": "445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd"
+}
diff --git a/.sqlx/query-aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1.json b/.sqlx/query-aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1.json
new file mode 100644
index 0000000..3f8e86f
--- /dev/null
+++ b/.sqlx/query-aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "DELETE FROM quotes WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Uuid"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1"
+}
diff --git a/.sqlx/query-b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407.json b/.sqlx/query-b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407.json
new file mode 100644
index 0000000..7e27ddd
--- /dev/null
+++ b/.sqlx/query-b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407.json
@@ -0,0 +1,46 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT * FROM quotes WHERE id = $1",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "author",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "quote",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 4,
+        "name": "version",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      false,
+      false
+    ]
+  },
+  "hash": "b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407"
+}
diff --git a/.sqlx/query-c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af.json b/.sqlx/query-c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af.json
new file mode 100644
index 0000000..b7becc0
--- /dev/null
+++ b/.sqlx/query-c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af.json
@@ -0,0 +1,48 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE quotes SET author = $2, quote = $3, version = version + 1 WHERE id = $1 RETURNING *",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "author",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "quote",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 4,
+        "name": "version",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Uuid",
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      false,
+      false
+    ]
+  },
+  "hash": "c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af"
+}
diff --git a/.sqlx/query-c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77.json b/.sqlx/query-c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77.json
new file mode 100644
index 0000000..8bdad3b
--- /dev/null
+++ b/.sqlx/query-c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77.json
@@ -0,0 +1,47 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT * FROM quotes ORDER BY created_at LIMIT $1 OFFSET $2",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "author",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "quote",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 4,
+        "name": "version",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Int8"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      false,
+      false
+    ]
+  },
+  "hash": "c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77"
+}
diff --git a/Cargo.toml b/Cargo.toml
index 7d48d9d..bc66c97 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-axum = { version = "0.7.4", features = ["query"] }
+axum = { version = "0.7.4", features = ["query", "macros"] }
 serde = { version = "1.0.215", features = ["derive"] }
 serde_json = "1.0.133"
 shuttle-axum = "0.49.0"
@@ -18,12 +18,23 @@ rand = { version = "0.8.5", optional = true }
 axum-extra = { version = "0.9.6", features = ["cookie"] }
 chrono = "0.4.39"
 tracing = "0.1.41"
+shuttle-shared-db = { version = "0.49.0", features = [
+    "postgres",
+    "sqlx",
+], optional = true }
+sqlx = { version = "0.8.2", features = [
+    "runtime-tokio",
+    "uuid",
+    "chrono",
+    "migrate",
+] }
+uuid = { version = "1.11.0", features = ["v4"] }
 
 [dev-dependencies]
 axum-test = "16.4.0"
 
 [features]
-default = ["jsonwebtoken"]
+default = ["shuttle-shared-db"]
 task1-9 = ["cargo-manifest", "serde_yml", "toml"]
 task12 = ["rand"]
 task16 = ["jsonwebtoken"]
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..d7fb371
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,10 @@
+version: '3'
+services:
+  db:
+    image: postgres:17
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: my_database
+    ports:
+      - "5432:5432"
diff --git a/flake.nix b/flake.nix
index 613f068..53899f6 100644
--- a/flake.nix
+++ b/flake.nix
@@ -25,6 +25,7 @@
             rustToolchain
             clippy
+            sqlx-cli
 
             cargo-shuttle
             cargo-edit
 
@@ -36,6 +37,8 @@
           ];
 
           shellHook = ''
+            export SQLX_OFFLINE=true
+            export DATABASE_URL="postgres://postgres:postgres@localhost:5432/my_database"
             export PATH=${rustToolchain}/bin:$PATH
             export RUSTC_VERSION=$(rustc --version)
             export RUST_SRC_PATH="${rustToolchain}/lib/rustlib/src/rust/library"
diff --git a/migrations/20241222182819_creation.down.sql b/migrations/20241222182819_creation.down.sql
new file mode 100644
index 0000000..403ab52
--- /dev/null
+++ b/migrations/20241222182819_creation.down.sql
@@ -0,0 +1,2 @@
+-- Add down migration script here
+DROP TABLE IF EXISTS quotes;
\ No newline at end of file
diff --git a/migrations/20241222182819_creation.up.sql b/migrations/20241222182819_creation.up.sql
new file mode 100644
index 0000000..b30a5aa
--- /dev/null
+++ b/migrations/20241222182819_creation.up.sql
@@ -0,0 +1,8 @@
+-- Add up migration script here
+CREATE TABLE IF NOT EXISTS quotes (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    author TEXT NOT NULL,
+    quote TEXT NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    version INT NOT NULL DEFAULT 1
+);
\ No newline at end of file
diff --git a/src/lib.rs b/src/lib.rs
index 6582fe4..3166b4e 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,10 @@
 mod routes;
 
+use axum::routing::{delete, put};
+use routes::task_nineteen::{cite, draft, list, remove, reset, undo};
 #[cfg(feature = "task12")]
 use routes::{board, place, random_board, reset, Board};
+#[cfg(feature = "task16")]
 use routes::{decode, unwrap, wrap};
 #[cfg(feature = "task1-9")]
 use routes::{
@@ -10,7 +13,7 @@ use routes::{
 };
 
 #[allow(unused_imports)]
-pub fn router() -> axum::Router {
+pub fn router(pool: Option<sqlx::PgPool>) -> axum::Router {
     use axum::{
         routing::{get, post},
         Router,
@@ -41,8 +44,20 @@ pub fn router() -> axum::Router {
         .route("/12/random-board", get(random_board))
         .with_state(Board::new());
 
+    #[cfg(feature = "task16")]
     Router::new()
         .route("/16/wrap", post(wrap))
         .route("/16/unwrap", get(unwrap))
-        .route("/16/decode", post(decode))
+        .route("/16/decode", post(decode));
+
+    pool.map_or_else(Router::new, |pool| {
+        Router::new()
+            .route("/19/reset", post(reset))
+            .route("/19/draft", post(draft))
+            .route("/19/undo/:id", put(undo))
+            .route("/19/remove/:id", delete(remove))
+            .route("/19/cite/:id", get(cite))
+            .route("/19/list", get(list))
+            .with_state(pool)
+    })
 }
diff --git a/src/main.rs b/src/main.rs
index 325584d..880008e 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,6 @@
 use itsscb_shuttlings_cch24::router;
 
+#[cfg(feature = "task12")]
 #[shuttle_runtime::main]
 #[allow(clippy::unused_async)]
 async fn main() -> shuttle_axum::ShuttleAxum {
@@ -7,3 +8,16 @@
 
     Ok(router.into())
 }
+
+#[shuttle_runtime::main]
+#[allow(clippy::unused_async)]
+async fn main(#[shuttle_shared_db::Postgres] pool: sqlx::PgPool) -> shuttle_axum::ShuttleAxum {
+    sqlx::migrate!()
+        .run(&pool)
+        .await
+        .expect("Failed to migrate database");
+
+    let router = router(Some(pool));
+
+    Ok(router.into())
+}
diff --git a/src/routes/mod.rs b/src/routes/mod.rs
index 884e58c..b9607ef 100644
--- a/src/routes/mod.rs
+++ b/src/routes/mod.rs
@@ -3,10 +3,13 @@ mod task_twelve;
 #[cfg(feature = "task12")]
 pub use task_twelve::{board, game::Board, place, random_board, reset};
 
-// #[cfg(feature = "task16")]
+#[cfg(feature = "task16")]
 mod task_sixteen;
+#[cfg(feature = "task16")]
 pub use task_sixteen::{decode, unwrap, wrap};
+pub mod task_nineteen;
+
 
 #[cfg(feature = "task1-9")]
 mod hello_bird;
 
diff --git a/src/routes/task_nineteen/cite.rs b/src/routes/task_nineteen/cite.rs
new file mode 100644
index 0000000..dbb9d27
--- /dev/null
+++ b/src/routes/task_nineteen/cite.rs
@@ -0,0 +1,27 @@
+use axum::{
+    extract::{Path, State},
+    http::StatusCode,
+    response::IntoResponse,
+};
+use tracing::{error, instrument, trace};
+
+use super::db;
+
+#[instrument(skip(pool))]
+#[axum::debug_handler]
+pub async fn cite(State(pool): State<sqlx::PgPool>, Path(id): Path<String>) -> impl IntoResponse {
+    let id = match uuid::Uuid::parse_str(&id) {
+        Ok(id) => id,
+        Err(e) => return (StatusCode::NOT_FOUND, e.to_string()).into_response(),
+    };
+    match db::get(&pool, id).await.map_err(|e| {
+        error!("Error: {e}");
+        (StatusCode::NOT_FOUND, e.to_string())
+    }) {
+        Ok(quote) => {
+            trace!("{}", quote.to_string());
+            (StatusCode::OK, quote.to_string()).into_response()
+        }
+        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
+    }
+}
diff --git a/src/routes/task_nineteen/db.rs b/src/routes/task_nineteen/db.rs
new file mode 100644
index 0000000..b3a52f9
--- /dev/null
+++ b/src/routes/task_nineteen/db.rs
@@ -0,0 +1,99 @@
+use super::Quote;
+
+#[tracing::instrument(skip(pool))]
+pub async fn reset_db(pool: &sqlx::PgPool) -> Result<(), sqlx::Error> {
+    sqlx::query("DELETE FROM quotes")
+        .execute(pool)
+        .await
+        .map(|_| ())
+}
+
+#[tracing::instrument(skip(pool))]
+pub async fn draft(pool: &sqlx::PgPool, author: &str, quote: &str) -> Result<Quote, sqlx::Error> {
+    let quote = sqlx::query_as!(
+        Quote,
+        "INSERT INTO quotes (author, quote) VALUES ($1, $2) RETURNING *",
+        author,
+        quote
+    )
+    .fetch_one(pool)
+    .await?;
+
+    Ok(quote)
+}
+
+#[tracing::instrument(skip(pool))]
+pub async fn undo(
+    pool: &sqlx::PgPool,
+    id: uuid::Uuid,
+    author: &str,
+    quote: &str,
+) -> Result<Quote, sqlx::Error> {
+    let quote = sqlx::query_as!(
+        Quote,
+        "UPDATE quotes SET author = $2, quote = $3, version = version + 1 WHERE id = $1 RETURNING *",
+        id,
+        author,
+        quote
+    )
+    .fetch_one(pool)
+    .await?;
+
+    Ok(quote)
+}
+
+#[tracing::instrument(skip(pool))]
+pub async fn get(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<Quote, sqlx::Error> {
+    let quote = sqlx::query_as!(Quote, "SELECT * FROM quotes WHERE id = $1", id)
+        .fetch_one(pool)
+        .await?;
+
+    Ok(quote)
+}
+
+#[tracing::instrument(skip(pool))]
+pub async fn list(
+    pool: &sqlx::PgPool,
+    page: Option<u32>,
+) -> Result<(Vec<Quote>, u32, Option<u32>), sqlx::Error> {
+    let limit = 3i64;
+    #[allow(clippy::cast_lossless)]
+    let offset = page.map_or(0i64, |page| (limit * (page - 1) as i64));
+
+    let quotes = sqlx::query_as!(
+        Quote,
+        "SELECT * FROM quotes ORDER BY created_at LIMIT $1 OFFSET $2",
+        limit,
+        offset
+    )
+    .fetch_all(pool)
+    .await?;
+    let quotes_count: i64 = sqlx::query_scalar!("SELECT COUNT(*) FROM quotes")
+        .fetch_one(pool)
+        .await?
+        .expect("Failed to get count");
+    let page = page.unwrap_or(1);
+
+    #[allow(
+        clippy::cast_lossless,
+        clippy::cast_sign_loss,
+        clippy::cast_possible_truncation
+    )]
+    let (page, next_page) = if quotes_count > (page * limit as u32).into() {
+        (page, Some(page + 1))
+    } else {
+        (page, None)
+    };
+
+    Ok((quotes, page, next_page))
+}
+
+#[tracing::instrument(skip(pool))]
+pub async fn remove(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<Quote, sqlx::Error> {
+    let quote = get(pool, id).await?;
+    sqlx::query_as!(Quote, "DELETE FROM quotes WHERE id = $1", id)
+        .execute(pool)
+        .await?;
+
+    Ok(quote)
+}
diff --git a/src/routes/task_nineteen/draft.rs b/src/routes/task_nineteen/draft.rs
new file mode 100644
index 0000000..448b506
--- /dev/null
+++ b/src/routes/task_nineteen/draft.rs
@@ -0,0 +1,30 @@
+use axum::{extract::State, http::StatusCode, response::IntoResponse, Json};
+use serde::{Deserialize, Serialize};
+use tracing::{error, info, instrument};
+
+use super::db;
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct DraftRequest {
+    author: String,
+    quote: String,
+}
+
+#[instrument(skip(pool))]
+pub async fn draft(
+    State(pool): State<sqlx::PgPool>,
+    Json(request): Json<DraftRequest>,
+) -> impl IntoResponse {
+    match db::draft(&pool, &request.author, &request.quote)
+        .await
+        .map_err(|e| {
+            error!("Error: {e}");
+            (StatusCode::INTERNAL_SERVER_ERROR, e.to_string())
+        }) {
+        Ok(quote) => {
+            info!("{}", quote.to_string());
+            (StatusCode::CREATED, quote.to_string()).into_response()
+        }
+        Err((status, message)) => (status, message).into_response(),
+    }
+}
diff --git a/src/routes/task_nineteen/list.rs b/src/routes/task_nineteen/list.rs
new file mode 100644
index 0000000..623d72e
--- /dev/null
+++ b/src/routes/task_nineteen/list.rs
@@ -0,0 +1,96 @@
+use std::{
+    collections::HashMap,
+    fmt::{self, Display, Formatter},
+    str::FromStr,
+};
+
+use axum::{
+    extract::{Query, State},
+    http::StatusCode,
+    response::IntoResponse,
+};
+use serde::{de, Deserialize, Deserializer, Serialize};
+use tracing::{error, info, instrument};
+
+use super::{db, Quote};
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ListResponse {
+    page: u32,
+    next_token: Option<String>,
+    quotes: Vec<Quote>,
+}
+
+impl ListResponse {
+    pub fn new(page: u32, next_page: Option<u32>, quotes: Vec<Quote>) -> Self {
+        let next_token = next_page.map(|page| format!("{page:0>16}"));
+        Self {
+            page,
+            next_token,
+            quotes,
+        }
+    }
+}
+
+impl Display for ListResponse {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(
+            f,
+            "{}",
+            serde_json::to_string(self).expect("Failed to serialize ListResponse")
+        )
+    }
+}
+
+fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
+where
+    D: Deserializer<'de>,
+    T: FromStr,
+    T::Err: fmt::Display,
+{
+    let opt = Option::<String>::deserialize(de)?;
+    match opt.as_deref() {
+        None | Some("") => Ok(None),
+        Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
+    }
+}
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Token(#[serde(default, deserialize_with = "empty_string_as_none")] Option<String>);
+
+#[instrument(skip(pool))]
+#[axum::debug_handler]
+pub async fn list(
+    State(pool): State<sqlx::PgPool>,
+    Query(token): Query<HashMap<String, String>>,
+) -> impl IntoResponse {
+    let next_page = match token.get("token") {
+        Some(token) => {
+            let t = token
+                .chars()
+                .last()
+                .and_then(|c| c.to_string().parse::<u32>().ok());
+
+            if t.is_some() {
+                t
+            } else {
+                return (StatusCode::BAD_REQUEST, "Invalid token".to_string()).into_response();
+            }
+        }
+        None => None,
+    };
+
+    match db::list(&pool, next_page).await.map_err(|e| {
+        error!("Error: {e}");
+        (StatusCode::NOT_FOUND, e.to_string())
+    }) {
+        Ok((quotes, page, next)) => {
+            let resp = ListResponse::new(page, next, quotes);
+            info!("{}", resp.to_string()); // Changed from error to info
+            (StatusCode::OK, resp.to_string()).into_response()
+        }
+        Err((code, message)) => {
+            error!("{}: {}", code, message);
+            (code, message).into_response()
+        }
+    }
+}
diff --git a/src/routes/task_nineteen/mod.rs b/src/routes/task_nineteen/mod.rs
new file mode 100644
index 0000000..b72e1cd
--- /dev/null
+++ b/src/routes/task_nineteen/mod.rs
@@ -0,0 +1,39 @@
+mod cite;
+mod db;
+mod draft;
+mod list;
+mod remove;
+mod reset;
+mod undo;
+
+use std::fmt::{self, Display, Formatter};
+
+pub use cite::cite;
+pub use draft::draft;
+pub use list::list;
+pub use remove::remove;
+pub use undo::undo;
+
+pub use reset::reset;
+use serde::{Deserialize, Serialize};
+use sqlx::types::uuid;
+
+#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
+pub struct Quote {
+    id: uuid::Uuid,
+    author: String,
+    #[allow(clippy::struct_field_names)]
+    quote: String,
+    created_at: chrono::DateTime<chrono::Utc>,
+    version: i32,
+}
+
+impl Display for Quote {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "{}",
+            serde_json::to_string(self).expect("Failed to serialize Quote")
+        )
+    }
+}
diff --git a/src/routes/task_nineteen/remove.rs b/src/routes/task_nineteen/remove.rs
new file mode 100644
index 0000000..e4ca18a
--- /dev/null
+++ b/src/routes/task_nineteen/remove.rs
@@ -0,0 +1,27 @@
+use axum::{
+    extract::{Path, State},
+    http::StatusCode,
+    response::IntoResponse,
+};
+use tracing::{error, info, instrument};
+
+use super::db;
+
+#[instrument(skip(pool))]
+#[axum::debug_handler]
+pub async fn remove(State(pool): State<sqlx::PgPool>, Path(id): Path<String>) -> impl IntoResponse {
+    let id = match uuid::Uuid::parse_str(&id) {
+        Ok(id) => id,
+        Err(e) => return (StatusCode::NOT_FOUND, e.to_string()).into_response(),
+    };
+    match db::remove(&pool, id).await.map_err(|e| {
+        error!("Error: {e}");
+        (StatusCode::NOT_FOUND, e.to_string())
+    }) {
+        Ok(quote) => {
+            info!("{}", quote.to_string());
+            (StatusCode::OK, quote.to_string()).into_response()
+        }
+        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
+    }
+}
diff --git a/src/routes/task_nineteen/reset.rs b/src/routes/task_nineteen/reset.rs
new file mode 100644
index 0000000..6ffa7dd
--- /dev/null
+++ b/src/routes/task_nineteen/reset.rs
@@ -0,0 +1,16 @@
+use axum::{extract::State, http::StatusCode, response::IntoResponse};
+use tracing::{error, instrument};
+
+use super::db;
+
+#[instrument(skip(pool))]
+pub async fn reset(State(pool): State<sqlx::PgPool>) -> impl IntoResponse {
+    db::reset_db(&pool)
+        .await
+        .map_err(|e| {
+            error!("Error: {e}");
+            (StatusCode::INTERNAL_SERVER_ERROR, e.to_string())
+        })
+        .unwrap();
+    StatusCode::OK
+}
diff --git a/src/routes/task_nineteen/undo.rs b/src/routes/task_nineteen/undo.rs
new file mode 100644
index 0000000..73f47ab
--- /dev/null
+++ b/src/routes/task_nineteen/undo.rs
@@ -0,0 +1,41 @@
+use axum::{
+    extract::{Path, State},
+    http::StatusCode,
+    response::IntoResponse,
+    Json,
+};
+use serde::{Deserialize, Serialize};
+use tracing::{error, info, instrument};
+
+use super::db;
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct UndoRequest {
+    author: String,
+    quote: String,
+}
+
+#[instrument(skip(pool))]
+#[axum::debug_handler]
+pub async fn undo(
+    State(pool): State<sqlx::PgPool>,
+    Path(id): Path<String>,
+    Json(request): Json<UndoRequest>,
+) -> impl IntoResponse {
+    let id = match uuid::Uuid::parse_str(&id) {
+        Ok(id) => id,
+        Err(e) => return (StatusCode::BAD_REQUEST, e.to_string()).into_response(),
+    };
+    match db::undo(&pool, id, &request.author, &request.quote)
+        .await
+        .map_err(|e| {
+            error!("Error: {e}");
+            (StatusCode::NOT_FOUND, e.to_string())
+        }) {
+        Ok(quote) => {
+            info!("{}", quote.to_string());
+            (StatusCode::OK, quote.to_string()).into_response()
+        }
+        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
+    }
+}
diff --git a/tests/task_nineteen/main.rs b/tests/task_nineteen/main.rs
index db1fb5f..33afd36 100644
--- a/tests/task_nineteen/main.rs
+++ b/tests/task_nineteen/main.rs
@@ -1,9 +1,195 @@
-#[cfg(feature = "task19")]
+// #[cfg(feature = "task19")]
 mod task_nineteen {
+    use axum::http::StatusCode;
     use axum_test::TestServer;
     use itsscb_shuttlings_cch24::router;
+    use serde_json::json;
+    use sqlx::postgres::PgPoolOptions;
 
-    fn test_server() -> TestServer {
-        TestServer::new(router()).unwrap()
+    async fn test_server() -> TestServer {
+        let pool = PgPoolOptions::new()
+            .max_connections(5)
+            .connect("postgres://postgres:postgres@localhost/my_database")
+            .await
+            .unwrap();
+
+        sqlx::migrate!()
+            .run(&pool)
+            .await
+            .expect("Failed to migrate database");
+
+        TestServer::new(router(Some(pool))).unwrap()
+    }
+
+    #[tokio::test]
+    async fn test_reset() {
+        let server = test_server().await;
+        let response = server.post("/19/reset").await;
+        response.assert_status_ok();
+    }
+
+    #[tokio::test]
+    async fn test_draft() {
+        let server = test_server().await;
+        let response = server
+            .post("/19/draft")
+            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
+            .content_type("application/json")
+            .await;
+        response.assert_status(StatusCode::CREATED);
+
+        let val = json!({
+            "author": "Santa",
+            "quote": "TEST QUOTE"
+        });
+        response.assert_json_contains(&val);
+    }
+
+    #[tokio::test]
+    async fn test_cite() {
+        let server = test_server().await;
+        let response = server
+            .post("/19/draft")
+            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
+            .content_type("application/json")
+            .await;
+        response.assert_status(StatusCode::CREATED);
+
+        let val = json!({
+            "author": "Santa",
+            "quote": "TEST QUOTE"
QUOTE" + }); + response.assert_json_contains(&val); + + let id = response.json::()["id"] + .as_str() + .unwrap() + .to_string(); + + let response = server.get(&format!("/19/cite/{id}")).await; + // dbg!(&response); + + response.assert_status_ok(); + + let val = json!({ + "author": "Santa", + "quote": "TEST QUOTE" + }); + response.assert_json_contains(&val); + + let response = server.get("/19/cite/asdfasdf").await; + response.assert_status_not_found(); + } + + #[tokio::test] + // #[ignore] + async fn test_list() { + let server = test_server().await; + let response = server + .post("/19/draft") + .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#) + .content_type("application/json") + .await; + response.assert_status(StatusCode::CREATED); + + let val = json!({ + "author": "Santa", + "quote": "TEST QUOTE" + }); + response.assert_json_contains(&val); + + let response = server.get("/19/list?token=fadsfasdfasf").await; + dbg!(&response); + + response.assert_status_ok(); + } + + #[tokio::test] + async fn test_remove() { + let server = test_server().await; + let response = server + .post("/19/draft") + .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#) + .content_type("application/json") + .await; + response.assert_status(StatusCode::CREATED); + + let val = json!({ + "author": "Santa", + "quote": "TEST QUOTE" + }); + response.assert_json_contains(&val); + + let id = response.json::()["id"] + .as_str() + .unwrap() + .to_string(); + + let response = server.delete(&format!("/19/remove/{id}")).await; + + response.assert_status_ok(); + + let val = json!({ + "author": "Santa", + "quote": "TEST QUOTE" + }); + response.assert_json_contains(&val); + + let response = server.delete("/19/remove/asdfasdf").await; + response.assert_status_not_found(); + } + + #[tokio::test] + async fn test_undo() { + let server = test_server().await; + let response = server + .post("/19/draft") + .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#) + .content_type("application/json") + .await; + response.assert_status(StatusCode::CREATED); + + let val = json!({ + "author": "Santa", + "quote": "TEST QUOTE" + }); + response.assert_json_contains(&val); + + let id = response.json::()["id"] + .as_str() + .unwrap() + .to_string(); + let response = server + .put(&format!("/19/undo/{id}")) + .text(r#"{"author":"Santa","quote":"updated TEST QUOTE"}"#) + .content_type("application/json") + .await; + response.assert_status_ok(); + + let val = json!({ + "author": "Santa", + "quote": "updated TEST QUOTE" + }); + response.assert_json_contains(&val); + + let response = server + .put(&format!("/19/undo/{id}")) + .text(r#"{"author":"NOT SANTA","quote":""}"#) + .content_type("application/json") + .await; + response.assert_status_ok(); + + let val = json!({ + "author": "NOT SANTA", + "quote": "" + }); + response.assert_json_contains(&val); + + let response = server + .put("/19/undo/asdfasdf") + .text(r#"{"author":"Santa","quote":"updated TEST QUOTE"}"#) + .content_type("application/json") + .await; + response.assert_status_bad_request(); } }