challenge: 19
This commit is contained in:
parent ff4210d032
commit 91b18785c3

47 .sqlx/query-3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8.json generated Normal file
@@ -0,0 +1,47 @@
{
  "db_name": "PostgreSQL",
  "query": "INSERT INTO quotes (author, quote) VALUES ($1, $2) RETURNING *",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "author",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "quote",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "created_at",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "version",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": [
        "Text",
        "Text"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "3b7ef5200818a0235c556f6745cce37058ce783b0f269a5d65a7f8f7caa966b8"
}
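
Note: the checked-in .sqlx files are offline query metadata; with SQLX_OFFLINE=true (exported in the Nix shellHook further down) the sqlx macros type-check against this recorded describe output instead of a live database, and the lookup is by hash of the exact query string. As a rough sketch, the INSERT metadata above backs a compile-time-checked call of this shape (the Quote type is the one added in src/routes/task_nineteen/mod.rs; the function name here is illustrative, not part of the commit):

    // Sketch only: the query string must match the prepared metadata exactly.
    async fn insert_quote(pool: &sqlx::PgPool, author: &str, quote: &str) -> Result<Quote, sqlx::Error> {
        sqlx::query_as!(
            Quote,
            "INSERT INTO quotes (author, quote) VALUES ($1, $2) RETURNING *",
            author,
            quote
        )
        .fetch_one(pool)
        .await
    }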

20 .sqlx/query-445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd.json generated Normal file
@@ -0,0 +1,20 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT COUNT(*) FROM quotes",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "count",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": []
    },
    "nullable": [
      null
    ]
  },
  "hash": "445e2280a7a844bce2e925b1607559fe37d45100170b9b81f1014906b95a09fd"
}

14 .sqlx/query-aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "DELETE FROM quotes WHERE id = $1",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Uuid"
      ]
    },
    "nullable": []
  },
  "hash": "aa85e5321f177323f64b90e9d7d2260f9549a3ae31818a58a1b6b68ebef2d1c1"
}

46 .sqlx/query-b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407.json generated Normal file
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM quotes WHERE id = $1",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "author",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "quote",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "created_at",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "version",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": [
        "Uuid"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "b0c4505a0e16eb591480b09ffdf7ea855ef6918b1bc38c84d5448111dbd37407"
}

48 .sqlx/query-c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af.json generated Normal file
@@ -0,0 +1,48 @@
{
  "db_name": "PostgreSQL",
  "query": "UPDATE quotes SET author = $2, quote = $3, version = version + 1 WHERE id = $1 RETURNING *",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "author",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "quote",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "created_at",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "version",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": [
        "Uuid",
        "Text",
        "Text"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "c3c72bb6489000f201b396dfcbcddbbfc3baaf7c6721d8bb26d95f53141500af"
}

47 .sqlx/query-c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77.json generated Normal file
@@ -0,0 +1,47 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT * FROM quotes ORDER BY created_at LIMIT $1 OFFSET $2",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Uuid"
      },
      {
        "ordinal": 1,
        "name": "author",
        "type_info": "Text"
      },
      {
        "ordinal": 2,
        "name": "quote",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "created_at",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 4,
        "name": "version",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": [
        "Int8",
        "Int8"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "c49ca2c648ff4739bee0a110a54363d3c53e699c393ce0927c54aca5eac39e77"
}

15 Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"

[dependencies]
axum = { version = "0.7.4", features = ["query"] }
axum = { version = "0.7.4", features = ["query", "macros"] }
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0.133"
shuttle-axum = "0.49.0"
@@ -18,12 +18,23 @@ rand = { version = "0.8.5", optional = true }
axum-extra = { version = "0.9.6", features = ["cookie"] }
chrono = "0.4.39"
tracing = "0.1.41"
shuttle-shared-db = { version = "0.49.0", features = [
    "postgres",
    "sqlx",
], optional = true }
sqlx = { version = "0.8.2", features = [
    "runtime-tokio",
    "uuid",
    "chrono",
    "migrate",
] }
uuid = { version = "1.11.0", features = ["v4"] }

[dev-dependencies]
axum-test = "16.4.0"

[features]
default = ["jsonwebtoken"]
default = ["shuttle-shared-db"]
task1-9 = ["cargo-manifest", "serde_yml", "toml"]
task12 = ["rand"]
task16 = ["jsonwebtoken"]

10 docker-compose.yml Normal file
@@ -0,0 +1,10 @@
version: '3'
services:
  db:
    image: postgres:17
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: my_database
    ports:
      - "5432:5432"

@@ -25,6 +25,7 @@
  rustToolchain
  clippy
  sqlx-cli
  cargo-shuttle
  cargo-edit
@@ -36,6 +37,8 @@
];

shellHook = ''
  export SQLX_OFFLINE=true
  export DATABASE_URL="postgres://postgres:postgres@localhost:5432/my_database"
  export PATH=${rustToolchain}/bin:$PATH
  export RUSTC_VERSION=$(rustc --version)
  export RUST_SRC_PATH="${rustToolchain}/lib/rustlib/src/rust/library"
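
Note: the docker-compose service and the DATABASE_URL exported in this shellHook describe the same local Postgres instance. A minimal sketch of connecting to it and applying the new migrations for local development (essentially what main.rs and the integration tests at the bottom of this commit already do; the helper name is mine):

    // Sketch: assumes the compose service above is running and the migrations/ dir is present.
    async fn local_pool() -> sqlx::PgPool {
        let pool = sqlx::postgres::PgPoolOptions::new()
            .max_connections(5)
            .connect("postgres://postgres:postgres@localhost:5432/my_database")
            .await
            .expect("Failed to connect to local Postgres");
        sqlx::migrate!()
            .run(&pool)
            .await
            .expect("Failed to migrate database");
        pool
    }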

2 migrations/20241222182819_creation.down.sql Normal file
@@ -0,0 +1,2 @@
-- Add down migration script here
DROP TABLE IF EXISTS quotes;

8 migrations/20241222182819_creation.up.sql Normal file
@@ -0,0 +1,8 @@
-- Add up migration script here
CREATE TABLE IF NOT EXISTS quotes (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    author TEXT NOT NULL,
    quote TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    version INT NOT NULL DEFAULT 1
);

19 src/lib.rs
@@ -1,7 +1,10 @@
mod routes;

use axum::routing::{delete, put};
use routes::task_nineteen::{cite, draft, list, remove, reset, undo};
#[cfg(feature = "task12")]
use routes::{board, place, random_board, reset, Board};
#[cfg(feature = "task16")]
use routes::{decode, unwrap, wrap};
#[cfg(feature = "task1-9")]
use routes::{
@@ -10,7 +13,7 @@ use routes::{
};

#[allow(unused_imports)]
pub fn router() -> axum::Router {
pub fn router(pool: Option<sqlx::PgPool>) -> axum::Router {
    use axum::{
        routing::{get, post},
        Router,
@@ -41,8 +44,20 @@ pub fn router() -> axum::Router {
        .route("/12/random-board", get(random_board))
        .with_state(Board::new());

    #[cfg(feature = "task16")]
    Router::new()
        .route("/16/wrap", post(wrap))
        .route("/16/unwrap", get(unwrap))
        .route("/16/decode", post(decode))
        .route("/16/decode", post(decode));

    pool.map_or_else(Router::new, |pool| {
        Router::new()
            .route("/19/reset", post(reset))
            .route("/19/draft", post(draft))
            .route("/19/undo/:id", put(undo))
            .route("/19/remove/:id", delete(remove))
            .route("/19/cite/:id", get(cite))
            .route("/19/list", get(list))
            .with_state(pool)
    })
}
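
Note: router now takes an Option<sqlx::PgPool>; the /19/* routes and their pool state are only mounted when a pool is supplied, and map_or_else falls back to an empty Router otherwise. Illustrative call sites (assuming a pool: sqlx::PgPool is already in scope; not part of the commit):

    // With a database, as main.rs does below:
    let app = itsscb_shuttlings_cch24::router(Some(pool));
    // Without one, e.g. for feature-gated builds that never hit /19/*:
    let app_no_db = itsscb_shuttlings_cch24::router(None);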

14 src/main.rs
@@ -1,5 +1,6 @@
use itsscb_shuttlings_cch24::router;

#[cfg(feature = "task12")]
#[shuttle_runtime::main]
#[allow(clippy::unused_async)]
async fn main() -> shuttle_axum::ShuttleAxum {
@@ -7,3 +8,16 @@ async fn main() -> shuttle_axum::ShuttleAxum {

    Ok(router.into())
}

#[shuttle_runtime::main]
#[allow(clippy::unused_async)]
async fn main(#[shuttle_shared_db::Postgres] pool: sqlx::PgPool) -> shuttle_axum::ShuttleAxum {
    sqlx::migrate!()
        .run(&pool)
        .await
        .expect("Failed to migrate database");

    let router = router(Some(pool));

    Ok(router.into())
}

@@ -3,10 +3,13 @@ mod task_twelve;
#[cfg(feature = "task12")]
pub use task_twelve::{board, game::Board, place, random_board, reset};

// #[cfg(feature = "task16")]
#[cfg(feature = "task16")]
mod task_sixteen;
#[cfg(feature = "task16")]
pub use task_sixteen::{decode, unwrap, wrap};

pub mod task_nineteen;

#[cfg(feature = "task1-9")]
mod hello_bird;

27 src/routes/task_nineteen/cite.rs Normal file
@@ -0,0 +1,27 @@
use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::IntoResponse,
};
use tracing::{error, instrument, trace};

use super::db;

#[instrument(skip(pool))]
#[axum::debug_handler]
pub async fn cite(State(pool): State<sqlx::PgPool>, Path(id): Path<String>) -> impl IntoResponse {
    let id = match uuid::Uuid::parse_str(&id) {
        Ok(id) => id,
        Err(e) => return (StatusCode::NOT_FOUND, e.to_string()).into_response(),
    };
    match db::get(&pool, id).await.map_err(|e| {
        error!("Error: {e}");
        (StatusCode::NOT_FOUND, e.to_string())
    }) {
        Ok(quote) => {
            trace!("{}", quote.to_string());
            (StatusCode::OK, quote.to_string()).into_response()
        }
        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
    }
}

99 src/routes/task_nineteen/db.rs Normal file
@@ -0,0 +1,99 @@
use super::Quote;

#[tracing::instrument(skip(pool))]
pub async fn reset_db(pool: &sqlx::PgPool) -> Result<(), sqlx::Error> {
    sqlx::query("DELETE FROM quotes")
        .execute(pool)
        .await
        .map(|_| ())
}

#[tracing::instrument(skip(pool))]
pub async fn draft(pool: &sqlx::PgPool, author: &str, quote: &str) -> Result<Quote, sqlx::Error> {
    let quote = sqlx::query_as!(
        Quote,
        "INSERT INTO quotes (author, quote) VALUES ($1, $2) RETURNING *",
        author,
        quote
    )
    .fetch_one(pool)
    .await?;

    Ok(quote)
}

#[tracing::instrument(skip(pool))]
pub async fn undo(
    pool: &sqlx::PgPool,
    id: uuid::Uuid,
    author: &str,
    quote: &str,
) -> Result<Quote, sqlx::Error> {
    let quote = sqlx::query_as!(
        Quote,
        "UPDATE quotes SET author = $2, quote = $3, version = version + 1 WHERE id = $1 RETURNING *",
        id,
        author,
        quote
    )
    .fetch_one(pool)
    .await?;

    Ok(quote)
}

#[tracing::instrument(skip(pool))]
pub async fn get(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<Quote, sqlx::Error> {
    let quote = sqlx::query_as!(Quote, "SELECT * FROM quotes WHERE id = $1", id)
        .fetch_one(pool)
        .await?;

    Ok(quote)
}

#[tracing::instrument(skip(pool))]
pub async fn list(
    pool: &sqlx::PgPool,
    page: Option<u32>,
) -> Result<(Vec<Quote>, u32, Option<u32>), sqlx::Error> {
    let limit = 3i64;
    #[allow(clippy::cast_lossless)]
    let offset = page.map_or(0i64, |page| (limit * (page - 1) as i64));

    let quotes = sqlx::query_as!(
        Quote,
        "SELECT * FROM quotes ORDER BY created_at LIMIT $1 OFFSET $2",
        limit,
        offset
    )
    .fetch_all(pool)
    .await?;
    let quotes_count: i64 = sqlx::query_scalar!("SELECT COUNT(*) FROM quotes")
        .fetch_one(pool)
        .await?
        .expect("Failed to get count");
    let page = page.unwrap_or(1);

    #[allow(
        clippy::cast_lossless,
        clippy::cast_sign_loss,
        clippy::cast_possible_truncation
    )]
    let (page, next_page) = if quotes_count > (page * limit as u32).into() {
        (page, Some(page + 1))
    } else {
        (page, None)
    };

    Ok((quotes, page, next_page))
}

#[tracing::instrument(skip(pool))]
pub async fn remove(pool: &sqlx::PgPool, id: uuid::Uuid) -> Result<Quote, sqlx::Error> {
    let quote = get(pool, id).await?;
    sqlx::query_as!(Quote, "DELETE FROM quotes WHERE id = $1", id)
        .execute(pool)
        .await?;

    Ok(quote)
}
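
Note: db::list pages in fixed blocks of three: OFFSET is 3 * (page - 1), and a next page is only reported while the total row count still exceeds page * 3. A small standalone illustration of that arithmetic (names are mine, not part of the commit; assumes page >= 1):

    // Illustration of the paging math used in db::list, with limit = 3 as above.
    fn next_page(total: i64, page: u32) -> Option<u32> {
        let limit = 3i64;
        // e.g. total = 7: page 1 -> offset 0, 7 > 3, so Some(2);
        //                 page 3 -> offset 6, 7 > 9 fails, so None.
        let _offset = limit * i64::from(page - 1);
        if total > i64::from(page) * limit {
            Some(page + 1)
        } else {
            None
        }
    }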

30 src/routes/task_nineteen/draft.rs Normal file
@@ -0,0 +1,30 @@
use axum::{extract::State, http::StatusCode, response::IntoResponse, Json};
use serde::{Deserialize, Serialize};
use tracing::{error, info, instrument};

use super::db;

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DraftRequest {
    author: String,
    quote: String,
}

#[instrument(skip(pool))]
pub async fn draft(
    State(pool): State<sqlx::PgPool>,
    Json(request): Json<DraftRequest>,
) -> impl IntoResponse {
    match db::draft(&pool, &request.author, &request.quote)
        .await
        .map_err(|e| {
            error!("Error: {e}");
            (StatusCode::INTERNAL_SERVER_ERROR, e.to_string())
        }) {
        Ok(quote) => {
            info!("{}", quote.to_string());
            (StatusCode::CREATED, quote.to_string()).into_response()
        }
        Err((status, message)) => (status, message).into_response(),
    }
}

96 src/routes/task_nineteen/list.rs Normal file
@@ -0,0 +1,96 @@
use std::{
    collections::HashMap,
    fmt::{self, Display, Formatter},
    str::FromStr,
};

use axum::{
    extract::{Query, State},
    http::StatusCode,
    response::IntoResponse,
};
use serde::{de, Deserialize, Deserializer, Serialize};
use tracing::{error, info, instrument};

use super::{db, Quote};

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ListResponse {
    page: u32,
    next_token: Option<String>,
    quotes: Vec<Quote>,
}

impl ListResponse {
    pub fn new(page: u32, next_page: Option<u32>, quotes: Vec<Quote>) -> Self {
        let next_token = next_page.map(|page| format!("{page:0>16}"));
        Self {
            page,
            next_token,
            quotes,
        }
    }
}

impl Display for ListResponse {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            serde_json::to_string(self).expect("Failed to serialize ListResponse")
        )
    }
}

fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
    D: Deserializer<'de>,
    T: FromStr,
    T::Err: fmt::Display,
{
    let opt = Option::<String>::deserialize(de)?;
    match opt.as_deref() {
        None | Some("") => Ok(None),
        Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Token(#[serde(default, deserialize_with = "empty_string_as_none")] Option<String>);

#[instrument(skip(pool))]
#[axum::debug_handler]
pub async fn list(
    State(pool): State<sqlx::PgPool>,
    Query(token): Query<HashMap<String, String>>,
) -> impl IntoResponse {
    let next_page = match token.get("token") {
        Some(token) => {
            let t = token
                .chars()
                .last()
                .and_then(|c| c.to_string().parse::<u32>().ok());

            if t.is_some() {
                t
            } else {
                return (StatusCode::BAD_REQUEST, "Invalid token".to_string()).into_response();
            }
        }
        None => None,
    };

    match db::list(&pool, next_page).await.map_err(|e| {
        error!("Error: {e}");
        (StatusCode::NOT_FOUND, e.to_string())
    }) {
        Ok((quotes, page, next)) => {
            let resp = ListResponse::new(page, next, quotes);
            info!("{}", resp.to_string()); // Changed from error to info
            (StatusCode::OK, resp.to_string()).into_response()
        }
        Err((code, message)) => {
            error!("{}: {}", code, message);
            (code, message).into_response()
        }
    }
}
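
Note: next_token is the next page number zero-padded to 16 characters, and the handler above recovers a page by parsing only the token's last character, so the round trip holds for single-digit pages. A small check of that format, as an illustration only (not part of the commit):

    #[test]
    fn token_round_trip() {
        let page: u32 = 2;
        let token = format!("{page:0>16}"); // "0000000000000002"
        let parsed = token
            .chars()
            .last()
            .and_then(|c| c.to_string().parse::<u32>().ok());
        assert_eq!(parsed, Some(page));
    }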

39 src/routes/task_nineteen/mod.rs Normal file
@@ -0,0 +1,39 @@
mod cite;
mod db;
mod draft;
mod list;
mod remove;
mod reset;
mod undo;

use std::fmt::{self, Display, Formatter};

pub use cite::cite;
pub use draft::draft;
pub use list::list;
pub use remove::remove;
pub use undo::undo;

pub use reset::reset;
use serde::{Deserialize, Serialize};
use sqlx::types::uuid;

#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct Quote {
    id: uuid::Uuid,
    author: String,
    #[allow(clippy::struct_field_names)]
    quote: String,
    created_at: chrono::DateTime<chrono::Utc>,
    version: i32,
}

impl Display for Quote {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{}",
            serde_json::to_string(self).expect("Failed to serialize Quote")
        )
    }
}

27 src/routes/task_nineteen/remove.rs Normal file
@@ -0,0 +1,27 @@
use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::IntoResponse,
};
use tracing::{error, info, instrument};

use super::db;

#[instrument(skip(pool))]
#[axum::debug_handler]
pub async fn remove(State(pool): State<sqlx::PgPool>, Path(id): Path<String>) -> impl IntoResponse {
    let id = match uuid::Uuid::parse_str(&id) {
        Ok(id) => id,
        Err(e) => return (StatusCode::NOT_FOUND, e.to_string()).into_response(),
    };
    match db::remove(&pool, id).await.map_err(|e| {
        error!("Error: {e}");
        (StatusCode::NOT_FOUND, e.to_string())
    }) {
        Ok(quote) => {
            info!("{}", quote.to_string());
            (StatusCode::OK, quote.to_string()).into_response()
        }
        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
    }
}

16 src/routes/task_nineteen/reset.rs Normal file
@@ -0,0 +1,16 @@
use axum::{extract::State, http::StatusCode, response::IntoResponse};
use tracing::{error, instrument};

use super::db;

#[instrument(skip(pool))]
pub async fn reset(State(pool): State<sqlx::PgPool>) -> impl IntoResponse {
    db::reset_db(&pool)
        .await
        .map_err(|e| {
            error!("Error: {e}");
            (StatusCode::INTERNAL_SERVER_ERROR, e.to_string())
        })
        .unwrap();
    StatusCode::OK
}

41 src/routes/task_nineteen/undo.rs Normal file
@@ -0,0 +1,41 @@
use axum::{
    extract::{Path, State},
    http::StatusCode,
    response::IntoResponse,
    Json,
};
use serde::{Deserialize, Serialize};
use tracing::{error, info, instrument};

use super::db;

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct UndoRequest {
    author: String,
    quote: String,
}

#[instrument(skip(pool))]
#[axum::debug_handler]
pub async fn undo(
    State(pool): State<sqlx::PgPool>,
    Path(id): Path<String>,
    Json(request): Json<UndoRequest>,
) -> impl IntoResponse {
    let id = match uuid::Uuid::parse_str(&id) {
        Ok(id) => id,
        Err(e) => return (StatusCode::BAD_REQUEST, e.to_string()).into_response(),
    };
    match db::undo(&pool, id, &request.author, &request.quote)
        .await
        .map_err(|e| {
            error!("Error: {e}");
            (StatusCode::NOT_FOUND, e.to_string())
        }) {
        Ok(quote) => {
            info!("{}", quote.to_string());
            (StatusCode::OK, quote.to_string()).into_response()
        }
        Err((_, message)) => (StatusCode::NOT_FOUND, message).into_response(),
    }
}

@@ -1,9 +1,195 @@
#[cfg(feature = "task19")]
// #[cfg(feature = "task19")]
mod task_nineteen {
    use axum::http::StatusCode;
    use axum_test::TestServer;
    use itsscb_shuttlings_cch24::router;
    use serde_json::json;
    use sqlx::postgres::PgPoolOptions;

    fn test_server() -> TestServer {
        TestServer::new(router()).unwrap()
    async fn test_server() -> TestServer {
        let pool = PgPoolOptions::new()
            .max_connections(5)
            .connect("postgres://postgres:postgres@localhost/my_database")
            .await
            .unwrap();

        sqlx::migrate!()
            .run(&pool)
            .await
            .expect("Failed to migrate database");

        TestServer::new(router(Some(pool))).unwrap()
    }

    #[tokio::test]
    async fn test_reset() {
        let server = test_server().await;
        let response = server.post("/19/reset").await;
        response.assert_status_ok();
    }

    #[tokio::test]
    async fn test_draft() {
        let server = test_server().await;
        let response = server
            .post("/19/draft")
            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status(StatusCode::CREATED);

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);
    }

    #[tokio::test]
    async fn test_cite() {
        let server = test_server().await;
        let response = server
            .post("/19/draft")
            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status(StatusCode::CREATED);

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let id = response.json::<serde_json::Value>()["id"]
            .as_str()
            .unwrap()
            .to_string();

        let response = server.get(&format!("/19/cite/{id}")).await;
        // dbg!(&response);

        response.assert_status_ok();

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let response = server.get("/19/cite/asdfasdf").await;
        response.assert_status_not_found();
    }

    #[tokio::test]
    // #[ignore]
    async fn test_list() {
        let server = test_server().await;
        let response = server
            .post("/19/draft")
            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status(StatusCode::CREATED);

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let response = server.get("/19/list?token=fadsfasdfasf").await;
        dbg!(&response);

        response.assert_status_ok();
    }

    #[tokio::test]
    async fn test_remove() {
        let server = test_server().await;
        let response = server
            .post("/19/draft")
            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status(StatusCode::CREATED);

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let id = response.json::<serde_json::Value>()["id"]
            .as_str()
            .unwrap()
            .to_string();

        let response = server.delete(&format!("/19/remove/{id}")).await;

        response.assert_status_ok();

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let response = server.delete("/19/remove/asdfasdf").await;
        response.assert_status_not_found();
    }

    #[tokio::test]
    async fn test_undo() {
        let server = test_server().await;
        let response = server
            .post("/19/draft")
            .text(r#"{"author":"Santa","quote":"TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status(StatusCode::CREATED);

        let val = json!({
            "author": "Santa",
            "quote": "TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let id = response.json::<serde_json::Value>()["id"]
            .as_str()
            .unwrap()
            .to_string();
        let response = server
            .put(&format!("/19/undo/{id}"))
            .text(r#"{"author":"Santa","quote":"updated TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status_ok();

        let val = json!({
            "author": "Santa",
            "quote": "updated TEST QUOTE"
        });
        response.assert_json_contains(&val);

        let response = server
            .put(&format!("/19/undo/{id}"))
            .text(r#"{"author":"NOT SANTA","quote":""}"#)
            .content_type("application/json")
            .await;
        response.assert_status_ok();

        let val = json!({
            "author": "NOT SANTA",
            "quote": ""
        });
        response.assert_json_contains(&val);

        let response = server
            .put("/19/undo/asdfasdf")
            .text(r#"{"author":"Santa","quote":"updated TEST QUOTE"}"#)
            .content_type("application/json")
            .await;
        response.assert_status_bad_request();
    }
}