From 8678106283b871e3b962af8e91a2e5a1053fd11e Mon Sep 17 00:00:00 2001 From: David Westgate Date: Mon, 27 May 2024 18:14:12 -0700 Subject: [PATCH] checkpoint for postgres. 70% there --- .cargo/config.toml | 1 - src/api.rs | 74 ++++++++++++------ src/json_store.rs | 178 -------------------------------------------- src/main.rs | 37 ++++----- src/pg_store.rs | 176 +++++++++++++++++++++++++++++++++++++++++++ src/question.rs | 52 ++++++------- src/question_tag.rs | 16 ++++ src/tag.rs | 13 ++++ 8 files changed, 294 insertions(+), 253 deletions(-) delete mode 100644 src/json_store.rs create mode 100644 src/pg_store.rs create mode 100644 src/question_tag.rs create mode 100644 src/tag.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index 0a40fa3..c4b7457 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -2,4 +2,3 @@ POSTGRES_USER = "postgres" POSTGRES_PASSWORD = "DB_PASS_HERE" POSTGRES_HOST = "localhost" -POSTGRES_DBNAME = "questionanswer" \ No newline at end of file diff --git a/src/api.rs b/src/api.rs index fe7a6bf..5ad73bb 100644 --- a/src/api.rs +++ b/src/api.rs @@ -1,10 +1,14 @@ /// All API route handlers of the application -use self::{answer::AnswerDTO, question::QuestionDTO, json_store::Store}; +use self::{ + answer::AnswerDTO, + pg_store::Store, + question::{NewQuestion, Question, QuestionDTO}, +}; use crate::*; const DEFAULT_PAGE: usize = 0; const DEFAULT_PAGE_SIZE: usize = 10; -/// Fetches a single question using a provided id +/// Fetches a single question using a provided id. First we fetch the question, then the questions tags, then build that into a response DTO /// # Parameters /// `id`: Path parmater Id of the question to lookup /// # Returns @@ -14,13 +18,24 @@ pub async fn read_question( State(store): State>>, Path(id): Path, ) -> Response { - match store.read().await.fetch_one_question(id) { - Ok(question) => question.to_dto(id).into_response(), - Err(e) => (StatusCode::NOT_FOUND, e).into_response(), + match store.read().await.fetch_one_question_by_id(id).await { + Some(question) => { + // We could do this in one DB hit with a purpose built store function and different underlying structures, but two hits isn't that bad. + let tags = store + .read() + .await + .fetch_tags_by_property(question.id, "question_id") + .await + .unwrap_or(vec![]); + let response_dto: QuestionDTO = QuestionDTO::new(question, tags); + response_dto.into_response() + } + None => (StatusCode::NOT_FOUND).into_response(), } } /// Fetches all questions +/// Load the questions, then load the tags for each question and build the json response /// # Parameters /// 'page' and 'size' as query paramateres /// # Returns @@ -32,11 +47,25 @@ pub async fn read_questions( let page: usize = pagination.page.unwrap_or(DEFAULT_PAGE); let size: usize = pagination.size.unwrap_or(DEFAULT_PAGE_SIZE); let start: usize = page * size; - ( - StatusCode::OK, - Json(store.read().await.fetch_many_questions(start, size)), - ) - .into_response() + let questions_option = store.read().await.fetch_many_questions(start, size).await; + match questions_option { + Some(questions) => { + let mut response_vec_dto: Vec = vec![]; + for question in questions { + //Not ideal - hitting the database serially for the tags of each invidual question. 
Can be optimized with more complex sql + let tags = store + .read() + .await + .fetch_tags_by_property(question.id, "question_id") + .await + .unwrap_or(vec![]); + let question_dto: QuestionDTO = QuestionDTO::new(question, tags); + response_vec_dto.push(question_dto) + } + (StatusCode::OK, Json(response_vec_dto)).into_response() + } + None => StatusCode::NO_CONTENT.into_response(), + } } /// Creates a new question @@ -48,19 +77,21 @@ pub async fn read_questions( /// Http Unprocessable Entity 422 is returned (implicitlly) for a malformed body pub async fn create_question( State(store): State>>, - Json(question_dto): Json, + Json(new_question): Json, ) -> Response { - //Normally, the server should generate the id, user provided id's (and the whole request) should be rejected. - //QuestionDTO id then would be an option, but that makes to/from entity conversion more tricky.. todo - let (id, question) = question_dto.to_entity(); - match store.write().await.add_question(id, question) { - Ok(question) => (StatusCode::CREATED, Json(&question.to_dto(id))).into_response(), + //Step 1: See which of supplied tags already exists + //Step 2: Create the non-existing tags + //Step 3: Create the question + //Step 4: Create the join listing + //Step 5: Return result + //Step 6: Ponder the use of ORMs + match store.write().await.add_question(new_question).await { + Ok(question) => (StatusCode::CREATED, Json(&question)).into_response(), Err(e) => (StatusCode::CONFLICT, e).into_response(), } } /// Updates an existing question -/// At present, questions cannot be 'partially updated' - all fields must be included /// # Parameters /// `QuestionDTO` A JSON representation of a Question including its id /// # Returns @@ -69,11 +100,10 @@ pub async fn create_question( /// Http Unprocessable Entity 422 is returned (implicitlly) for a malformed body pub async fn update_question( State(store): State>>, - Json(question_dto): Json, + Json(question): Json, ) -> Response { - let (id, question) = question_dto.to_entity(); - match store.write().await.update_question(id, question) { - Ok(question) => question.to_dto(id).into_response(), + match store.write().await.update_question(question) { + Ok(question) => question.into_response(), Err(e) => (StatusCode::NOT_FOUND, e).into_response(), } } @@ -89,7 +119,7 @@ pub async fn delete_question( Path(id): Path, ) -> Response { match store.write().await.remove_question(id) { - Ok(question) => question.to_dto(id).into_response(), + Ok(question) => question.into_response(), Err(e) => (StatusCode::NOT_FOUND, e).into_response(), } } diff --git a/src/json_store.rs b/src/json_store.rs deleted file mode 100644 index 92769bf..0000000 --- a/src/json_store.rs +++ /dev/null @@ -1,178 +0,0 @@ -/// Store is responsible for manageing the in-memory hashmap of questions by providing initialization read/write functions, -/// and file I/O operations to persist these questions -/// TODO - Results returning errors should use specified types, not strings -use self::{ - answer::{Answer, AnswerDTO}, - question::{Question, QuestionDTO}, -}; -use crate::*; - -const QUESTIONS_DB_PATH: &str = "./questions.json"; -const ANSWERS_DB_PATH: &str = "./answers.json"; - -#[derive(Debug)] -pub struct Store { - answers_file: File, - questions_file: File, - answers: HashMap, - questions: HashMap, -} - -impl Store { - // Upon initialization, we need to read a questions.json ans anwers.json if they exist and populate our hashmaps from them. - // Otherwise we create both files. 
- // JSON formatting and I/O errors possible here are semi-handled with a message, but ultimetly we will panic in those cases - // TODO - make this less copy/paste like - pub fn new() -> Self { - let questions_file: File = File::create_new(QUESTIONS_DB_PATH) - .or_else(|e| { - if e.kind() == ErrorKind::AlreadyExists { - File::options() - .read(true) - .write(true) - .open(QUESTIONS_DB_PATH) - } else { - Err(e) - } - }) - .unwrap(); - let questions_json = std::io::read_to_string(&questions_file) - .expect("could not get json from questions file"); - // perhaps there is a more efficient/clever way aside from reading the json to a vector and mapping the vector to a hashmap. - let questions_vec: Vec = - serde_json::from_str(&questions_json).expect("can't read questions.json"); - let questions: HashMap = questions_vec - .into_iter() - .map(|question_dto: QuestionDTO| question_dto.to_entity()) - .collect(); - - let answers_file: File = File::create_new(ANSWERS_DB_PATH) - .or_else(|e| { - if e.kind() == ErrorKind::AlreadyExists { - File::options().read(true).write(true).open(ANSWERS_DB_PATH) - } else { - Err(e) - } - }) - .unwrap(); - let answers_json = - std::io::read_to_string(&answers_file).expect("could not get json from answers file"); - let answers_vec: Vec = - serde_json::from_str(&answers_json).expect("can't read answers.json"); - let answers: HashMap = answers_vec - .into_iter() - .map(|answer_dto: AnswerDTO| answer_dto.to_entity()) - .collect(); - - Store { - questions, - answers, - answers_file, - questions_file, - } - } - - // Take the content of the questions hashmap, convert it to a vector of question DTOs and overwrite the file with these contents - // Not the most efficient approach if we are just adding or deleting a single question, but it does the job at our current scale - // TODO - pretty print before writing - fn write_questions_file(&mut self) { - let questions: &HashMap = &self.questions; - let questions_vec: Vec = questions - .iter() - .map(|q: (&u8, &Question)| q.1.to_dto(*q.0)) - .collect(); - let json: String = serde_json::to_string(&questions_vec).unwrap(); - let mut f: &File = &self.questions_file; - match f - .rewind() - .and(f.write_all(json.as_bytes())) - .and(f.sync_all()) - .and(f.set_len(f.stream_position().unwrap())) - { - Ok(()) => (), - _ => panic!("Could not write file"), - } - } - - // Take the content of the answers hashmap, convert it to a vector of answer DTOs and overwrite the file with these contents - fn write_answers_file(&mut self) { - let answers: &HashMap = &self.answers; - let answers_vec: Vec = answers - .iter() - .map(|q: (&u8, &Answer)| q.1.to_dto(*q.0)) - .collect(); - let json: String = serde_json::to_string(&answers_vec).unwrap(); - let mut f: &File = &self.answers_file; - match f - .rewind() - .and(f.write_all(json.as_bytes())) - .and(f.sync_all()) - .and(f.set_len(f.stream_position().unwrap())) - { - Ok(()) => (), - _ => panic!("Could not write file"), - } - } - - pub fn add_question(&mut self, id: u8, question: Question) -> Result { - if self.questions.contains_key(&id) { - return Err(format!("Question with id {} already exists", id)); - } - match self.questions.insert(id, question.clone()) { - None => { - self.write_questions_file(); - Ok(question) - } //Looks backwards, but insert must return none since key cannot already exist - _ => Err("Server Error".to_string()), - } - } - pub fn remove_question(&mut self, id: u8) -> Result { - match self.questions.remove(&id) { - Some(question) => { - self.write_questions_file(); - Ok(question) - 
} - None => Err(format!("Question with id {} does not exist", id)), - } - } - pub fn fetch_one_question(&self, id: u8) -> Result { - match self.questions.get(&id) { - Some(question) => Ok(question.clone()), - None => Err(format!("Question with id {} does not exists", id)), - } - } - //by nature of the hashmap, pagination does not follow id order - pub fn fetch_many_questions(&self, start: usize, size: usize) -> Vec { - self.questions - .iter() - .map(|q| q.1.to_dto(*q.0)) - .skip(start) - .take(size) - .collect() - } - pub fn update_question(&mut self, id: u8, question: Question) -> Result { - if !self.questions.contains_key(&id) { - return Err(format!("Question with id {} does not exists", id)); - } - match self.questions.insert(id, question) { - Some(question) => { - self.write_questions_file(); - Ok(question) - } - None => Err("Server Error".to_string()), - } - } - - pub fn add_answer(&mut self, id: u8, answer: Answer) -> Result { - if self.answers.contains_key(&id) { - return Err(format!("Answer with id {} already exists", id)); - } - match self.answers.insert(id, answer.clone()) { - None => { - self.write_answers_file(); - Ok(answer) - } - _ => Err("Server Error".to_string()), - } - } -} diff --git a/src/main.rs b/src/main.rs index 9390cd5..8472092 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,9 @@ mod answer; mod api; +mod pg_store; mod question; -mod json_store; +mod question_tag; +mod tag; use axum::{ extract::{Path, Query, State}, http::{StatusCode, Uri}, @@ -12,19 +14,10 @@ use axum::{ use serde::{Deserialize, Serialize}; use sqlx::{ self, - postgres::{PgPool, Postgres}, - Pool, + postgres::{PgPool, PgPoolOptions, PgRow, Postgres}, + Pool, Row, }; -use std::{ - collections::HashMap, - env::var, - fs::File, - io::{ErrorKind, Read, Seek, Write}, - net::SocketAddr, - path::PathBuf, - sync::Arc, -}; -use json_store::Store; +use std::{env::var, fs::File, io::Read, net::SocketAddr, path::PathBuf, sync::Arc}; use tokio::sync::RwLock; // generic handler for any not supported route/method combination @@ -54,20 +47,21 @@ async fn serve_file(uri: Uri) -> impl IntoResponse { } } -async fn db_connection() -> Result, sqlx::Error> { - let url: String = format!( - "postgres://{}:{}@{}:5432"/*{}*/, +//We could create/select a particular database with the URL path, but this would complicate setup, add another env, and provides little value +fn db_string() -> String { + format!( + "postgres://{}:{}@{}:5432", var("POSTGRES_USER").unwrap(), var("POSTGRES_PASSWORD").unwrap(), var("POSTGRES_HOST").unwrap(), - // var("POSTGRES_DBNAME").unwrap() - ); - PgPool::connect(&url).await + ) } #[tokio::main] async fn main() { - let store: Arc> = Arc::new(RwLock::new(json_store::Store::new())); + let db_url: String = db_string(); + let store: Arc> = + Arc::new(RwLock::new(pg_store::Store::new(&db_url).await)); let ip: SocketAddr = SocketAddr::new([127, 0, 0, 1].into(), 3000); let listener: tokio::net::TcpListener = tokio::net::TcpListener::bind(ip).await.unwrap(); let apis = Router::new() @@ -83,7 +77,6 @@ async fn main() { .route("/", get(serve_file)) .route("/*path", get(serve_file)) .fallback(handler_404); - let db: Pool = db_connection().await.unwrap(); - sqlx::migrate!().run(&db).await.unwrap(); + axum::serve(listener, app).await.unwrap(); } diff --git a/src/pg_store.rs b/src/pg_store.rs new file mode 100644 index 0000000..556eb58 --- /dev/null +++ b/src/pg_store.rs @@ -0,0 +1,176 @@ +/// Store is responsible for manageing the in-memory hashmap of questions by providing initialization read/write 
functions, +/// and file I/O operations to persist these questions +/// TODO - Results returning errors should use specified types, not strings +use self::{ + answer::Answer, + question::{NewQuestion, Question}, + question_tag::QuestionTag, + tag::Tag, +}; +use crate::*; + +#[derive(Debug)] +pub struct Store { + pub connection: PgPool, +} + +impl Store { + /// Helper to deal with unwrapping postgres fields as u8 which we want. Not gracefull if you pass in the wrong field name - so do not do that + fn id_to_u8(pg_row: &PgRow, id_name: &str) -> u8 { + pg_row.try_get::(id_name).unwrap() as u8 + } + + pub async fn new(db_url: &str) -> Self { + let db_pool: Pool = match PgPoolOptions::new() + .max_connections(5) + .connect(db_url) + .await + { + Ok(pool) => pool, + Err(_) => panic!("Could not estable database connection"), + }; + sqlx::migrate!().run(&db_pool).await.unwrap(); + Store { + connection: db_pool, + } + } + + pub async fn _remove_question_tag( + &mut self, + question_id: u8, + tag_id: u8, + ) -> Result { + let result = sqlx::query("DELETE FROM question_tag WHERE question_id = $1 AND tag_id = $2") + .bind(question_id.to_string()) + .bind(tag_id.to_string()) + .execute(&self.connection) + .await; + match result { + Ok(_) => Ok(true), + Err(e) => Err(e.to_string()), + } + } + pub async fn _add_question_tag( + &mut self, + question_id: u8, + tag_id: u8, + ) -> Result { + let result = + sqlx::query("INSERT INTO question_tag VALUES ($1,$2) RETURNING question_id, tag_id") + .bind(question_id.to_string()) + .bind(tag_id.to_string()) + .fetch_one(&self.connection) + .await; + match result { + Ok(pg_row) => Ok(QuestionTag::new( + Store::id_to_u8(&pg_row, "question_id"), + Store::id_to_u8(&pg_row, "tag_id"), + )), + Err(e) => Err(e.to_string()), + } + } + + pub async fn _add_tags(&mut self, tag_labels: Vec) -> Result, String> { + let insert_values: Vec = tag_labels + .iter() + .map(|label| format!("({}),", label)) + .collect(); + + let result = sqlx::query("INSERT INTO tags (label) VALUES $1 RETURNING id, label") + .bind(insert_values) + .fetch_all(&self.connection) + .await; + match result { + Ok(pg_rows) => Ok(pg_rows + .iter() + .map(|pg_row| Tag::new(Store::id_to_u8(pg_row, "id"), pg_row.get("label"))) + .collect()), + Err(e) => Err(e.to_string()), + } + } + + // TODO: property should be a strong type + pub async fn fetch_tags_by_property( + &self, + propert_id: u8, + property_type: &str, + ) -> Option> { + let result = sqlx::query("SELECT * FROM tags WHERE $1 = $2") + .bind(property_type) + .bind(propert_id.to_string()) + .fetch_all(&self.connection) + .await; + match result { + Ok(tag_rows) => { + let tags: Vec = tag_rows + .iter() + .map(|pg_row| Tag::new(Store::id_to_u8(pg_row, "id"), pg_row.get("label"))) + .collect(); + Some(tags) + } + _ => None, + } + } + + pub async fn add_question(&mut self, new_question: NewQuestion) -> Result { + let result: Result = sqlx::query("INSERT INTO questions (title, content, tags) VALUES ($1, $2, $3) RETURNING id, title, content, tags") + .bind(new_question.title).bind(new_question.content).bind(new_question.tags).fetch_one(&self.connection).await; + match result { + Ok(pg_row) => Ok(Question::new( + Store::id_to_u8(&pg_row, "id"), + pg_row.get("title"), + pg_row.get("content"), + )), + Err(e) => Err(e.to_string()), + } + } + + pub fn remove_question(&mut self, _id: u8) -> Result { + Err("To Do".to_string()) + } + + pub async fn fetch_one_question_by_id(&self, id: u8) -> Option { + let row_result = sqlx::query("SELECT * FROM questions WHERE id = $1") + 
.bind(id.to_string()) + .fetch_one(&self.connection) + .await; + match row_result { + Ok(pg_row) => Some(Question::new( + Store::id_to_u8(&pg_row, "id"), + pg_row.get("title"), + pg_row.get("content"), + )), + _ => None, + } + } + + pub async fn fetch_many_questions(&self, start: usize, size: usize) -> Option> { + let rows_result: Result, sqlx::Error> = + sqlx::query("SELECT * FROM questions ORDER BY id LIMIT $1 OFFSET $2") + .bind(size.to_string()) + .bind(start.to_string()) + .fetch_all(&self.connection) + .await; + match rows_result { + Ok(pg_rows) => { + let mut result: Vec = vec![]; + for pg_row in pg_rows { + result.push(Question::new( + Store::id_to_u8(&pg_row, "id"), + pg_row.get("title"), + pg_row.get("content"), + )) + } + Some(result) + } + _ => None, + } + } + pub fn update_question(&mut self, _question: Question) -> Result { + Err("To Do".to_string()) + } + + pub fn add_answer(&mut self, _id: u8, _answer: Answer) -> Result { + Err("To Do".to_string()) + } +} diff --git a/src/question.rs b/src/question.rs index 8d83cdd..b3771ec 100644 --- a/src/question.rs +++ b/src/question.rs @@ -1,25 +1,30 @@ +use tag::Tag; + /// Contains struct definitions regarding questions use crate::*; +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct NewQuestion { + pub title: String, + pub content: String, + pub tags: Option>, +} + #[derive(Deserialize, Serialize, Clone, Debug)] pub struct QuestionDTO { pub id: u8, pub title: String, pub content: String, - pub tags: Option>, + pub tags: Vec, } - -/// Question Data Transfer Object, a representation of the expected serialized JSON formated of questions regarding requests, responses, and our question json file impl QuestionDTO { - pub fn to_entity(&self) -> (u8, Question) { - ( - self.id, - Question { - title: self.title.clone(), - content: self.content.clone(), - tags: self.tags.clone(), - }, - ) + pub fn new(question: Question, tags: Vec) -> Self { + QuestionDTO { + id: question.id, + title: question.title, + content: question.content, + tags: tags.iter().map(|tag| tag.label.clone()).collect(), + } } } impl IntoResponse for &QuestionDTO { @@ -28,29 +33,16 @@ impl IntoResponse for &QuestionDTO { } } -/// Question 'entity' used for in-memory interactions of questions by the store #[derive(Deserialize, Serialize, Clone, Debug)] pub struct Question { - title: String, - content: String, - tags: Option>, + pub id: u8, + pub title: String, + pub content: String, } impl Question { - pub fn _new(_id: u8, title: String, content: String, tags: Option>) -> Self { - Question { - title, - content, - tags, - } - } - pub fn to_dto(&self, id: u8) -> QuestionDTO { - QuestionDTO { - id, - title: self.title.clone(), - content: self.content.clone(), - tags: self.tags.clone(), - } + pub fn new(id: u8, title: String, content: String) -> Self { + Question { id, title, content } } } impl IntoResponse for &Question { diff --git a/src/question_tag.rs b/src/question_tag.rs new file mode 100644 index 0000000..880c8c2 --- /dev/null +++ b/src/question_tag.rs @@ -0,0 +1,16 @@ +use crate::*; + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct QuestionTag { + pub question_id: u8, + pub tag_id: u8, +} + +impl QuestionTag { + pub fn new(question_id: u8, tag_id: u8) -> Self { + QuestionTag { + question_id, + tag_id, + } + } +} diff --git a/src/tag.rs b/src/tag.rs new file mode 100644 index 0000000..465db5b --- /dev/null +++ b/src/tag.rs @@ -0,0 +1,13 @@ +use crate::*; + +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct Tag { + pub id: u8, + pub label: 
String, +} + +impl Tag { + pub fn new(id: u8, label: String) -> Self { + Tag { id, label } + } +}
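
Reviewer note on the serial tag lookups flagged in read_question and read_questions ("Can be optimized with more complex sql"): the per-question round trips can be collapsed into a single query by joining through question_tag and aggregating the labels. The sketch below is a suggestion only, not part of this checkpoint; it reuses the questions, tags, and question_tag tables from this diff, assumes integer id columns (as id_to_u8 already does), and the method name fetch_many_questions_with_tags is invented here. As an aside, Postgres expects bigint for LIMIT/OFFSET and will not implicitly cast text, so binding the numbers directly is likely safer than binding size.to_string().

    // Sketch only: one round trip for a page of questions plus their tag labels.
    // Table and column names come from this patch; the method name is hypothetical.
    pub async fn fetch_many_questions_with_tags(
        &self,
        start: usize,
        size: usize,
    ) -> Option<Vec<QuestionDTO>> {
        let rows = sqlx::query(
            "SELECT q.id, q.title, q.content,
                    ARRAY_REMOVE(ARRAY_AGG(t.label), NULL) AS labels
             FROM questions q
             LEFT JOIN question_tag qt ON qt.question_id = q.id
             LEFT JOIN tags t ON t.id = qt.tag_id
             GROUP BY q.id, q.title, q.content
             ORDER BY q.id
             LIMIT $1 OFFSET $2",
        )
        .bind(size as i64)
        .bind(start as i64)
        .fetch_all(&self.connection)
        .await
        .ok()?;

        Some(
            rows.iter()
                .map(|row| QuestionDTO {
                    id: Store::id_to_u8(row, "id"),
                    title: row.get("title"),
                    content: row.get("content"),
                    tags: row.get::<Vec<String>, _>("labels"),
                })
                .collect(),
        )
    }

With something like this, read_questions could build its JSON response from the rows directly and drop the inner tag-fetch loop.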
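
Note on fetch_tags_by_property: a bound parameter in Postgres can only stand in for a value, never for a column name, so WHERE $1 = $2 compares the two bound strings rather than filtering on question_id, and in this schema the question/tag association lives in question_tag rather than on tags itself. A narrower lookup for the "question_id" case might look like the sketch below; the table and column names come from this diff, the method name fetch_tags_for_question is invented, and integer id columns are assumed as in id_to_u8.

    // Sketch only: fetch the tags attached to one question through the join table.
    // The column is spelled out in the SQL because parameters cannot name columns.
    pub async fn fetch_tags_for_question(&self, question_id: u8) -> Option<Vec<Tag>> {
        let rows = sqlx::query(
            "SELECT t.id, t.label
             FROM tags t
             JOIN question_tag qt ON qt.tag_id = t.id
             WHERE qt.question_id = $1",
        )
        .bind(question_id as i32)
        .fetch_all(&self.connection)
        .await
        .ok()?;

        Some(
            rows.iter()
                .map(|row| Tag::new(Store::id_to_u8(row, "id"), row.get("label")))
                .collect(),
        )
    }

read_question and read_questions could then call this directly instead of passing a column name as a string.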
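
Note on _add_tags: a VALUES list cannot be supplied through a single bound parameter, so binding a Vec of preformatted "(label)," strings to VALUES $1 will be rejected by Postgres. One way to keep it a single statement is to bind the labels as a text[] and unnest them, as sketched below; this assumes sqlx's Postgres array binding for Vec<String> and reuses the tags table from this diff.

    // Sketch only: a possible reworking of _add_tags that bulk-inserts labels
    // by binding them as a Postgres text[] rather than splicing them into the SQL.
    pub async fn _add_tags(&mut self, tag_labels: Vec<String>) -> Result<Vec<Tag>, String> {
        let rows = sqlx::query(
            "INSERT INTO tags (label)
             SELECT unnest($1::text[])
             RETURNING id, label",
        )
        .bind(tag_labels)
        .fetch_all(&self.connection)
        .await
        .map_err(|e| e.to_string())?;

        Ok(rows
            .iter()
            .map(|row| Tag::new(Store::id_to_u8(row, "id"), row.get("label")))
            .collect())
    }

If label ever gets a unique constraint, ON CONFLICT (label) DO NOTHING would let the same statement skip duplicates, though RETURNING only reports rows actually inserted, so the ids of pre-existing tags would still need a follow-up SELECT (relevant to steps 1 and 2 sketched in create_question).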