From a23abfbcc5941d0b96e1237b5edcb8017166c16b Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 11:45:52 -0800 Subject: [PATCH 1/9] Remove all whitespace from tokenizer input There were some weird inconsistencies when tokenizing large whitespace (e.g. ' ' * 20) where in some cases the first space was stripped, so resolving by ignoring input whitespace. Whitespace chars like \n, \t are still preserved! --- data_services/tokenserver.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/data_services/tokenserver.py b/data_services/tokenserver.py index faa60c8..0df504d 100644 --- a/data_services/tokenserver.py +++ b/data_services/tokenserver.py @@ -61,6 +61,7 @@ def tokenize_str(s): """ # Convert to Simplified, then tokenize s = trad_converter.convert(s) + s = s.replace(' ', '') # remove whitespace (large whitespace is inconsistently tokenized) tokens = tokenizer(s) # Get NER component as set (if any) token_entities = nlp(' '.join([str(t) for t in tokens])).ents @@ -70,22 +71,19 @@ def tokenize_str(s): j = 0 for i in range(len(tokens)): t = str(tokens[i]) - if t == ' ': - continue - elif t in CEDICT_SET or not entire_phrase_is_chinese(t): + if t in CEDICT_SET or not entire_phrase_is_chinese(t): str_tokens[j] = t j += 1 else: # use divide-and-conquer approach: recursively split until all tokens are accounted for subtokens = break_down_large_token_into_subtoken_list(t) n_st = len(subtokens) - str_tokens[j: n_st] = subtokens + str_tokens[j: j+n_st] = subtokens j += n_st while str_tokens[-1] == '': str_tokens.pop() # Handle special characters to match tokenizer output # for special characters within an alphanumeric phrase, tokenizer splits it but pfmt doesn't - # for spaces, tokenizer ignores but pfmt doesn't n_pinyin = len(s) init_pinyin_list = flatten_list(pfmt(s, style=Style.TONE3, neutral_tone_with_five=True)) raw_pinyin_list = [''] * n_pinyin # pre-allocate since known size From 7fada9e0a95eb4b8ee4c13a456b175ffe9cbb76a Mon Sep 17 
00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 11:51:33 -0800 Subject: [PATCH 2/9] Handle None query case --- app/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/app/src/lib.rs b/app/src/lib.rs index feb12a2..83fc68b 100644 --- a/app/src/lib.rs +++ b/app/src/lib.rs @@ -113,6 +113,8 @@ pub trait DatabaseItem { } /// From the first result matching the query_doc, returns the values from input fields /// as a Vec (with matching indices as the input fields). + /// In the case of a failed lookup, a single item (String::new()) is returned in the Vec. + /// Thus, the len of resulting Vec is always >=1. fn get_values_from_query(db: &Database, query_doc: Document, fields: Vec<&str>) -> Vec { let coll = (*db).collection(Self::collection_name()); let valid_fields = Self::all_field_names(); @@ -125,6 +127,8 @@ pub trait DatabaseItem { res_vec.push(String::new()); } } + } else { + res_vec.push(String::new()); } return res_vec; } From 76677e933bbe9bc6f7245daa96b3c353b9cf6ed4 Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 13:32:00 -0800 Subject: [PATCH 3/9] Move download link removal logic to JS To achieve this, updated UserVocabList.unique_phrase_list -> UserVocabList.unique_uid_list. This removes the HashSet dependency for html::convert_string_to_tokenized_html as well! 
--- app/src/html.rs | 15 +----- app/src/lib.rs | 6 ++- app/src/models/sandbox.rs | 2 +- app/src/models/user.rs | 68 ++++++++++++---------------- app/src/routes/users.rs | 25 +++++----- app/templates/reader.html.tera | 8 +++- app/templates/static/handlePopups.js | 17 +++++++ app/templates/static/template.js | 14 ------ 8 files changed, 74 insertions(+), 81 deletions(-) diff --git a/app/src/html.rs b/app/src/html.rs index 6920c96..b455e22 100644 --- a/app/src/html.rs +++ b/app/src/html.rs @@ -24,7 +24,6 @@ use mongodb::{ use regex::Regex; use std::{ io::prelude::*, - collections::HashSet, net::TcpStream }; @@ -93,7 +92,7 @@ pub fn render_phrase_html(entry: &CnEnDictEntry, cn_type: &CnType, cn_phonetics: /// Renders the HTML using the given CnType and CnPhonetics. /// Refer to tokenizer_string() for formatting details. -pub async fn convert_string_to_tokenized_html(s: &str, cn_type: &CnType, cn_phonetics: &CnPhonetics, user_saved_phrases: Option>) -> String { +pub async fn convert_string_to_tokenized_html(s: &str, cn_type: &CnType, cn_phonetics: &CnPhonetics) -> String { const PHRASE_DELIM: char = '$'; const PINYIN_DELIM: char = '`'; let mut conn = connect_to_redis().await.unwrap(); @@ -101,10 +100,6 @@ pub async fn convert_string_to_tokenized_html(s: &str, cn_type: &CnType, cn_phon let n_phrases = tokenized_string.matches(PHRASE_DELIM).count(); // Estimate pre-allocated size: max ~2100 chars per phrase (conservitively 2500), 1 usize per char let mut res = String::with_capacity(n_phrases * 2500); - let user_saved_phrases: HashSet = match user_saved_phrases { - Some(set) => set, - None => HashSet::new() - }; for token in tokenized_string.split(PHRASE_DELIM) { let token_vec: Vec<&str> = token.split(PINYIN_DELIM).collect(); let phrase = token_vec[0]; // If Chinese, then Simplified @@ -128,13 +123,7 @@ pub async fn convert_string_to_tokenized_html(s: &str, cn_type: &CnType, cn_phon if entry.lookup_failed() { res += generate_html_for_not_found_phrase(phrase).as_str(); } 
else { - let include_sound_link = true; - let phrase = match cn_type { - CnType::Traditional => &entry.trad, - CnType::Simplified => &entry.simp - }; - let include_download_link = !(user_saved_phrases.contains(phrase)); - res += render_phrase_html(&entry, cn_type, cn_phonetics, include_sound_link, include_download_link).as_str(); + res += render_phrase_html(&entry, cn_type, cn_phonetics, true, true).as_str(); } } } diff --git a/app/src/lib.rs b/app/src/lib.rs index 83fc68b..3a92454 100644 --- a/app/src/lib.rs +++ b/app/src/lib.rs @@ -114,7 +114,7 @@ pub trait DatabaseItem { /// From the first result matching the query_doc, returns the values from input fields /// as a Vec (with matching indices as the input fields). /// In the case of a failed lookup, a single item (String::new()) is returned in the Vec. - /// Thus, the len of resulting Vec is always >=1. + /// Thus, the len of resulting Vec is always == the len of the fields Vec. fn get_values_from_query(db: &Database, query_doc: Document, fields: Vec<&str>) -> Vec { let coll = (*db).collection(Self::collection_name()); let valid_fields = Self::all_field_names(); @@ -128,7 +128,9 @@ pub trait DatabaseItem { } } } else { - res_vec.push(String::new()); + for _ in fields { + res_vec.push(String::new()); + } } return res_vec; } diff --git a/app/src/models/sandbox.rs b/app/src/models/sandbox.rs index 1af003c..297b10f 100644 --- a/app/src/models/sandbox.rs +++ b/app/src/models/sandbox.rs @@ -44,7 +44,7 @@ impl SandboxDoc { let doc_id = Uuid::new_v4().to_string(); let cn_type = CnType::from_str(&cn_type).unwrap(); let cn_phonetics = CnPhonetics::from_str(&cn_phonetics).unwrap(); - let body_html = html_rendering::convert_string_to_tokenized_html(&body, &cn_type, &cn_phonetics, None).await; + let body_html = html_rendering::convert_string_to_tokenized_html(&body, &cn_type, &cn_phonetics).await; let created_on = Utc::now().to_string(); let new_doc = SandboxDoc { doc_id, body, body_html, source, cn_type, cn_phonetics, 
created_on }; return new_doc; diff --git a/app/src/models/user.rs b/app/src/models/user.rs index 42897e3..2071438 100644 --- a/app/src/models/user.rs +++ b/app/src/models/user.rs @@ -24,10 +24,7 @@ use mongodb::{ }; use rand::{self, Rng}; use serde::{Serialize, Deserialize}; -use std::{ - collections::HashSet, - error::Error -}; +use std::error::Error; #[derive(Serialize, Deserialize, Debug)] pub struct User { @@ -206,13 +203,13 @@ impl UserDoc { return UserDoc::new(db, username, title_text, body_text, url); } /// Attempts to delete a matching object in MongoDB. - pub fn try_delete(db: &Database, username: &str, title: &str) -> bool { + pub async fn try_delete(db: &Database, username: &str, title: &str) -> bool { let (cn_type, cn_phonetics) = User::get_user_settings(db, username); let coll = (*db).collection(USER_DOC_COLL_NAME); let query_doc = doc! { "username": username, "title": title, "cn_type": cn_type.as_str(), "cn_phonetics": cn_phonetics.as_str() }; let res = match coll.delete_one(query_doc, None) { Ok(_) => { - match UserVocab::try_delete_all_from_title(db, username, title, &cn_type) { + match UserVocab::try_delete_all_from_title(db, username, title, &cn_type).await { Ok(b) => b, Err(_) => false } @@ -243,7 +240,7 @@ impl DatabaseItem for UserVocab { let coll = (*db).collection(Self::collection_name()); let new_doc = self.as_document(); coll.insert_one(new_doc, None)?; - UserVocabList::append_to_user_vocab_list(db, &self.username, &self.phrase, &self.cn_type)?; + UserVocabList::append_to_user_vocab_list(db, &self.username, &self.phrase, &self.cn_type, &self.uid)?; return Ok(String::from(self.primary_key())); } fn collection_name() -> &'static str { return USER_VOCAB_COLL_NAME; } @@ -287,12 +284,18 @@ impl UserVocab { return (phrase, defn, phrase_phonetics, phrase_html); } /// Attempts to delete the given UserVocab item. 
- pub fn try_delete(db: &Database, username: &str, phrase: &str, cn_type: &CnType) -> bool { + pub async fn try_delete(db: &Database, username: &str, uid: &str, cn_type: &CnType) -> bool { let coll = (*db).collection(USER_VOCAB_COLL_NAME); + let mut conn = connect_to_redis().await.unwrap(); + let entry = CnEnDictEntry::from_uid(&mut conn, String::from(uid)).await; + let phrase = match cn_type { + CnType::Traditional => &entry.trad, + CnType::Simplified => &entry.simp + }; let query_doc = doc! { "username": username, "phrase": phrase, "cn_type": cn_type.as_str() }; let res = match coll.delete_one(query_doc, None) { Ok(_) => { - match UserVocabList::remove_from_user_vocab_list(db, username, phrase, cn_type) { + match UserVocabList::remove_from_user_vocab_list(db, username, phrase, cn_type, uid) { Ok(_) => true, Err(_) => false } @@ -302,7 +305,7 @@ impl UserVocab { return res; } /// Attempts to delete all UserVocab linked to a given UserDoc. - pub fn try_delete_all_from_title(db: &Database, username: &str, from_doc_title: &str, cn_type: &CnType) -> Result> { + pub async fn try_delete_all_from_title(db: &Database, username: &str, from_doc_title: &str, cn_type: &CnType) -> Result> { let coll = (*db).collection(USER_VOCAB_COLL_NAME); let query_doc = doc! 
{ "username": username, "from_doc_title": from_doc_title }; let mut res = true; @@ -310,7 +313,7 @@ impl UserVocab { for item in cursor { let doc = item?; let phrase = doc.get_str("phrase")?; - if UserVocab::try_delete(db, username, phrase, cn_type) == false { + if UserVocab::try_delete(db, username, phrase, cn_type).await == false { res = false; eprintln!("Error: could not delete phrase: {}", phrase); } @@ -324,33 +327,22 @@ impl UserVocab { pub struct UserVocabList { username: String, unique_char_list: String, - unique_phrase_list: String, + unique_uid_list: String, cn_type: CnType } impl DatabaseItem for UserVocabList { fn collection_name() -> &'static str { return USER_VOCAB_LIST_COLL_NAME; } fn all_field_names() -> Vec<&'static str> { - return vec!["username", "unique_char_list", "unique_phrase_list", "cn_type"]; + return vec!["username", "unique_char_list", "unique_uid_list", "cn_type"]; } /// Note: this is not necessarily unique per user, a unique primary key is username + cn_type fn primary_key(&self) -> &str { return &self.username; } } impl UserVocabList { - /// Gets HashSet of phrases that the user has saved for given CnType. - pub fn get_phrase_list_as_hashset(db: &Database, username: &str, cn_type: &CnType) -> HashSet { - let list = UserVocab::get_values_from_query(db, - doc!{ "username": username, "cn_type": cn_type.as_str() }, - vec!["unique_phrase_list"])[0].to_owned(); - let mut res: HashSet = HashSet::new(); - for c in list.split(',') { - res.insert(c.to_string()); - } - return res; - } /// Updates UserVocabList object for given username with information form new_phrase. 
- fn append_to_user_vocab_list(db: &Database, username: &str, new_phrase: &str, cn_type: &CnType) -> Result<(), Box> { + fn append_to_user_vocab_list(db: &Database, username: &str, new_phrase: &str, cn_type: &CnType, uid: &str) -> Result<(), Box> { let append_to_char_list = |list: &mut String, phrase: &str| { for c in phrase.chars() { if !(*list).contains(c) { @@ -366,29 +358,29 @@ impl UserVocabList { let prev_doc: UserVocabList = from_bson(Bson::Document(doc)).unwrap(); let mut unique_char_list = prev_doc.unique_char_list.clone(); append_to_char_list(&mut unique_char_list, new_phrase); - let mut unique_phrase_list = prev_doc.unique_phrase_list.clone(); - unique_phrase_list += new_phrase; - unique_phrase_list += ","; + let mut unique_uid_list = prev_doc.unique_uid_list.clone(); + unique_uid_list += uid; + unique_uid_list += ","; // Write to db prev_doc.try_update(db, - vec!["unique_char_list", "unique_phrase_list"], - vec![&unique_char_list, &unique_phrase_list])?; + vec!["unique_char_list", "unique_uid_list"], + vec![&unique_char_list, &unique_uid_list])?; } else { // Create new instance with unique chars let mut unique_char_list = String::with_capacity(50); append_to_char_list(&mut unique_char_list, new_phrase); - let mut unique_phrase_list = String::from(new_phrase); - unique_phrase_list += ","; + let mut unique_uid_list = String::from(uid); + unique_uid_list += ","; // Write to db let username = username.to_string(); let cn_type = CnType::from_str(cn_type_str).unwrap(); - let new_doc = UserVocabList { username, unique_char_list, unique_phrase_list, cn_type }; + let new_doc = UserVocabList { username, unique_char_list, unique_uid_list, cn_type }; new_doc.try_insert(db)?; }; return Ok(()); } /// Removes information in UserVocabList object from username based on phrase_to_remove. 
- fn remove_from_user_vocab_list(db: &Database, username: &str, phrase_to_remove: &str, cn_type: &CnType) -> Result<(), Box> { + fn remove_from_user_vocab_list(db: &Database, username: &str, phrase_to_remove: &str, cn_type: &CnType, uid: &str) -> Result<(), Box> { let query_res = UserVocabList::try_lookup(db, doc! {"username": username, "cn_type": cn_type.as_str() }); if let Some(doc) = query_res { let prev_doc: UserVocabList = from_bson(Bson::Document(doc)).unwrap(); @@ -401,12 +393,12 @@ impl UserVocabList { unique_char_list = unique_char_list.replace(&c_with_comma, ""); } } - let phrase_with_comma = format!("{},", phrase_string); - let unique_phrase_list = prev_doc.unique_phrase_list.replace(&phrase_with_comma, ""); + let uid_with_comma = format!("{},", uid); + let unique_uid_list = prev_doc.unique_uid_list.replace(&uid_with_comma, ""); // Write to db prev_doc.try_update(db, - vec!["unique_char_list", "unique_phrase_list"], - vec![&unique_char_list, &unique_phrase_list])?; + vec!["unique_char_list", "unique_uid_list"], + vec![&unique_char_list, &unique_uid_list])?; } else { } return Ok(()); } diff --git a/app/src/routes/users.rs b/app/src/routes/users.rs index 4341a39..faee37e 100644 --- a/app/src/routes/users.rs +++ b/app/src/routes/users.rs @@ -93,13 +93,15 @@ pub fn user_view_doc(cookies: Cookies, db: State, rt: State, r let doc_body = UserDoc::get_values_from_query(&db, doc!{ "username": &username, "title": &title}, vec!["body"])[0].to_owned(); - let vocab_set = UserVocabList::get_phrase_list_as_hashset(&db, &username, &cn_type); - let doc_html_res = rt.block_on(html_rendering::convert_string_to_tokenized_html(&doc_body, &cn_type, &cn_phonetics, Some(vocab_set))); - let user_char_list_string = UserVocabList::get_values_from_query(&db, + let doc_html_res = rt.block_on(html_rendering::convert_string_to_tokenized_html(&doc_body, &cn_type, &cn_phonetics)); + let query_res = UserVocabList::get_values_from_query(&db, doc! 
{ "username": &username, "cn_type": cn_type.as_str() }, - vec!["unique_char_list"])[0].to_owned(); + vec!["unique_char_list", "unique_uid_list"]); + let user_char_list_string = query_res[0].to_owned(); + let user_uid_list_string = query_res[1].to_owned(); context.insert("paragraph_html", doc_html_res); context.insert("user_char_list_string", user_char_list_string); + context.insert("user_uid_list_string", user_uid_list_string); context.insert("cn_phonetics", cn_phonetics.to_string()); } }, @@ -112,20 +114,19 @@ pub fn user_view_doc(cookies: Cookies, db: State, rt: State, r } /// /api/delete-doc/ #[get("/api/delete-doc/")] -pub fn delete_user_doc(cookies: Cookies, db: State, doc_title: &RawStr) -> Redirect { +pub fn delete_user_doc(cookies: Cookies, db: State, rt: State, doc_title: &RawStr) -> Redirect { let title = convert_rawstr_to_string(doc_title); let username = get_username_from_cookie(&db, cookies.get(JWT_NAME)).unwrap(); - UserDoc::try_delete(&db, &username, &title); + rt.block_on(UserDoc::try_delete(&db, &username, &title)); return Redirect::to(uri!(user_profile: username)); } -/// /api/delete-vocab/ -#[get("/api/delete-vocab/")] -pub fn delete_user_vocab(cookies: Cookies, db: State, vocab_phrase: &RawStr) -> Redirect { - let phrase_string = convert_rawstr_to_string(vocab_phrase); +/// /api/delete-vocab/ +#[get("/api/delete-vocab/")] +pub fn delete_user_vocab(cookies: Cookies, db: State, rt: State, vocab_uid: &RawStr) -> Redirect { + let phrase_uid = convert_rawstr_to_string(vocab_uid); let username = get_username_from_cookie(&db, cookies.get(JWT_NAME)).unwrap(); - let (cn_type, _) = User::get_user_settings(&db, &username); - UserVocab::try_delete(&db, &username, &phrase_string, &cn_type); + rt.block_on(UserVocab::try_delete(&db, &username, &phrase_uid, &cn_type)); return Redirect::to(uri!(user_profile: username)); } /// /api/logout diff --git a/app/templates/reader.html.tera b/app/templates/reader.html.tera index c6229ff..4c755a6 100644 --- 
a/app/templates/reader.html.tera +++ b/app/templates/reader.html.tera @@ -56,12 +56,18 @@ let current_url = window.location.href.split("/"); document.title = decodeURIComponent(current_url[current_url.length - 1]); {% endif %} - /// Button Functionality + /// "Hide Saved Pinyin" Functionality {% if user_char_list_string is undefined %} let user_saved_char_list = []; {% else %} let user_saved_char_list = "{{user_char_list_string}}".split(','); {% endif %} + /// Remove download links from saved phrases + {% if user_uid_list_string is undefined%} + let user_saved_uid_list = []; + {% else %} + let user_saved_uid_list = "{{user_uid_list_string}}".split(','); + {% endif %} diff --git a/app/templates/static/handlePopups.js b/app/templates/static/handlePopups.js index abf7cb9..529303d 100644 --- a/app/templates/static/handlePopups.js +++ b/app/templates/static/handlePopups.js @@ -1,3 +1,20 @@ +/** + * Removes the download link after a user saves a phrase. + * @param {String} uid Phrase uid (currently: simplified+raw_pinyin) + */ +let removeDownloadLink = (uid) => { + download_link = ` `; + let spans = document.getElementsByClassName(uid); + const title_attr = "data-bs-original-title"; + for (let i=0; i < spans.length; i++) { + let new_title = spans[i].getAttribute(title_attr).replace(download_link, ""); + spans[i].setAttribute(title_attr, new_title); + } +} + +/// Remove download link for all saved phrases (defined in reader.html.tera) +user_saved_uid_list.forEach(removeDownloadLink); + /// Closes active popovers (by clicking) let close_active_popovers = (event) => { let active_elements = document.querySelectorAll("span[aria-describedby]"); diff --git a/app/templates/static/template.js b/app/templates/static/template.js index 8d4e14a..cc43cf7 100644 --- a/app/templates/static/template.js +++ b/app/templates/static/template.js @@ -103,20 +103,6 @@ let postNewVocab = (hash_string) => { xhr.send(params); } -/** - * Removes the download link after a user saves a phrase. 
- * @param {String} uid Phrase uid (currently: simplified+raw_pinyin) - */ -let removeDownloadLink = (uid) => { - download_link = ` `; - let spans = document.getElementsByClassName(uid); - const title_attr = "data-bs-original-title"; - for (let i=0; i < spans.length; i++) { - let new_title = spans[i].getAttribute(title_attr).replace(download_link, ""); - spans[i].setAttribute(title_attr, new_title); - } -} - /** * Handles the hash updating logic. */ From dac28e2bfb5e509a1ee75d1db77387dd4345dfef Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 14:06:22 -0800 Subject: [PATCH 4/9] Bring-back UserDoc.body_html --- app/src/models/user.rs | 10 ++++++---- app/src/routes/users.rs | 11 +++++------ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/app/src/models/user.rs b/app/src/models/user.rs index 2071438..0940941 100644 --- a/app/src/models/user.rs +++ b/app/src/models/user.rs @@ -154,6 +154,7 @@ pub struct UserDoc { username: String, pub title: String, pub body: String, + pub body_html: String, pub source: String, cn_type: CnType, cn_phonetics: CnPhonetics, @@ -163,7 +164,7 @@ pub struct UserDoc { impl DatabaseItem for UserDoc { fn collection_name() -> &'static str { return USER_DOC_COLL_NAME; } fn all_field_names() -> Vec<&'static str> { - return vec!["username", "title", "body", + return vec!["username", "title", "body", "body_html", "source", "cn_type", "cn_phonetics", "created_on"] } /// Note: this is not unique per document, a unique primary_key is username + title. @@ -172,8 +173,9 @@ impl DatabaseItem for UserDoc { impl UserDoc { /// Generates a new UserDoc. For title collisions, a new title is automatically generated (appended by -#). 
- pub fn new(db: &Database, username: String, desired_title: String, body: String, source: String) -> Self { + pub async fn new(db: &Database, username: String, desired_title: String, body: String, source: String) -> Self { let (cn_type, cn_phonetics) = User::get_user_settings(db, &username); + let body_html = html_rendering::convert_string_to_tokenized_html(&body, &cn_type, &cn_phonetics).await; let desired_title = desired_title.replace(" ", ""); // If title is non-unique, try appending digits until match let coll = (*db).collection(USER_DOC_COLL_NAME); @@ -194,13 +196,13 @@ impl UserDoc { false => desired_title }; let created_on = Utc::now().to_string(); - let new_doc = UserDoc { username, title, body, source, cn_type, cn_phonetics, created_on }; + let new_doc = UserDoc { username, title, body, body_html, source, cn_type, cn_phonetics, created_on }; return new_doc; } /// Generates a new UserDoc with HTML-parsed title + text from the given URL. pub async fn from_url(db: &Database, username: String, url: String) -> Self { let (title_text, body_text) = scrape_text_from_url(&url).await; - return UserDoc::new(db, username, title_text, body_text, url); + return UserDoc::new(db, username, title_text, body_text, url).await; } /// Attempts to delete a matching object in MongoDB. 
pub async fn try_delete(db: &Database, username: &str, title: &str) -> bool { diff --git a/app/src/routes/users.rs b/app/src/routes/users.rs index faee37e..b24a268 100644 --- a/app/src/routes/users.rs +++ b/app/src/routes/users.rs @@ -80,7 +80,7 @@ pub fn user_profile(cookies: Cookies, db: State, raw_username: &RawStr } /// /u// #[get("/u//")] -pub fn user_view_doc(cookies: Cookies, db: State, rt: State, raw_username: &RawStr, doc_title: &RawStr) -> Template { +pub fn user_view_doc(cookies: Cookies, db: State, raw_username: &RawStr, doc_title: &RawStr) -> Template { let mut context: HashMap<&str, String> = HashMap::new(); // Note: <&str, String> makes more sense than <&str, &str> due to variable lifetimes let username = convert_rawstr_to_string(raw_username); // Compare username with logged-in username from JWT @@ -90,10 +90,9 @@ pub fn user_view_doc(cookies: Cookies, db: State, rt: State, r // Get html to render let (cn_type, cn_phonetics) = User::get_user_settings(&db, &username); let title = convert_rawstr_to_string(doc_title); - let doc_body = UserDoc::get_values_from_query(&db, + let doc_html_res = UserDoc::get_values_from_query(&db, doc!{ "username": &username, "title": &title}, - vec!["body"])[0].to_owned(); - let doc_html_res = rt.block_on(html_rendering::convert_string_to_tokenized_html(&doc_body, &cn_type, &cn_phonetics)); + vec!["body_html"])[0].to_owned(); let query_res = UserVocabList::get_values_from_query(&db, doc! 
{ "username": &username, "cn_type": cn_type.as_str() }, vec!["unique_char_list", "unique_uid_list"]); @@ -258,7 +257,7 @@ pub struct UserDocumentForm<'f> { } /// /api/upload #[post("/api/upload", data="")] -pub fn user_doc_upload(cookies: Cookies, db: State, user_doc: Form>) -> Redirect { +pub fn user_doc_upload(cookies: Cookies, db: State, rt: State, user_doc: Form>) -> Redirect { let UserDocumentForm { title, source, body } = user_doc.into_inner(); let title = convert_rawstr_to_string(title); let body = convert_rawstr_to_string(body); @@ -266,7 +265,7 @@ pub fn user_doc_upload(cookies: Cookies, db: State, user_doc: Form { - let new_doc = UserDoc::new(&db, username, title, body, source); + let new_doc = rt.block_on(UserDoc::new(&db, username, title, body, source)); match new_doc.try_insert(&db) { Ok(username) => Redirect::to(uri!(user_profile: username)), Err(e) => { From 47b3f29885eb906733b5bfd15ac63fb22641799a Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 14:51:26 -0800 Subject: [PATCH 5/9] Fix assorted UserVocab uid links --- app/src/html.rs | 4 ++-- app/src/models/user.rs | 7 ++----- app/templates/index.html.tera | 7 +++++++ app/templates/static/handlePopups.js | 12 ++++++++++-- app/templates/static/template.js | 8 +------- 5 files changed, 22 insertions(+), 16 deletions(-) diff --git a/app/src/html.rs b/app/src/html.rs index b455e22..a07838d 100644 --- a/app/src/html.rs +++ b/app/src/html.rs @@ -189,9 +189,9 @@ pub fn render_vocab_table(db: &Database, username: &str) -> String { for item in cursor { // unwrap BSON document let user_doc = item.unwrap(); - let UserVocab { phrase, from_doc_title, phrase_html, created_on, radical_map, .. } = bson::from_bson(Bson::Document(user_doc)).unwrap(); + let UserVocab { uid, from_doc_title, phrase_html, created_on, radical_map, .. 
} = bson::from_bson(Bson::Document(user_doc)).unwrap(); let from_doc_title = format!("{}", username, from_doc_title, from_doc_title); - let delete_button = format!("", phrase, TRASH_ICON); + let delete_button = format!("", uid, TRASH_ICON); let row = format!("{}{}{}{}{}\n", phrase_html, &from_doc_title, radical_map, &created_on[0..10], &delete_button); res += &row; } diff --git a/app/src/models/user.rs b/app/src/models/user.rs index 0940941..36f64df 100644 --- a/app/src/models/user.rs +++ b/app/src/models/user.rs @@ -314,11 +314,8 @@ impl UserVocab { let cursor = coll.find(query_doc, None)?; for item in cursor { let doc = item?; - let phrase = doc.get_str("phrase")?; - if UserVocab::try_delete(db, username, phrase, cn_type).await == false { - res = false; - eprintln!("Error: could not delete phrase: {}", phrase); - } + let uid = doc.get_str("uid")?; + res = res && UserVocab::try_delete(db, username, uid, cn_type).await; } return Ok(res); } diff --git a/app/templates/index.html.tera b/app/templates/index.html.tera index b4f22b3..630c7b7 100644 --- a/app/templates/index.html.tera +++ b/app/templates/index.html.tera @@ -44,5 +44,12 @@ Thank you to Martin Kess for providing the initial implementation (the 中文读机) which provided a well-scoped architecture and very strong foundation to build on.
Visit the Github Repo to view the stack and other notes on the project. + {% endblock content %} \ No newline at end of file diff --git a/app/templates/static/handlePopups.js b/app/templates/static/handlePopups.js index 529303d..553dea9 100644 --- a/app/templates/static/handlePopups.js +++ b/app/templates/static/handlePopups.js @@ -1,9 +1,15 @@ +/// Enable pop-ups +let popoverTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="popover"]')) +let popoverList = popoverTriggerList.map(function (popoverTriggerEl) { + return new bootstrap.Popover(popoverTriggerEl) +}) + /** * Removes the download link after a user saves a phrase. * @param {String} uid Phrase uid (currently: simplified+raw_pinyin) */ let removeDownloadLink = (uid) => { - download_link = ` `; + const download_link = ` `; let spans = document.getElementsByClassName(uid); const title_attr = "data-bs-original-title"; for (let i=0; i < spans.length; i++) { @@ -13,7 +19,9 @@ let removeDownloadLink = (uid) => { } /// Remove download link for all saved phrases (defined in reader.html.tera) -user_saved_uid_list.forEach(removeDownloadLink); +for (let i=0; i < user_saved_uid_list.length; i++) { + removeDownloadLink(user_saved_uid_list[i]); +} /// Closes active popovers (by clicking) let close_active_popovers = (event) => { diff --git a/app/templates/static/template.js b/app/templates/static/template.js index cc43cf7..4a3ccf2 100644 --- a/app/templates/static/template.js +++ b/app/templates/static/template.js @@ -1,10 +1,4 @@ /// General Handling -/// Enable pop-ups -let popoverTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="popover"]')) -let popoverList = popoverTriggerList.map(function (popoverTriggerEl) { - return new bootstrap.Popover(popoverTriggerEl) -}) - /// Update to Loading Button onsubmit let switchToLoadingButton = (id) => { let button = document.getElementById(id) @@ -90,7 +84,7 @@ let postNewVocab = (hash_string) => { xhr.onload = () => { if (xhr.status == 
202) { alert(`Successfully added ${hash_string} to your dictionary!`); - try { user_saved_phrase_list = user_saved_phrase_list.concat(hash_string.split('')); } + try { user_saved_uid_list = user_saved_uid_list.concat(hash_string); } finally { switchOffWordVisibility(hash_string); } } else { From 5a1fd4d9d2516f09989b14c39d447de5511cb88f Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 15:05:48 -0800 Subject: [PATCH 6/9] Add CnType to linked UserVocab removal --- app/src/models/user.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/src/models/user.rs b/app/src/models/user.rs index 36f64df..4b090c1 100644 --- a/app/src/models/user.rs +++ b/app/src/models/user.rs @@ -309,7 +309,7 @@ impl UserVocab { /// Attempts to delete all UserVocab linked to a given UserDoc. pub async fn try_delete_all_from_title(db: &Database, username: &str, from_doc_title: &str, cn_type: &CnType) -> Result> { let coll = (*db).collection(USER_VOCAB_COLL_NAME); - let query_doc = doc! { "username": username, "from_doc_title": from_doc_title }; + let query_doc = doc! 
{ "username": username, "from_doc_title": from_doc_title, "cn_type": cn_type.as_str() }; let mut res = true; let cursor = coll.find(query_doc, None)?; for item in cursor { From 77f2bb2c5073227b501b9e6a0dee9b02c0d9aca2 Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 15:10:12 -0800 Subject: [PATCH 7/9] Update README doc link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b7735f0..1d5784d 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # DuGuo -[![docs: 0.1.0](https://img.shields.io/badge/Docs-0.1.0-blue)](https://duguo-app.com/static/docs/duguo/index.html) +[![docs: 0.1.0](https://img.shields.io/badge/Docs-0.1.0-blue)](https://duguo-app.com/static/doc/duguo/index.html) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) ## Overview From 902e4a17a4e7e7c4e5c087ac8c66e799ae79b2ae Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 16:23:45 -0800 Subject: [PATCH 8/9] Fix duplicate username/email registration handling --- app/src/models/user.rs | 4 +++- app/src/routes/users.rs | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/app/src/models/user.rs b/app/src/models/user.rs index 4b090c1..91faf7b 100644 --- a/app/src/models/user.rs +++ b/app/src/models/user.rs @@ -47,8 +47,10 @@ impl DatabaseItem for User { Ok(_) => { }, Err(e) => { return Err(Box::new(e)); } } + return Ok(String::from(self.primary_key())); } - return Ok(String::from(self.primary_key())); + return Err(Box::new(std::io::Error::new(std::io::ErrorKind::PermissionDenied, + "Username and/or Email are taken."))); } fn collection_name() -> &'static str { return USER_COLL_NAME; } fn all_field_names() -> Vec<&'static str> { diff --git a/app/src/routes/users.rs b/app/src/routes/users.rs index b24a268..e8dfceb 100644 --- a/app/src/routes/users.rs +++ b/app/src/routes/users.rs @@ -236,7 +236,6 @@ pub fn register_form(mut cookies: Cookies, db: State, 
user_input: Form let username = convert_rawstr_to_string(username); let password = convert_rawstr_to_string(password); let email = convert_rawstr_to_string(email); - let new_user = User::new(username.clone(), password.clone(), email); let res_status = match new_user.try_insert(&db) { Ok(_) => { @@ -244,7 +243,7 @@ pub fn register_form(mut cookies: Cookies, db: State, user_input: Form cookies.add(new_cookie); Status::Accepted }, - Err(_) => { Status::UnprocessableEntity } + Err(_) => Status::UnprocessableEntity }; return res_status; } From c6c6c27532f6ddaec7b9acb0794da1fb5cba24e1 Mon Sep 17 00:00:00 2001 From: ericpan64 Date: Tue, 9 Feb 2021 16:24:12 -0800 Subject: [PATCH 9/9] Allow for backspaces --- app/templates/static/template.js | 1 + 1 file changed, 1 insertion(+) diff --git a/app/templates/static/template.js b/app/templates/static/template.js index 4a3ccf2..41ecc12 100644 --- a/app/templates/static/template.js +++ b/app/templates/static/template.js @@ -106,6 +106,7 @@ let parseHashChange = () => { hash_string = decodeURIComponent(hash_string); // Remove the hash selector. From: https://stackoverflow.com/a/5298684/13073731 history.replaceState("", document.title, window.location.pathname + window.location.search); + history.back(); // If starts with ~: try Text-to-Speech // If starts with $: try User settings update // Otherwise : try to save as UserVocab