author     Joel Klinghed <the_jk@spawned.biz>  2025-06-12 09:11:18 +0200
committer  Joel Klinghed <the_jk@spawned.biz>  2025-06-19 00:19:37 +0200
commit     2b54f5c51ff9a26d4077037631ed39d62ed2b3fb (patch)
tree       8544278dba24645a063472a3005a3021879a4bf1 /server/src
parent     baa7c85ff3db2366d67ac875fca48ad6dcabf212 (diff)
Initial support for translation reviews
Diffstat (limited to 'server/src')
-rw-r--r--  server/src/git_root.rs  |  67
-rw-r--r--  server/src/main.rs      | 367
-rw-r--r--  server/src/tests.rs     |  44
-rw-r--r--  server/src/trans.rs     | 312
4 files changed, 747 insertions, 43 deletions
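
The commit wires three new endpoints into main.rs: POST /translation/<projectid>/new, GET /project/<projectid>/translations and GET /translation/<translation_reviewid>/strings. As a rough orientation before the diffs, here is a hypothetical client-side walkthrough of that surface; the base URL, project id and cookie-based session transport are assumptions for illustration only (only the auth::Session guard is visible in the diff), and the field names follow api_model as used below.

    use serde_json::json;

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        let client = reqwest::Client::new();
        let base = "http://localhost:8000";       // assumed deployment URL
        let cookie = "session=<session-cookie>";  // assumed session transport

        // POST /translation/<projectid>/new: create a review; an empty "base"
        // makes the server fetch the project's main branch (see main.rs).
        let review: serde_json::Value = client
            .post(format!("{base}/translation/myproject/new"))
            .header("Cookie", cookie)
            .json(&json!({ "title": "Initial review", "description": "", "base": "" }))
            .send().await?
            .json().await?;
        let review_id = review["id"].as_u64().unwrap();

        // GET /project/<projectid>/translations: paginated review listing.
        let reviews: serde_json::Value = client
            .get(format!("{base}/project/myproject/translations?limit=10&offset=0"))
            .header("Cookie", cookie)
            .send().await?
            .json().await?;
        println!("reviews: {}", reviews["total_count"]);

        // GET /translation/<id>/strings: the strings collected for the review.
        let strings: serde_json::Value = client
            .get(format!("{base}/translation/{review_id}/strings?limit=10&offset=0"))
            .header("Cookie", cookie)
            .send().await?
            .json().await?;
        println!("strings: {}", strings["total_count"]);
        Ok(())
    }
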
diff --git a/server/src/git_root.rs b/server/src/git_root.rs
index 71ad96f..f68ed92 100644
--- a/server/src/git_root.rs
+++ b/server/src/git_root.rs
@@ -9,6 +9,7 @@ use rocket_db_pools::{sqlx, Database, Pool};
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs::Permissions;
+use std::io::BufReader;
use std::ops::Deref;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
@@ -22,6 +23,7 @@ use crate::api_model;
use crate::fs_utils;
use crate::git;
use crate::git_socket;
+use crate::trans;
use crate::Db;
type DbPool = <Db as Database>::Pool;
@@ -70,6 +72,71 @@ impl Roots {
Ok(())
}
+ pub async fn new_translation_review(
+ &self,
+ db: &Db,
+ project_id: &str,
+ translation_reviewid: u64,
+ base: &str,
+ ) -> Result<(), anyhow::Error> {
+ let repo;
+ {
+ let data = self.data.lock().unwrap();
+ if let Some(tmp_repo) = data.project_repo.get(project_id) {
+ repo = tmp_repo.clone();
+ } else {
+ return Err(anyhow::Error::msg("No such repo"));
+ }
+ }
+
+ let mut entries = repo
+ .ls_tree(base, true)
+ .await
+ .map_err(|e| anyhow::Error::new(e))?;
+ entries.retain(|e| e.object_type == git::ObjectType::BLOB);
+ let grits = entries
+ .iter()
+ .filter(|x| x.path.ends_with(".grd"))
+ .map(|x| x.path.to_string())
+ .collect::<Vec<String>>();
+ let entries = Arc::new(entries);
+ let strings = trans::collect_strings_with_opener(grits, move |path| {
+ for entry in &*entries {
+ if entry.path == path {
+ let rt = tokio::runtime::Handle::current();
+ let object_name = entry.object_name.clone();
+ let repo = repo.clone();
+ return rt.block_on(async move {
+ repo.cat_file(git::ObjectType::BLOB, object_name)
+ .await
+ .map(|x| BufReader::new(x))
+ .map_err(|e| anyhow::Error::new(e))
+ });
+ }
+ }
+ Err(anyhow::Error::msg(format!("No such file: {path}")))
+ })
+ .await?;
+
+ trans::review_add_strings(db, translation_reviewid, strings, true).await?;
+
+ Ok(())
+ }
+
+ pub async fn fetch_branch(&self, project_id: &str, branch: &str) -> anyhow::Result<String> {
+ let repo;
+ {
+ let data = self.data.lock().unwrap();
+ if let Some(tmp_repo) = data.project_repo.get(project_id) {
+ repo = tmp_repo.clone();
+ } else {
+ return Err(anyhow::Error::msg("No such repo"));
+ }
+ }
+
+ repo.fetch(branch).await.map_err(|e| anyhow::Error::new(e))
+ }
+
pub async fn del_branch(&self, project_id: &str, branch: &str) -> Result<(), git::Error> {
let repo;
{
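
Both new Roots methods follow the pattern already used by del_branch above: take the std::sync::Mutex only inside a short block, clone the per-project repo handle out of it, and drop the guard before any .await. Reduced to a standalone shape (Repo and the map are stand-ins for the actual types, not part of this diff):

    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};

    #[derive(Clone)]
    struct Repo;

    impl Repo {
        async fn fetch(&self, _branch: &str) -> anyhow::Result<String> {
            Ok("<commit-id>".to_string())
        }
    }

    struct Roots {
        project_repo: Mutex<HashMap<String, Arc<Repo>>>,
    }

    impl Roots {
        async fn fetch_branch(&self, project_id: &str, branch: &str) -> anyhow::Result<String> {
            // Hold the lock only long enough to clone the Arc; a std MutexGuard
            // is not Send, so it must not live across the .await below.
            let repo = {
                let data = self.project_repo.lock().unwrap();
                data.get(project_id)
                    .cloned()
                    .ok_or_else(|| anyhow::Error::msg("No such repo"))?
            };
            repo.fetch(branch).await
        }
    }
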
diff --git a/server/src/main.rs b/server/src/main.rs
index 66faec3..8c59b23 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -8,7 +8,7 @@ use rocket::http::Status;
use rocket::response::status::{Custom, NotFound};
use rocket::serde::json::Json;
use rocket::{futures, Build, Rocket, State};
-use rocket_db_pools::{sqlx, Connection, Database, Pool};
+use rocket_db_pools::{sqlx, Connection, Database};
use sqlx::Acquire;
use std::path::PathBuf;
use utoipa::OpenApi;
@@ -55,6 +55,9 @@ struct Db(sqlx::MySqlPool);
user_key_get,
user_key_del,
user_keys,
+ translation_review_new,
+ translation_review_strings,
+ translation_reviews,
),
modifiers(&AuthApiAddon),
)]
@@ -122,7 +125,7 @@ fn abbrivate_key(key: &str) -> String {
}
async fn get_project(
- db: &mut <<Db as Database>::Pool as Pool>::Connection,
+ mut db: Connection<Db>,
projectid: &str,
) -> Result<Json<api_model::Project>, NotFound<&'static str>> {
let users = sqlx::query!(
@@ -173,12 +176,11 @@ async fn get_project(
)]
#[get("/project/<projectid>")]
async fn project(
- db: &Db,
+ db: Connection<Db>,
_session: auth::Session,
projectid: &str,
) -> Result<Json<api_model::Project>, NotFound<&'static str>> {
- let mut conn = db.get().await.unwrap();
- get_project(&mut conn, projectid).await
+ get_project(db, projectid).await
}
// Remove linebreaks and potential openssh wrapper
@@ -206,6 +208,7 @@ fn cleanup_key(key: &str) -> String {
#[post("/project/<projectid>/new", data = "<data>")]
async fn project_new(
db: &Db,
+ conn: Connection<Db>,
git_roots_config: &State<git_root::Config<'_>>,
roots_state: &State<git_root::Roots>,
session: auth::Session,
@@ -219,9 +222,8 @@ async fn project_new(
};
let main_branch = data.main_branch.unwrap_or("main");
- let mut conn = db.get().await.unwrap();
{
- let mut tx = conn.begin().await.unwrap();
+ let mut tx = db.begin().await.unwrap();
sqlx::query!(
"INSERT INTO projects (id, title, description, remote, remote_key, main_branch) VALUES (?, ?, ?, ?, ?, ?)",
@@ -258,7 +260,7 @@ async fn project_new(
.map_err(|e| Custom(Status::InternalServerError, format!("{e}")))
.await?;
- Ok(get_project(&mut conn, projectid).await.unwrap())
+ Ok(get_project(conn, projectid).await.unwrap())
}
async fn project_check_maintainer(
@@ -536,15 +538,14 @@ async fn reviews(
}))
}
-async fn get_review_users(
- mut db: Connection<Db>,
+async fn get_project_default_review_users<'a>(
+ db: &mut Connection<Db>,
projectid: &str,
- reviewid: u64,
) -> Vec<api_model::ReviewUserEntry> {
- let mut users = sqlx::query!(
+ sqlx::query!(
"SELECT id,name,dn,project_users.default_role AS role FROM users JOIN project_users ON project_users.user=id WHERE project_users.project=? ORDER BY role,id",
projectid)
- .fetch(&mut **db)
+ .fetch(&mut ***db)
.map_ok(|r| api_model::ReviewUserEntry {
user: api_model::User {
id: r.id,
@@ -555,7 +556,15 @@ async fn get_review_users(
})
.try_collect::<Vec<_>>()
.await
- .unwrap();
+ .unwrap()
+}
+
+async fn get_review_users(
+ mut db: Connection<Db>,
+ projectid: &str,
+ reviewid: u64,
+) -> Vec<api_model::ReviewUserEntry> {
+ let mut users = get_project_default_review_users(&mut db, projectid).await;
let override_users = sqlx::query!(
"SELECT id,name,dn,review_users.role AS role FROM users JOIN review_users ON review_users.user=id WHERE review_users.review=? ORDER BY role,id",
@@ -1116,6 +1125,333 @@ fn healthcheck() -> &'static str {
""
}
+async fn get_translation_review(
+ mut db: Connection<Db>,
+ projectid: &str,
+ translation_reviewid: u64,
+) -> Result<Json<api_model::TranslationReview>, NotFound<&'static str>> {
+ let mut translation_review = sqlx::query!(
+ "SELECT title,description,state,progress,archived,base,head,users.id AS user_id,users.name AS name,users.dn AS user_dn FROM translation_reviews JOIN users ON users.id=owner WHERE project=? AND translation_reviews.id=?",
+ projectid, translation_reviewid)
+ .fetch_one(&mut **db)
+ .map_ok(|r| {
+ api_model::TranslationReview {
+ id: translation_reviewid,
+ title: r.title,
+ description: r.description,
+ owner: api_model::User {
+ id: r.user_id,
+ name: r.name,
+ active: r.user_dn.is_some(),
+ },
+ users: Vec::new(),
+ state: api_model::ReviewState::try_from(r.state).unwrap(),
+ progress: r.progress,
+ archived: r.archived != 0,
+ base: r.base,
+ head: r.head,
+ }
+ })
+ .map_err(|_| NotFound("No such review"))
+ .await?;
+
+ translation_review.users =
+ get_translation_review_users(db, projectid, translation_reviewid).await;
+
+ Ok(Json(translation_review))
+}
+
+async fn get_translation_review_users(
+ mut db: Connection<Db>,
+ projectid: &str,
+ translation_reviewid: u64,
+) -> Vec<api_model::ReviewUserEntry> {
+ let mut users = get_project_default_review_users(&mut db, projectid).await;
+
+ let override_users = sqlx::query!(
+ "SELECT id,name,dn,translation_review_users.role AS role FROM users JOIN translation_review_users ON translation_review_users.user=id WHERE translation_review_users.translation_review=? ORDER BY role,id",
+ translation_reviewid)
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::ReviewUserEntry {
+ user: api_model::User {
+ id: r.id,
+ name: r.name,
+ active: r.dn.is_some(),
+ },
+ role: api_model::UserReviewRole::try_from(r.role).unwrap(),
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+
+ for override_user in override_users {
+ if let Some(user) = users
+ .iter_mut()
+ .find(|ue| ue.user.id == override_user.user.id)
+ {
+ user.role = override_user.role;
+ } else {
+ users.push(override_user);
+ }
+ }
+
+ users
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Translation review created", body = api_model::TranslationReview),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[post("/translation/<projectid>/new", data = "<data>")]
+async fn translation_review_new(
+ db: &Db,
+ mut conn: Connection<Db>,
+ roots_state: &State<git_root::Roots>,
+ session: auth::Session,
+ projectid: &str,
+ data: Json<api_model::TranslationReviewData<'_>>,
+) -> Result<Json<api_model::TranslationReview>, Custom<String>> {
+ let title = if data.title == "" {
+ "Unnamed"
+ } else {
+ data.title.as_str()
+ };
+ let mut base = data.base.unwrap_or("").to_string();
+ let translation_reviewid: u64;
+
+ {
+ let mut tx = conn.begin().await.unwrap();
+
+ if base == "" {
+ let main_branch =
+ sqlx::query!("SELECT main_branch FROM projects WHERE id=?", projectid)
+ .fetch_one(&mut *tx)
+ .map_ok(|r| r.main_branch)
+ .map_err(|_| Custom(Status::NotFound, "No such project".to_string()))
+ .await?;
+
+ base = roots_state
+ .fetch_branch(projectid, main_branch.as_str())
+ .map_err(|_| Custom(Status::InternalServerError, "git error".to_string()))
+ .await?;
+ }
+
+ let r = sqlx::query!(
+ "INSERT INTO translation_reviews (project, owner, title, description, base, head) VALUES (?, ?, ?, ?, ?, ?)",
+ projectid,
+ session.user_id,
+ title,
+ data.description,
+ base,
+ base,
+ )
+ .execute(&mut *tx)
+ .map_err(|e| Custom(Status::InternalServerError, format!("Database error: {e:?}")))
+ .await?;
+
+ translation_reviewid = r.last_insert_id();
+
+ tx.commit().await.unwrap();
+ }
+
+ roots_state
+ .new_translation_review(db, projectid, translation_reviewid, base.as_str())
+ .map_err(|e| Custom(Status::InternalServerError, format!("{e}")))
+ .await?;
+
+ Ok(
+ get_translation_review(conn, projectid, translation_reviewid)
+ .await
+ .unwrap(),
+ )
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Get all translation reviews for project", body = api_model::TranslationReviews),
+ (status = 404, description = "No such project"),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[get("/project/<projectid>/translations?<limit>&<offset>")]
+async fn translation_reviews(
+ mut db: Connection<Db>,
+ _session: auth::Session,
+ projectid: &str,
+ limit: Option<u32>,
+ offset: Option<u32>,
+) -> Result<Json<api_model::TranslationReviews>, NotFound<&'static str>> {
+ let uw_offset = offset.unwrap_or(0);
+ let uw_limit = limit.unwrap_or(10);
+ let entries = sqlx::query!(
+ "SELECT translation_reviews.id AS id,title,state,progress,base,head,users.id AS user_id,users.name AS name,users.dn AS user_dn FROM translation_reviews JOIN users ON users.id=owner WHERE project=? ORDER BY id DESC LIMIT ? OFFSET ?",
+ projectid, uw_limit, uw_offset)
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::TranslationReviewEntry {
+ id: r.id,
+ title: r.title,
+ owner: api_model::User {
+ id: r.user_id,
+ name: r.name,
+ active: r.user_dn.is_some(),
+ },
+ state: api_model::ReviewState::try_from(r.state).unwrap(),
+ progress: r.progress,
+ base: r.base,
+ head: r.head,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+
+ let count = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM translation_reviews WHERE project=?",
+ projectid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+
+ if count == 0 {
+ let projects = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM projects WHERE id=?",
+ projectid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+ if projects == 0 {
+ return Err(NotFound("No such project"));
+ }
+ }
+
+ let u32_count = u32::try_from(count).unwrap();
+
+ Ok(Json(api_model::TranslationReviews {
+ offset: uw_offset,
+ limit: uw_limit,
+ total_count: u32_count,
+ more: uw_offset + uw_limit < u32_count,
+ reviews: entries,
+ }))
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Get all strings for a translation review", body = api_model::LocalizationStrings),
+ (status = 404, description = "No such translation review"),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[get("/translation/<translation_reviewid>/strings?<limit>&<offset>")]
+async fn translation_review_strings(
+ mut db: Connection<Db>,
+ _session: auth::Session,
+ translation_reviewid: u64,
+ limit: Option<u32>,
+ offset: Option<u32>,
+) -> Result<Json<api_model::LocalizationStrings>, NotFound<&'static str>> {
+ let uw_offset = offset.unwrap_or(0);
+ let uw_limit = limit.unwrap_or(10);
+ let (ids, mut entries) = sqlx::query!(
+ "SELECT id,name,file,description,meaning,source,placeholder_offsets FROM localization_strings WHERE translation_review=? ORDER BY id ASC LIMIT ? OFFSET ?",
+ translation_reviewid, uw_limit, uw_offset)
+ .fetch(&mut **db)
+ .try_fold((Vec::new(), Vec::new()), async move |mut vecs, r| {
+ vecs.0.push(r.id);
+ vecs.1.push(api_model::LocalizationString {
+ id: r.name,
+ file: r.file,
+ description: r.description,
+ meaning: r.meaning,
+ source: r.source,
+ placeholders: Vec::new(),
+ placeholder_offset: r.placeholder_offsets.split_terminator(',').map(|x| x.parse::<usize>().unwrap()).collect(),
+ translations: Vec::new(),
+ });
+ Ok(vecs)
+ })
+ .await
+ .unwrap();
+
+ for (i, entry) in entries.iter_mut().enumerate() {
+ if !entry.placeholder_offset.is_empty() {
+ entry.placeholders = sqlx::query!(
+ "SELECT name,content,example FROM localization_placeholders WHERE localization_string=? ORDER BY id ASC",
+ ids[i])
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::LocalizationPlaceholder {
+ id: r.name,
+ content: r.content,
+ example: r.example,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+ }
+
+ entry.translations = sqlx::query!(
+ "SELECT language,head_translation,head_placeholder_offsets,state,comment FROM translation_strings WHERE localization_string=? ORDER BY language ASC",
+ ids[i])
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::TranslationString {
+ language: r.language,
+ translation: r.head_translation,
+ placeholder_offset: r.head_placeholder_offsets.split_terminator(',').map(|x| x.parse::<usize>().unwrap()).collect(),
+ state: api_model::TranslationState::try_from(r.state).unwrap(),
+ comment: r.comment,
+ // TODO
+ reviewer: None,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+ }
+
+ let count = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM localization_strings WHERE translation_review=?",
+ translation_reviewid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+
+ if count == 0 {
+ let reviews = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM translation_reviews WHERE id=?",
+ translation_reviewid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+ if reviews == 0 {
+ return Err(NotFound("No such translation review"));
+ }
+ }
+
+ let u32_count = u32::try_from(count).unwrap();
+
+ Ok(Json(api_model::LocalizationStrings {
+ offset: uw_offset,
+ limit: uw_limit,
+ total_count: u32_count,
+ more: uw_offset + uw_limit < u32_count,
+ strings: entries,
+ }))
+}
+
async fn run_migrations(rocket: Rocket<Build>) -> fairing::Result {
match Db::fetch(&rocket) {
Some(db) => match sqlx::migrate!().run(&**db).await {
@@ -1158,6 +1494,9 @@ fn rocket_from_config(figment: Figment) -> Rocket<Build> {
user_key_get,
user_key_del,
user_keys,
+ translation_review_new,
+ translation_review_strings,
+ translation_reviews,
],
)
.attach(auth::stage(basepath))
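
One detail worth calling out in the main.rs changes: get_translation_review_users applies the same merge rule as regular reviews, which is why get_project_default_review_users was factored out. Project defaults come first, then per-review rows either override a user's role or add a user that has no default. Stripped of the SQL, the rule is roughly as follows (UserEntry is a stand-in for api_model::ReviewUserEntry):

    struct UserEntry {
        user_id: String,
        role: u8,
    }

    // Per-review overrides win over project defaults; users that only appear
    // in the override list are appended at the end.
    fn merge_review_users(mut defaults: Vec<UserEntry>, overrides: Vec<UserEntry>) -> Vec<UserEntry> {
        for o in overrides {
            if let Some(existing) = defaults.iter_mut().find(|d| d.user_id == o.user_id) {
                existing.role = o.role;
            } else {
                defaults.push(o);
            }
        }
        defaults
    }
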
diff --git a/server/src/tests.rs b/server/src/tests.rs
index c7c1d8a..44d56d5 100644
--- a/server/src/tests.rs
+++ b/server/src/tests.rs
@@ -995,7 +995,6 @@ async fn test_collect_strings() {
source: "Extra title".to_string(),
placeholders: vec![],
placeholder_offset: vec![],
- translation_id: 3567801202192813433,
translations: vec![],
},
api_model::LocalizationString {
@@ -1006,27 +1005,38 @@ async fn test_collect_strings() {
source: "Bookmarks".to_string(),
placeholders: vec![],
placeholder_offset: vec![],
- translation_id: 8820817407110198400,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "Bookmarks".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: "ဝက်ဘ်လိပ်စာ မှတ်ထားမှုများ".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: "ဝက္ဘ္လိပ္စာ မွတ္ထားမွုမ်ား".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "Bokmärken".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1044,27 +1054,38 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![11],
- translation_id: 8443102241046796905,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "Welcome to ".to_string(),
placeholder_offset: vec![11],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: " မှ ကြိုဆိုပါသည်".to_string(),
placeholder_offset: vec![0],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: " မွ ႀကိဳဆိုပါသည္".to_string(),
placeholder_offset: vec![0],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "Välkommen till ".to_string(),
placeholder_offset: vec![16],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1097,27 +1118,38 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![54, 70, 140, 157],
- translation_id: 2466140279568640908,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "By using this application you are agreeing to Opera's Terms of Service. Also, you can learn how Opera handles and protects your data in our Privacy Statement.".to_string(),
placeholder_offset: vec![54, 70, 140, 157],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: "ဤအပလီကေးရှင်းကို အသုံးပြုခြင်းဖြင့် သင်သည် Opera ၏ ဝန်ဆောင်မှုစည်းမျဉ်းများ ကို သဘောတူရာ ရောက်ပါသည်။ ထို့အပြင် ကျွန်ုပ်တို့၏ကိုယ်ရေးလုံခြုံမှု ထုတ်ပြန်ချက် ထဲတွင် သင့်ဒေတာများကို Opera ၏ ကိုင်တွယ်ပုံနှင့် ကာကွယ်ပုံတို့ကိုလည်း လေ့လာနိုင်သည်။".to_string(),
placeholder_offset: vec![133, 205, 342, 433],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: "ဤအပလီေကးရွင္းကို အသုံးျပဳျခင္းျဖင့္ သင္သည္ Opera ၏ ဝန္ေဆာင္မွုစည္းမ်ဥ္းမ်ား ကို သေဘာတူရာ ေရာက္ပါသည္။ ထို႔အျပင္ ကၽြန္ုပ္တို႔၏ကိုယ္ေရးလုံျခဳံမွု ထုတ္ျပန္ခ်က္ ထဲတြင္ သင့္ေဒတာမ်ားကို Opera ၏ ကိုင္တြယ္ပုံႏွင့္ ကာကြယ္ပုံတို႔ကိုလည္း ေလ့လာနိုင္သည္။".to_string(),
placeholder_offset: vec![133, 205, 342, 433],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "I och med din användning av det här programmet samtycker du till Operas Licensvillkor. Du kan också läsa om hur Opera hanterar och skyddar dina data i vårt Sekretessmeddelande.".to_string(),
placeholder_offset: vec![74, 87, 161, 180],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1150,12 +1182,14 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![34, 65, 98, 132],
- translation_id: 7770247413830876286,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "{BOOKMARKS, plural,\n one { folder deleted}\n few { folders deleted}\n many { folders deleted}\n other { folders deleted}}".to_string(),
placeholder_offset: vec![35, 67, 101, 136],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
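
The trans.rs changes below generalize collect_strings over how .grd and .xlf sources are opened: collect_strings keeps the old filesystem behaviour, while git_root.rs above feeds collect_strings_with_opener blobs read via git cat-file. Any opener satisfying Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + Sync + 'static works; for tests, an in-memory variant is enough. A hypothetical sketch (collect_from_memory and its HashMap input are not part of this diff):

    use std::collections::HashMap;
    use std::io::{BufReader, Cursor};
    use std::sync::Arc;

    // Serve sources from memory instead of the filesystem or git.
    async fn collect_from_memory(
        files: HashMap<String, Vec<u8>>,
    ) -> anyhow::Result<Vec<api_model::LocalizationString>> {
        let grits = files
            .keys()
            .filter(|p| p.ends_with(".grd"))
            .cloned()
            .collect::<Vec<_>>();
        let files = Arc::new(files);
        trans::collect_strings_with_opener(grits, move |path| {
            files
                .get(path)
                .map(|bytes| BufReader::new(Cursor::new(bytes.clone())))
                .ok_or_else(|| anyhow::Error::msg(format!("No such file: {path}")))
        })
        .await
    }
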
diff --git a/server/src/trans.rs b/server/src/trans.rs
index 6a18e27..c4e3b45 100644
--- a/server/src/trans.rs
+++ b/server/src/trans.rs
@@ -1,27 +1,46 @@
+#![allow(dead_code)]
+
use anyhow;
+use futures::stream::TryStreamExt;
+use rocket_db_pools::{sqlx, Database, Pool};
+use sorted_insert::SortedInsertByKey;
+use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
+use std::fs::File;
+use std::io::{BufReader, Read};
use std::iter::{repeat, IntoIterator};
-use std::path::{Path, PathBuf};
+use std::path::Path;
use tokio::task::JoinSet;
+use crate::Db;
use eyeballs_api::api_model;
use eyeballs_common::grit;
-fn schedule_translations(
+type DbPool = <Db as Database>::Pool;
+type DbConnection = <DbPool as Pool>::Connection;
+
+fn schedule_translations<F, R>(
tasks: &mut JoinSet<anyhow::Result<grit::TranslationFile>>,
known: &mut HashSet<String>,
- path: &Path,
+ opener: &F,
files: &Vec<grit::IfFile>,
-) {
+) where
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + 'static,
+ R: Read,
+{
for file in files {
match file {
grit::IfFile::File(file) => {
if known.insert(file.path.to_string()) {
- tasks.spawn(grit::parse_xlf(path.join(file.path.as_str())));
+ let path = file.path.to_string();
+ let opener = opener.clone();
+ tasks.spawn(async {
+ grit::parse_xlf_with_opener(move || opener(path.as_str())).await
+ });
}
}
grit::IfFile::If { expr: _, file } => {
- schedule_translations(tasks, known, path, file);
+ schedule_translations(tasks, known, opener, file);
}
}
}
@@ -29,6 +48,7 @@ fn schedule_translations(
fn push_strings(
strings: &mut Vec<api_model::LocalizationString>,
+ translation_ids: &mut Vec<i64>,
file: &String,
messages: Vec<grit::IfMessagePart>,
) {
@@ -71,15 +91,15 @@ fn push_strings(
source,
placeholders,
placeholder_offset,
- translation_id,
translations: Vec::<api_model::TranslationString>::new(),
});
+ translation_ids.push(translation_id);
}
grit::IfMessagePart::If { expr: _, message } => {
- push_strings(strings, file, message);
+ push_strings(strings, translation_ids, file, message);
}
grit::IfMessagePart::Part { file, messages } => {
- push_strings(strings, &file, messages);
+ push_strings(strings, translation_ids, &file, messages);
}
}
}
@@ -126,52 +146,87 @@ fn push_translation(
}
}
- string.translations.push(api_model::TranslationString {
- language: language.to_string(),
- translation,
- placeholder_offset,
- })
+ string.translations.sorted_insert_asc_by_key(
+ api_model::TranslationString {
+ language: language.to_string(),
+ translation,
+ placeholder_offset,
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
+ },
+ |e| &e.language,
+ );
}
pub async fn collect_strings(
base: impl AsRef<Path>,
grits: impl IntoIterator<Item = String>,
) -> anyhow::Result<Vec<api_model::LocalizationString>> {
+ let base = base.as_ref().to_path_buf();
+ collect_strings_with_opener(grits, move |x| {
+ let path = base.join(x);
+ Ok(BufReader::new(File::open(path)?))
+ })
+ .await
+}
+
+pub async fn collect_strings_with_opener<F, R>(
+ grits: impl IntoIterator<Item = String>,
+ opener: F,
+) -> anyhow::Result<Vec<api_model::LocalizationString>>
+where
+ // TODO: Would like to avoid Sync here, it was possible in grit but not here
+ // for some reason.
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + Sync + 'static,
+ R: Read,
+{
let mut grit_tasks = JoinSet::new();
for grit_name in grits {
- let grit_path = base.as_ref().join(grit_name.as_str());
+ let opener_copy = opener.clone();
+ let grit_name_copy = grit_name.clone();
+ let grit_opener = move || opener_copy(grit_name_copy.as_str());
+
+ let part_opener = opener.clone();
grit_tasks.spawn(async move {
- let tmp = grit::parse_grit_with_parts(grit_path.as_path()).await;
- (grit_path, grit_name, tmp)
+ let tmp = grit::parse_grit_with_parts_and_opener(grit_opener, part_opener).await;
+ (grit_name, tmp)
});
}
let mut parsed_grits =
- Vec::<(PathBuf, String, anyhow::Result<grit::Grit>)>::with_capacity(grit_tasks.len());
+ Vec::<(String, anyhow::Result<grit::Grit>)>::with_capacity(grit_tasks.len());
while let Some(res) = grit_tasks.join_next().await {
parsed_grits.push(res?);
}
let mut strings = Vec::<api_model::LocalizationString>::new();
+ let mut translation_ids = Vec::<i64>::new();
let mut translation_tasks = JoinSet::new();
let mut known_translations = HashSet::<String>::new();
- for (grit_path, grit_name, maybe_grit) in parsed_grits {
+ for (grit_name, maybe_grit) in parsed_grits {
let grit = maybe_grit?;
schedule_translations(
&mut translation_tasks,
&mut known_translations,
- grit_path.parent().unwrap(),
+ &opener,
&grit.translations.file,
);
let first_index = strings.len();
- push_strings(&mut strings, &grit_name, grit.release.messages.messages);
+ push_strings(
+ &mut strings,
+ &mut translation_ids,
+ &grit_name,
+ grit.release.messages.messages,
+ );
- let mut id_to_string = HashMap::<i64, usize>::with_capacity(strings.len() - first_index);
- for i in first_index..strings.len() {
- id_to_string.insert(strings[i].translation_id, i);
+ let mut id_to_string =
+ HashMap::<i64, usize>::with_capacity(translation_ids.len() - first_index);
+ for i in first_index..translation_ids.len() {
+ id_to_string.insert(translation_ids[i], i);
}
while let Some(res) = translation_tasks.join_next().await {
@@ -190,3 +245,212 @@ pub async fn collect_strings(
Ok(strings)
}
+
+#[derive(Hash, PartialEq, Eq)]
+struct LocalizationStringKey<'a> {
+ file: Cow<'a, str>,
+ name: Cow<'a, str>,
+ meaning: Cow<'a, str>,
+}
+
+struct TranslationString {
+ base_translation: Option<String>,
+ base_placeholder_offsets: Option<String>,
+ head_translation: String,
+ head_placeholder_offsets: String,
+ state: api_model::TranslationState,
+}
+
+pub async fn review_add_strings(
+ db: &Db,
+ translation_reviewid: u64,
+ strings: Vec<api_model::LocalizationString>,
+ base: bool,
+) -> anyhow::Result<()> {
+ {
+ let mut tx = db.begin().await?;
+
+ let existing = sqlx::query!(
+ "SELECT id, name, file, meaning FROM localization_strings WHERE translation_review=?",
+ translation_reviewid
+ )
+ .fetch(&mut *tx)
+ .try_fold(HashMap::new(), async move |mut ex, r| {
+ ex.insert(
+ LocalizationStringKey {
+ file: r.file.into(),
+ name: r.name.into(),
+ meaning: r.meaning.into(),
+ },
+ r.id,
+ );
+ Ok(ex)
+ })
+ .await
+ .unwrap();
+
+ for string in strings {
+ let key = LocalizationStringKey {
+ file: string.file.as_str().into(),
+ name: string.id.as_str().into(),
+ meaning: string.meaning.as_str().into(),
+ };
+ let id: u64;
+ let placeholder_offsets = string
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+ if let Some(existing_id) = existing.get(&key) {
+ sqlx::query!(
+ "UPDATE localization_strings SET description=?, source=?, placeholder_offsets=? WHERE id=?",
+ string.description,
+ string.source,
+ placeholder_offsets,
+ existing_id)
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+ // TODO: Might be worth checking what needs updating but meh.
+ sqlx::query!(
+ "DELETE FROM localization_placeholders WHERE localization_string=?",
+ existing_id
+ )
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+ id = *existing_id;
+ } else {
+ let result = sqlx::query!(
+ "INSERT INTO localization_strings (translation_review, name, file, description, meaning, source, placeholder_offsets) VALUES (?, ?, ?, ?, ?, ?, ?)",
+ translation_reviewid,
+ string.id,
+ string.file,
+ string.description,
+ string.meaning,
+ string.source,
+ placeholder_offsets)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ id = result.last_insert_id();
+ }
+
+ for placeholder in string.placeholders {
+ sqlx::query!(
+ "INSERT INTO localization_placeholders (localization_string, name, content, example) VALUES (?, ?, ?, ?)",
+ id,
+ placeholder.id,
+ placeholder.content,
+ placeholder.example)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+
+ if base {
+ sqlx::query!(
+ "DELETE FROM translation_strings WHERE localization_string=?",
+ id
+ )
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+
+ for translation in string.translations {
+ let placeholder_offsets = translation
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+ // Mark all as Unchanged as base == head here.
+ sqlx::query!(
+ "INSERT INTO translation_strings (localization_string, language, base_translation, base_placeholder_offsets, head_translation, head_placeholder_offsets, state) VALUES (?, ?, ?, ?, ?, ?, ?)",
+ id,
+ translation.language,
+ translation.translation,
+ placeholder_offsets,
+ translation.translation,
+ placeholder_offsets,
+ u8::from(api_model::TranslationState::Unchanged))
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ } else {
+ let existing = sqlx::query!("SELECT language, base_translation, base_placeholder_offsets, head_translation, head_placeholder_offsets, state FROM translation_strings WHERE localization_string=?", id)
+ .fetch(&mut *tx)
+ .try_fold(HashMap::new(), async move |mut ex, r| {
+ ex.insert(r.language,
+ TranslationString {
+ base_translation: r.base_translation,
+ base_placeholder_offsets: r.base_placeholder_offsets,
+ head_translation: r.head_translation,
+ head_placeholder_offsets: r.head_placeholder_offsets,
+ state: api_model::TranslationState::try_from(r.state).unwrap_or(api_model::TranslationState::Unreviewed),
+ });
+ Ok(ex)
+ })
+ .await
+ .unwrap();
+
+ for translation in string.translations {
+ let placeholder_offsets = translation
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+
+ if let Some(existing_translation) = existing.get(translation.language.as_str())
+ {
+ if existing_translation.head_translation != translation.translation
+ || existing_translation.head_placeholder_offsets != placeholder_offsets
+ {
+ // Reset state whenever translation changes
+ let new_state = if existing_translation
+ .base_translation
+ .as_ref()
+ .is_some_and(|x| *x == translation.translation)
+ && existing_translation
+ .base_placeholder_offsets
+ .as_ref()
+ .is_some_and(|x| *x == placeholder_offsets)
+ {
+ api_model::TranslationState::Unchanged
+ } else {
+ api_model::TranslationState::Unreviewed
+ };
+ sqlx::query!(
+ "UPDATE translation_strings SET head_translation=?, head_placeholder_offsets=?, state=? WHERE localization_string = ? AND language = ?",
+ translation.translation,
+ placeholder_offsets,
+ u8::from(new_state),
+ id,
+ translation.language)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ } else {
+ sqlx::query!(
+ "INSERT INTO translation_strings (localization_string, language, head_translation, head_placeholder_offsets) VALUES (?, ?, ?, ?)",
+ id,
+ translation.language,
+ translation.translation,
+ placeholder_offsets)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ }
+ }
+ }
+
+ tx.commit().await?;
+ }
+
+ Ok(())
+}
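
The state handling in review_add_strings reduces to one rule. On a base import (base == head) every translation is stored as Unchanged; on a later head update, a translation whose text and placeholder offsets match the recorded base goes back to Unchanged, and anything that moved away from it is reset to Unreviewed so it shows up for review again. Restated on its own, with a stand-in for api_model::TranslationState (the real enum has more variants):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TranslationState {
        Unchanged,
        Unreviewed,
    }

    // New state for an updated head translation, mirroring the rule above:
    // equal to the stored base means Unchanged, otherwise the previous
    // review verdict is discarded.
    fn state_after_head_update(
        base_translation: Option<&str>,
        base_offsets: Option<&str>,
        new_translation: &str,
        new_offsets: &str,
    ) -> TranslationState {
        if base_translation.is_some_and(|b| b == new_translation)
            && base_offsets.is_some_and(|b| b == new_offsets)
        {
            TranslationState::Unchanged
        } else {
            TranslationState::Unreviewed
        }
    }
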