summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--server/.sqlx/query-1017223642074d4567738e03c40357a4a6c5dfadb77c21b99e26db113bb97339.json58
-rw-r--r--server/.sqlx/query-3d22e3c8189b094ff2c241d46e8b93bcdf2bdcc59feeec3e750e81fdb2aa122b.json25
-rw-r--r--server/.sqlx/query-9ba4679a5c8d66008a1ad6f79870365aa75d1832440f7d0e7a1b79f0f8827b9a.json124
-rw-r--r--server/.sqlx/query-c71522cbb790b5ba59d1a88c129b7b8c4fed68313bb8855702a38299d35d2bf9.json12
-rw-r--r--server/Cargo.lock7
-rw-r--r--server/Cargo.toml1
-rw-r--r--server/api/src/api_model.rs120
-rw-r--r--server/common/src/git.rs61
-rw-r--r--server/common/src/grit.rs98
-rw-r--r--server/migrations/1_initial_eyeballs.sql88
-rw-r--r--server/src/git_root.rs67
-rw-r--r--server/src/main.rs367
-rw-r--r--server/src/tests.rs44
-rw-r--r--server/src/trans.rs312
-rw-r--r--server/tests/common/mod.rs155
-rw-r--r--server/tests/integration_test.rs98
16 files changed, 1567 insertions, 70 deletions
diff --git a/server/.sqlx/query-1017223642074d4567738e03c40357a4a6c5dfadb77c21b99e26db113bb97339.json b/server/.sqlx/query-1017223642074d4567738e03c40357a4a6c5dfadb77c21b99e26db113bb97339.json
new file mode 100644
index 0000000..0c10e57
--- /dev/null
+++ b/server/.sqlx/query-1017223642074d4567738e03c40357a4a6c5dfadb77c21b99e26db113bb97339.json
@@ -0,0 +1,58 @@
+{
+ "db_name": "MySQL",
+ "query": "SELECT id,name,dn,translation_review_users.role AS role FROM users JOIN translation_review_users ON translation_review_users.user=id WHERE translation_review_users.translation_review=? ORDER BY role,id",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "id",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | PRIMARY_KEY | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 512
+ }
+ },
+ {
+ "ordinal": 1,
+ "name": "name",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL",
+ "char_set": 224,
+ "max_size": 4096
+ }
+ },
+ {
+ "ordinal": 2,
+ "name": "dn",
+ "type_info": {
+ "type": "VarString",
+ "flags": "",
+ "char_set": 224,
+ "max_size": 1024
+ }
+ },
+ {
+ "ordinal": 3,
+ "name": "role",
+ "type_info": {
+ "type": "Tiny",
+ "flags": "NOT_NULL | UNSIGNED",
+ "char_set": 63,
+ "max_size": 3
+ }
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ false,
+ false,
+ true,
+ false
+ ]
+ },
+ "hash": "1017223642074d4567738e03c40357a4a6c5dfadb77c21b99e26db113bb97339"
+}
diff --git a/server/.sqlx/query-3d22e3c8189b094ff2c241d46e8b93bcdf2bdcc59feeec3e750e81fdb2aa122b.json b/server/.sqlx/query-3d22e3c8189b094ff2c241d46e8b93bcdf2bdcc59feeec3e750e81fdb2aa122b.json
new file mode 100644
index 0000000..87dea51
--- /dev/null
+++ b/server/.sqlx/query-3d22e3c8189b094ff2c241d46e8b93bcdf2bdcc59feeec3e750e81fdb2aa122b.json
@@ -0,0 +1,25 @@
+{
+ "db_name": "MySQL",
+ "query": "SELECT main_branch FROM projects WHERE id=?",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "main_branch",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 4096
+ }
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "3d22e3c8189b094ff2c241d46e8b93bcdf2bdcc59feeec3e750e81fdb2aa122b"
+}
diff --git a/server/.sqlx/query-9ba4679a5c8d66008a1ad6f79870365aa75d1832440f7d0e7a1b79f0f8827b9a.json b/server/.sqlx/query-9ba4679a5c8d66008a1ad6f79870365aa75d1832440f7d0e7a1b79f0f8827b9a.json
new file mode 100644
index 0000000..6f02f6b
--- /dev/null
+++ b/server/.sqlx/query-9ba4679a5c8d66008a1ad6f79870365aa75d1832440f7d0e7a1b79f0f8827b9a.json
@@ -0,0 +1,124 @@
+{
+ "db_name": "MySQL",
+ "query": "SELECT title,description,state,progress,archived,base,head,users.id AS user_id,users.name AS name,users.dn AS user_dn FROM translation_reviews JOIN users ON users.id=owner WHERE project=? AND translation_reviews.id=?",
+ "describe": {
+ "columns": [
+ {
+ "ordinal": 0,
+ "name": "title",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 4096
+ }
+ },
+ {
+ "ordinal": 1,
+ "name": "description",
+ "type_info": {
+ "type": "Blob",
+ "flags": "NOT_NULL | BLOB",
+ "char_set": 224,
+ "max_size": 67108860
+ }
+ },
+ {
+ "ordinal": 2,
+ "name": "state",
+ "type_info": {
+ "type": "Tiny",
+ "flags": "NOT_NULL | UNSIGNED",
+ "char_set": 63,
+ "max_size": 3
+ }
+ },
+ {
+ "ordinal": 3,
+ "name": "progress",
+ "type_info": {
+ "type": "Float",
+ "flags": "NOT_NULL",
+ "char_set": 63,
+ "max_size": 12
+ }
+ },
+ {
+ "ordinal": 4,
+ "name": "archived",
+ "type_info": {
+ "type": "Tiny",
+ "flags": "NOT_NULL",
+ "char_set": 63,
+ "max_size": 1
+ }
+ },
+ {
+ "ordinal": 5,
+ "name": "base",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 160
+ }
+ },
+ {
+ "ordinal": 6,
+ "name": "head",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 160
+ }
+ },
+ {
+ "ordinal": 7,
+ "name": "user_id",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL | PRIMARY_KEY | NO_DEFAULT_VALUE",
+ "char_set": 224,
+ "max_size": 512
+ }
+ },
+ {
+ "ordinal": 8,
+ "name": "name",
+ "type_info": {
+ "type": "VarString",
+ "flags": "NOT_NULL",
+ "char_set": 224,
+ "max_size": 4096
+ }
+ },
+ {
+ "ordinal": 9,
+ "name": "user_dn",
+ "type_info": {
+ "type": "VarString",
+ "flags": "",
+ "char_set": 224,
+ "max_size": 1024
+ }
+ }
+ ],
+ "parameters": {
+ "Right": 2
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true
+ ]
+ },
+ "hash": "9ba4679a5c8d66008a1ad6f79870365aa75d1832440f7d0e7a1b79f0f8827b9a"
+}
diff --git a/server/.sqlx/query-c71522cbb790b5ba59d1a88c129b7b8c4fed68313bb8855702a38299d35d2bf9.json b/server/.sqlx/query-c71522cbb790b5ba59d1a88c129b7b8c4fed68313bb8855702a38299d35d2bf9.json
new file mode 100644
index 0000000..f5932db
--- /dev/null
+++ b/server/.sqlx/query-c71522cbb790b5ba59d1a88c129b7b8c4fed68313bb8855702a38299d35d2bf9.json
@@ -0,0 +1,12 @@
+{
+ "db_name": "MySQL",
+ "query": "INSERT INTO translation_reviews (project, owner, title, description, base, head) VALUES (?, ?, ?, ?, ?, ?)",
+ "describe": {
+ "columns": [],
+ "parameters": {
+ "Right": 6
+ },
+ "nullable": []
+ },
+ "hash": "c71522cbb790b5ba59d1a88c129b7b8c4fed68313bb8855702a38299d35d2bf9"
+}
diff --git a/server/Cargo.lock b/server/Cargo.lock
index 5f952fd..4ced6c7 100644
--- a/server/Cargo.lock
+++ b/server/Cargo.lock
@@ -595,6 +595,7 @@ dependencies = [
"rocket",
"rocket_db_pools",
"serde",
+ "sorted-insert",
"sqlx",
"stdext",
"test-context",
@@ -2499,6 +2500,12 @@ dependencies = [
]
[[package]]
+name = "sorted-insert"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eec75fe132d95908f1c030f93630bc20b76f3ebaeca789a6180553b770ddcd39"
+
+[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 894009b..01ae7d5 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -32,6 +32,7 @@ rmp-serde.workspace = true
rocket = { version = "0.5.1", features = ["json", "secrets"] }
rocket_db_pools = { version = "0.2.0", features = ["sqlx_mysql"] }
serde.workspace = true
+sorted-insert = "0.2.6"
sqlx = { version = "0.7.0", default-features = false, features = ["macros", "migrate"] }
time = "0.3.34"
tokio = { workspace = true, features = ["process"] }
diff --git a/server/api/src/api_model.rs b/server/api/src/api_model.rs
index 7dd2a20..1c0572f 100644
--- a/server/api/src/api_model.rs
+++ b/server/api/src/api_model.rs
@@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
-#[derive(Copy, Clone, Deserialize, Serialize, ToSchema)]
+#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Serialize, ToSchema)]
pub enum ReviewState {
Draft,
Open,
@@ -73,6 +73,42 @@ impl From<UserReviewRole> for u8 {
}
}
+#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Serialize, ToSchema)]
+pub enum TranslationState {
+ Unreviewed,
+ Unchanged,
+ Approved,
+ Revert,
+ Fix,
+}
+
+impl TryFrom<u8> for TranslationState {
+ type Error = &'static str;
+
+ fn try_from(value: u8) -> Result<Self, Self::Error> {
+ match value {
+ 0 => Ok(TranslationState::Unreviewed),
+ 1 => Ok(TranslationState::Unchanged),
+ 2 => Ok(TranslationState::Approved),
+ 3 => Ok(TranslationState::Revert),
+ 4 => Ok(TranslationState::Fix),
+ _ => Err("Invalid translation state"),
+ }
+ }
+}
+
+impl From<TranslationState> for u8 {
+ fn from(value: TranslationState) -> u8 {
+ match value {
+ TranslationState::Unreviewed => 0,
+ TranslationState::Unchanged => 1,
+ TranslationState::Approved => 2,
+ TranslationState::Revert => 3,
+ TranslationState::Fix => 4,
+ }
+ }
+}
+
#[derive(Debug, Deserialize, Serialize, PartialEq, ToSchema)]
pub struct User {
#[schema(example = "jsmith")]
@@ -96,7 +132,7 @@ pub struct Users {
pub users: Vec<User>,
}
-#[derive(Serialize, ToSchema)]
+#[derive(Debug, Deserialize, PartialEq, Serialize, ToSchema)]
pub struct ReviewUserEntry {
pub user: User,
#[schema(example = UserReviewRole::Reviewer)]
@@ -280,8 +316,6 @@ pub struct LocalizationString {
pub source: String,
pub placeholders: Vec<LocalizationPlaceholder>,
pub placeholder_offset: Vec<usize>,
- #[schema(example = "123456")]
- pub translation_id: i64,
pub translations: Vec<TranslationString>,
}
@@ -302,4 +336,82 @@ pub struct TranslationString {
#[schema(example = "Hej!")]
pub translation: String,
pub placeholder_offset: Vec<usize>,
+ pub state: TranslationState,
+ pub comment: String,
+ pub reviewer: Option<User>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Serialize, ToSchema)]
+pub struct TranslationReview {
+ #[schema(example = 1u64)]
+ pub id: u64,
+ #[schema(example = "FAKE-512: Update translations")]
+ pub title: String,
+ #[schema(example = "New translations")]
+ pub description: String,
+ pub owner: User,
+ pub users: Vec<ReviewUserEntry>,
+ #[schema(example = ReviewState::Open)]
+ pub state: ReviewState,
+ #[schema(example = 37.5)]
+ pub progress: f32,
+ #[schema(example = false)]
+ pub archived: bool,
+ #[schema(example = "d7c502b9c6b833060576a0c4da0287933d603011")]
+ pub base: String,
+ #[schema(example = "2cecdec660a30bf3964cee645d9cee03640ef8dc")]
+ pub head: String,
+}
+
+#[derive(Deserialize, Serialize, ToSchema)]
+pub struct TranslationReviewData<'r> {
+ #[schema(example = "FAKE-512: Update translations")]
+ pub title: String,
+ #[schema(example = "New translations")]
+ pub description: String,
+ #[schema(example = "d7c502b9c6b833060576a0c4da0287933d603011")]
+ pub base: Option<&'r str>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Serialize, ToSchema)]
+pub struct TranslationReviewEntry {
+ #[schema(example = 1u64)]
+ pub id: u64,
+ #[schema(example = "FAKE-512: Update translations")]
+ pub title: String,
+ pub owner: User,
+ #[schema(example = ReviewState::Open)]
+ pub state: ReviewState,
+ #[schema(example = 37.5)]
+ pub progress: f32,
+ #[schema(example = "d7c502b9c6b833060576a0c4da0287933d603011")]
+ pub base: String,
+ #[schema(example = "2cecdec660a30bf3964cee645d9cee03640ef8dc")]
+ pub head: String,
+}
+
+#[derive(Deserialize, Serialize, ToSchema)]
+pub struct TranslationReviews {
+ #[schema(example = 0u32)]
+ pub offset: u32,
+ #[schema(example = 10u32)]
+ pub limit: u32,
+ #[schema(example = 42u32)]
+ pub total_count: u32,
+ #[schema(example = true)]
+ pub more: bool,
+ pub reviews: Vec<TranslationReviewEntry>,
+}
+
+#[derive(Deserialize, Serialize, ToSchema)]
+pub struct LocalizationStrings {
+ #[schema(example = 0u32)]
+ pub offset: u32,
+ #[schema(example = 10u32)]
+ pub limit: u32,
+ #[schema(example = 42u32)]
+ pub total_count: u32,
+ #[schema(example = true)]
+ pub more: bool,
+ pub strings: Vec<LocalizationString>,
}
diff --git a/server/common/src/git.rs b/server/common/src/git.rs
index 8fe7863..e396d8a 100644
--- a/server/common/src/git.rs
+++ b/server/common/src/git.rs
@@ -4,6 +4,7 @@ use futures::future::TryFutureExt;
use pathdiff::diff_paths;
use std::collections::HashMap;
use std::fmt;
+use std::io::{self, Cursor, Read};
use std::path::{Path, PathBuf};
use std::process::Stdio;
use tokio::fs;
@@ -78,6 +79,24 @@ pub struct TreeEntry {
pub path: String,
}
+pub struct GitFile {
+ cursor: Cursor<Vec<u8>>,
+}
+
+impl GitFile {
+ pub fn new(data: Vec<u8>) -> Self {
+ GitFile {
+ cursor: Cursor::new(data),
+ }
+ }
+}
+
+impl Read for GitFile {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ self.cursor.read(buf)
+ }
+}
+
fn io_err(action: &str, e: std::io::Error) -> Error {
Error::new(format!("{action}: {e}"))
}
@@ -161,6 +180,8 @@ impl RepoData {
cmd.arg("--porcelain");
// This option disables this automatic tag following.
cmd.arg("--no-tags");
+ // Write out refs even if they didn't change
+ cmd.arg("--verbose");
cmd.arg("origin");
// <+ force update><remote branch>:<local branch>
cmd.arg(format!("+{branch}:{branch}"));
@@ -430,6 +451,23 @@ impl RepoData {
self.output(&mut cmd).map_ok(parse_tree_entries).await
}
+ async fn cat_file(
+ &self,
+ repo: &Repository,
+ object_type: ObjectType,
+ object_name: &str,
+ ) -> Result<GitFile, Error> {
+ let mut cmd = self.git_cmd(repo);
+ cmd.arg("cat-file")
+ .arg(match object_type {
+ ObjectType::BLOB => "blob",
+ ObjectType::COMMIT => "commit",
+ ObjectType::TREE => "tree",
+ })
+ .arg(object_name);
+ self.raw_output(&mut cmd).map_ok(GitFile::new).await
+ }
+
async fn get_log_format(
&self,
repo: &Repository,
@@ -516,6 +554,14 @@ impl RepoData {
}
async fn output(&self, cmd: &mut Command) -> Result<String, Error> {
+ match self.raw_output(cmd).await {
+ Ok(bytes) => String::from_utf8(bytes)
+ .map_err(|e| Error::new(format!("git command had invalid output: {e}"))),
+ Err(e) => Err(e),
+ }
+ }
+
+ async fn raw_output(&self, cmd: &mut Command) -> Result<Vec<u8>, Error> {
cmd.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped());
@@ -530,9 +576,7 @@ impl RepoData {
.await?;
if output.status.success() {
- let output_utf8 = String::from_utf8(output.stdout)
- .map_err(|e| Error::new(format!("git command had invalid output: {e}")))?;
- Ok(output_utf8)
+ Ok(output.stdout)
} else {
Err(Error::new(format!(
"git command failed with exitcode: {}\n{:?}\n{}",
@@ -693,4 +737,15 @@ impl Repository {
data.ls_tree(self, commit.as_str(), recursive).await
}
+
+ pub async fn cat_file(
+ &self,
+ object_type: ObjectType,
+ object_name: impl Into<String>,
+ ) -> Result<GitFile, Error> {
+ let object_name = object_name.into();
+ let data = self.lock.read().await;
+
+ data.cat_file(self, object_type, object_name.as_str()).await
+ }
}
diff --git a/server/common/src/grit.rs b/server/common/src/grit.rs
index ee96500..9d01dac 100644
--- a/server/common/src/grit.rs
+++ b/server/common/src/grit.rs
@@ -4,7 +4,7 @@ use anyhow::Error;
use std::collections::VecDeque;
use std::fs;
use std::io::{BufReader, Read};
-use std::path::Path;
+use std::path::{Path, PathBuf};
use tokio::task::spawn_blocking;
use xml::attribute::OwnedAttribute;
use xml::reader::{EventReader, ParserConfig, XmlEvent};
@@ -1018,9 +1018,16 @@ fn parse_grit_part_element<R: Read>(
pub async fn parse_grit(path: impl AsRef<Path>) -> anyhow::Result<Grit> {
let path = path.as_ref().to_path_buf();
+ parse_grit_with_opener(move || Ok(BufReader::new(fs::File::open(path)?))).await
+}
+
+pub async fn parse_grit_with_opener<F, R>(opener: F) -> anyhow::Result<Grit>
+where
+ F: FnOnce() -> anyhow::Result<BufReader<R>> + Send + 'static,
+ R: Read,
+{
spawn_blocking(move || {
- let file = fs::File::open(path)?;
- let reader = BufReader::new(file);
+ let reader = opener()?;
let mut ereader = ParserConfig::new()
.ignore_comments(true)
.whitespace_to_characters(true)
@@ -1064,9 +1071,16 @@ pub async fn parse_grit(path: impl AsRef<Path>) -> anyhow::Result<Grit> {
pub async fn parse_grit_part(path: impl AsRef<Path>) -> anyhow::Result<GritPart> {
let path = path.as_ref().to_path_buf();
+ parse_grit_part_with_opener(|| Ok(BufReader::new(fs::File::open(path)?))).await
+}
+
+pub async fn parse_grit_part_with_opener<F, R>(opener: F) -> anyhow::Result<GritPart>
+where
+ F: FnOnce() -> anyhow::Result<BufReader<R>> + Send + 'static,
+ R: Read,
+{
spawn_blocking(move || {
- let file = fs::File::open(path)?;
- let reader = BufReader::new(file);
+ let reader = opener()?;
let mut ereader = ParserConfig::new()
.ignore_comments(true)
.whitespace_to_characters(true)
@@ -1121,20 +1135,20 @@ fn if_message_to_if_message_part(messages: Vec<IfMessage>) -> Vec<IfMessagePart>
.collect()
}
-async fn maybe_expand_message(message: &mut IfMessagePart, basepath: &Path) -> anyhow::Result<()> {
+async fn maybe_expand_message<F, R>(message: &mut IfMessagePart, opener: &F) -> anyhow::Result<()>
+where
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + 'static,
+ R: Read,
+{
match message {
IfMessagePart::Message(_) => Ok(()),
IfMessagePart::Part {
file,
ref mut messages,
} => {
- let file_path = Path::new(file.as_str());
- let part_path = if let Some(parent) = basepath.parent() {
- parent.join(file_path)
- } else {
- file_path.to_path_buf()
- };
- let grit_part = parse_grit_part(part_path).await?;
+ let file = file.to_string();
+ let opener = opener.clone();
+ let grit_part = parse_grit_part_with_opener(move || opener(file.as_str())).await?;
*messages = if_message_to_if_message_part(grit_part.messages);
Ok(())
}
@@ -1142,23 +1156,60 @@ async fn maybe_expand_message(message: &mut IfMessagePart, basepath: &Path) -> a
expr: _,
ref mut message,
} => {
- Box::pin(expand_messages(message, basepath)).await?;
+ Box::pin(expand_messages(message, opener)).await?;
Ok(())
}
}
}
-async fn expand_messages(messages: &mut Vec<IfMessagePart>, basepath: &Path) -> anyhow::Result<()> {
+async fn expand_messages<F, R>(messages: &mut Vec<IfMessagePart>, opener: &F) -> anyhow::Result<()>
+where
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + 'static,
+ R: Read,
+{
for message in messages {
- maybe_expand_message(message, basepath).await?;
+ maybe_expand_message(message, opener).await?;
}
Ok(())
}
pub async fn parse_grit_with_parts(path: impl AsRef<Path>) -> anyhow::Result<Grit> {
let path = path.as_ref();
- let mut grit = parse_grit(path).await?;
- expand_messages(&mut grit.release.messages.messages, path).await?;
+ if let Some(basepath) = path.parent() {
+ let basepath = basepath.to_path_buf();
+ parse_grit_with_parts_and_resolver(path, move |x| basepath.join(x)).await
+ } else {
+ parse_grit_with_parts_and_resolver(path, |x| PathBuf::from(x)).await
+ }
+}
+
+pub async fn parse_grit_with_parts_and_resolver<F>(
+ path: impl AsRef<Path>,
+ path_resolver: F,
+) -> anyhow::Result<Grit>
+where
+ F: Fn(&str) -> PathBuf + Send + Clone + 'static,
+{
+ let path = path.as_ref().to_path_buf();
+ let grit_opener = || Ok(BufReader::new(fs::File::open(path)?));
+ let part_opener = move |x: &str| {
+ let part_path = path_resolver(x);
+ Ok(BufReader::new(fs::File::open(part_path)?))
+ };
+ parse_grit_with_parts_and_opener(grit_opener, part_opener).await
+}
+
+pub async fn parse_grit_with_parts_and_opener<F, G, R>(
+ grit_opener: F,
+ part_opener: G,
+) -> anyhow::Result<Grit>
+where
+ F: FnOnce() -> anyhow::Result<BufReader<R>> + Send + 'static,
+ G: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + 'static,
+ R: Read,
+{
+ let mut grit = parse_grit_with_opener(grit_opener).await?;
+ expand_messages(&mut grit.release.messages.messages, &part_opener).await?;
Ok(grit)
}
@@ -1513,9 +1564,16 @@ fn parse_xliff_element<R: Read>(
pub async fn parse_xlf(path: impl AsRef<Path>) -> anyhow::Result<TranslationFile> {
let path = path.as_ref().to_path_buf();
+ parse_xlf_with_opener(|| Ok(BufReader::new(fs::File::open(path)?))).await
+}
+
+pub async fn parse_xlf_with_opener<F, R>(opener: F) -> anyhow::Result<TranslationFile>
+where
+ F: FnOnce() -> anyhow::Result<BufReader<R>> + Send + 'static,
+ R: Read,
+{
spawn_blocking(move || {
- let file = fs::File::open(path)?;
- let reader = BufReader::new(file);
+ let reader = opener()?;
let mut ereader = ParserConfig::new()
.ignore_comments(true)
.whitespace_to_characters(true)
diff --git a/server/migrations/1_initial_eyeballs.sql b/server/migrations/1_initial_eyeballs.sql
index a39202d..3762c83 100644
--- a/server/migrations/1_initial_eyeballs.sql
+++ b/server/migrations/1_initial_eyeballs.sql
@@ -83,3 +83,91 @@ CREATE TABLE IF NOT EXISTS user_keys (
ON DELETE CASCADE
ON UPDATE RESTRICT
);
+
+CREATE TABLE IF NOT EXISTS translation_reviews (
+ id BIGINT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ project VARCHAR(128) NOT NULL,
+ owner VARCHAR(128) NOT NULL,
+ title VARCHAR(1024) NOT NULL,
+ description MEDIUMTEXT NOT NULL DEFAULT '',
+ state TINYINT UNSIGNED NOT NULL DEFAULT 0,
+ progress FLOAT NOT NULL DEFAULT 0,
+ archived BOOLEAN NOT NULL DEFAULT 0,
+ base VARCHAR(40) NOT NULL,
+ head VARCHAR(40) NOT NULL,
+
+ CONSTRAINT `fk_translation_reviews_project`
+ FOREIGN KEY (project) REFERENCES projects (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT,
+
+ CONSTRAINT `fk_translation_reviews_owner`
+ FOREIGN KEY (owner) REFERENCES users (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT
+);
+
+CREATE TABLE IF NOT EXISTS translation_review_users (
+ translation_review BIGINT UNSIGNED NOT NULL,
+ user VARCHAR(128) NOT NULL,
+ role TINYINT UNSIGNED NOT NULL DEFAULT 0,
+ PRIMARY KEY (translation_review, user),
+
+ CONSTRAINT `fk_translation_review_users_translation_review`
+ FOREIGN KEY (translation_review) REFERENCES translation_reviews (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT,
+
+ CONSTRAINT `fk_translation_review_users_user`
+ FOREIGN KEY (user) REFERENCES users (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT
+);
+
+CREATE TABLE IF NOT EXISTS localization_strings (
+ id BIGINT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ translation_review BIGINT UNSIGNED NOT NULL,
+ name VARCHAR(512) NOT NULL,
+ file VARCHAR(512) NOT NULL,
+ description MEDIUMTEXT NOT NULL DEFAULT '',
+ meaning VARCHAR(512) NOT NULL DEFAULT '',
+ source MEDIUMTEXT NOT NULL,
+ placeholder_offsets VARCHAR(512) NOT NULL DEFAULT '',
+
+ CONSTRAINT `fk_localization_strings_translation_review`
+ FOREIGN KEY (translation_review) REFERENCES translation_reviews (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT
+);
+
+CREATE TABLE IF NOT EXISTS localization_placeholders (
+ id BIGINT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
+ localization_string BIGINT UNSIGNED NOT NULL,
+ name VARCHAR(128) NOT NULL,
+ content VARCHAR(256) NOT NULL,
+ example VARCHAR(256) NOT NULL DEFAULT '',
+
+ CONSTRAINT `fk_localization_placeholder_localization_string`
+ FOREIGN KEY (localization_string) REFERENCES localization_strings (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT
+);
+
+CREATE TABLE IF NOT EXISTS translation_strings (
+ localization_string BIGINT UNSIGNED NOT NULL,
+ language VARCHAR(10) NOT NULL,
+ base_translation MEDIUMTEXT,
+ base_placeholder_offsets VARCHAR(512),
+ head_translation MEDIUMTEXT NOT NULL,
+ head_placeholder_offsets VARCHAR(512) NOT NULL DEFAULT '',
+ state TINYINT UNSIGNED NOT NULL DEFAULT 0,
+ comment MEDIUMTEXT NOT NULL DEFAULT '',
+ reviewer VARCHAR(128),
+
+ CONSTRAINT `fk_translation_string_localization_string`
+ FOREIGN KEY (localization_string) REFERENCES localization_strings (id)
+ ON DELETE CASCADE
+ ON UPDATE RESTRICT,
+
+ PRIMARY KEY (localization_string, language)
+);
diff --git a/server/src/git_root.rs b/server/src/git_root.rs
index 71ad96f..f68ed92 100644
--- a/server/src/git_root.rs
+++ b/server/src/git_root.rs
@@ -9,6 +9,7 @@ use rocket_db_pools::{sqlx, Database, Pool};
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs::Permissions;
+use std::io::BufReader;
use std::ops::Deref;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
@@ -22,6 +23,7 @@ use crate::api_model;
use crate::fs_utils;
use crate::git;
use crate::git_socket;
+use crate::trans;
use crate::Db;
type DbPool = <Db as Database>::Pool;
@@ -70,6 +72,71 @@ impl Roots {
Ok(())
}
+ pub async fn new_translation_review(
+ &self,
+ db: &Db,
+ project_id: &str,
+ translation_reviewid: u64,
+ base: &str,
+ ) -> Result<(), anyhow::Error> {
+ let repo;
+ {
+ let data = self.data.lock().unwrap();
+ if let Some(tmp_repo) = data.project_repo.get(project_id) {
+ repo = tmp_repo.clone();
+ } else {
+ return Err(anyhow::Error::msg("No such repo"));
+ }
+ }
+
+ let mut entries = repo
+ .ls_tree(base, true)
+ .await
+ .map_err(|e| anyhow::Error::new(e))?;
+ entries.retain(|e| e.object_type == git::ObjectType::BLOB);
+ let grits = entries
+ .iter()
+ .filter(|x| x.path.ends_with(".grd"))
+ .map(|x| x.path.to_string())
+ .collect::<Vec<String>>();
+ let entries = Arc::new(entries);
+ let strings = trans::collect_strings_with_opener(grits, move |path| {
+ for entry in &*entries {
+ if entry.path == path {
+ let rt = tokio::runtime::Handle::current();
+ let object_name = entry.object_name.clone();
+ let repo = repo.clone();
+ return rt.block_on(async move {
+ repo.cat_file(git::ObjectType::BLOB, object_name)
+ .await
+ .map(|x| BufReader::new(x))
+ .map_err(|e| anyhow::Error::new(e))
+ });
+ }
+ }
+ Err(anyhow::Error::msg(format!("No such file: {path}")))
+ })
+ .await?;
+
+ trans::review_add_strings(db, translation_reviewid, strings, true).await?;
+
+ Ok(())
+ }
+
+ pub async fn fetch_branch(&self, project_id: &str, branch: &str) -> anyhow::Result<String> {
+ let repo;
+ {
+ let data = self.data.lock().unwrap();
+ if let Some(tmp_repo) = data.project_repo.get(project_id) {
+ repo = tmp_repo.clone();
+ } else {
+ return Err(anyhow::Error::msg("No such repo"));
+ }
+ }
+
+ repo.fetch(branch).await.map_err(|e| anyhow::Error::new(e))
+ }
+
pub async fn del_branch(&self, project_id: &str, branch: &str) -> Result<(), git::Error> {
let repo;
{
diff --git a/server/src/main.rs b/server/src/main.rs
index 66faec3..8c59b23 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -8,7 +8,7 @@ use rocket::http::Status;
use rocket::response::status::{Custom, NotFound};
use rocket::serde::json::Json;
use rocket::{futures, Build, Rocket, State};
-use rocket_db_pools::{sqlx, Connection, Database, Pool};
+use rocket_db_pools::{sqlx, Connection, Database};
use sqlx::Acquire;
use std::path::PathBuf;
use utoipa::OpenApi;
@@ -55,6 +55,9 @@ struct Db(sqlx::MySqlPool);
user_key_get,
user_key_del,
user_keys,
+ translation_review_new,
+ translation_review_strings,
+ translation_reviews,
),
modifiers(&AuthApiAddon),
)]
@@ -122,7 +125,7 @@ fn abbrivate_key(key: &str) -> String {
}
async fn get_project(
- db: &mut <<Db as Database>::Pool as Pool>::Connection,
+ mut db: Connection<Db>,
projectid: &str,
) -> Result<Json<api_model::Project>, NotFound<&'static str>> {
let users = sqlx::query!(
@@ -173,12 +176,11 @@ async fn get_project(
)]
#[get("/project/<projectid>")]
async fn project(
- db: &Db,
+ db: Connection<Db>,
_session: auth::Session,
projectid: &str,
) -> Result<Json<api_model::Project>, NotFound<&'static str>> {
- let mut conn = db.get().await.unwrap();
- get_project(&mut conn, projectid).await
+ get_project(db, projectid).await
}
// Remove linebreaks and potential openssh wrapper
@@ -206,6 +208,7 @@ fn cleanup_key(key: &str) -> String {
#[post("/project/<projectid>/new", data = "<data>")]
async fn project_new(
db: &Db,
+ conn: Connection<Db>,
git_roots_config: &State<git_root::Config<'_>>,
roots_state: &State<git_root::Roots>,
session: auth::Session,
@@ -219,9 +222,8 @@ async fn project_new(
};
let main_branch = data.main_branch.unwrap_or("main");
- let mut conn = db.get().await.unwrap();
{
- let mut tx = conn.begin().await.unwrap();
+ let mut tx = db.begin().await.unwrap();
sqlx::query!(
"INSERT INTO projects (id, title, description, remote, remote_key, main_branch) VALUES (?, ?, ?, ?, ?, ?)",
@@ -258,7 +260,7 @@ async fn project_new(
.map_err(|e| Custom(Status::InternalServerError, format!("{e}")))
.await?;
- Ok(get_project(&mut conn, projectid).await.unwrap())
+ Ok(get_project(conn, projectid).await.unwrap())
}
async fn project_check_maintainer(
@@ -536,15 +538,14 @@ async fn reviews(
}))
}
-async fn get_review_users(
- mut db: Connection<Db>,
+async fn get_project_default_review_users<'a>(
+ db: &mut Connection<Db>,
projectid: &str,
- reviewid: u64,
) -> Vec<api_model::ReviewUserEntry> {
- let mut users = sqlx::query!(
+ sqlx::query!(
"SELECT id,name,dn,project_users.default_role AS role FROM users JOIN project_users ON project_users.user=id WHERE project_users.project=? ORDER BY role,id",
projectid)
- .fetch(&mut **db)
+ .fetch(&mut ***db)
.map_ok(|r| api_model::ReviewUserEntry {
user: api_model::User {
id: r.id,
@@ -555,7 +556,15 @@ async fn get_review_users(
})
.try_collect::<Vec<_>>()
.await
- .unwrap();
+ .unwrap()
+}
+
+async fn get_review_users(
+ mut db: Connection<Db>,
+ projectid: &str,
+ reviewid: u64,
+) -> Vec<api_model::ReviewUserEntry> {
+ let mut users = get_project_default_review_users(&mut db, projectid).await;
let override_users = sqlx::query!(
"SELECT id,name,dn,review_users.role AS role FROM users JOIN review_users ON review_users.user=id WHERE review_users.review=? ORDER BY role,id",
@@ -1116,6 +1125,333 @@ fn healthcheck() -> &'static str {
""
}
+async fn get_translation_review(
+ mut db: Connection<Db>,
+ projectid: &str,
+ translation_reviewid: u64,
+) -> Result<Json<api_model::TranslationReview>, NotFound<&'static str>> {
+ let mut translation_review = sqlx::query!(
+ "SELECT title,description,state,progress,archived,base,head,users.id AS user_id,users.name AS name,users.dn AS user_dn FROM translation_reviews JOIN users ON users.id=owner WHERE project=? AND translation_reviews.id=?",
+ projectid, translation_reviewid)
+ .fetch_one(&mut **db)
+ .map_ok(|r| {
+ api_model::TranslationReview {
+ id: translation_reviewid,
+ title: r.title,
+ description: r.description,
+ owner: api_model::User {
+ id: r.user_id,
+ name: r.name,
+ active: r.user_dn.is_some(),
+ },
+ users: Vec::new(),
+ state: api_model::ReviewState::try_from(r.state).unwrap(),
+ progress: r.progress,
+ archived: r.archived != 0,
+ base: r.base,
+ head: r.head,
+ }
+ })
+ .map_err(|_| NotFound("No such review"))
+ .await?;
+
+ translation_review.users =
+ get_translation_review_users(db, projectid, translation_reviewid).await;
+
+ Ok(Json(translation_review))
+}
+
+async fn get_translation_review_users(
+ mut db: Connection<Db>,
+ projectid: &str,
+ translation_reviewid: u64,
+) -> Vec<api_model::ReviewUserEntry> {
+ let mut users = get_project_default_review_users(&mut db, projectid).await;
+
+ let override_users = sqlx::query!(
+ "SELECT id,name,dn,translation_review_users.role AS role FROM users JOIN translation_review_users ON translation_review_users.user=id WHERE translation_review_users.translation_review=? ORDER BY role,id",
+ translation_reviewid)
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::ReviewUserEntry {
+ user: api_model::User {
+ id: r.id,
+ name: r.name,
+ active: r.dn.is_some(),
+ },
+ role: api_model::UserReviewRole::try_from(r.role).unwrap(),
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+
+ for override_user in override_users {
+ if let Some(user) = users
+ .iter_mut()
+ .find(|ue| ue.user.id == override_user.user.id)
+ {
+ user.role = override_user.role;
+ } else {
+ users.push(override_user);
+ }
+ }
+
+ users
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Translation review created", body = api_model::TranslationReview),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[post("/translation/<projectid>/new", data = "<data>")]
+async fn translation_review_new(
+ db: &Db,
+ mut conn: Connection<Db>,
+ roots_state: &State<git_root::Roots>,
+ session: auth::Session,
+ projectid: &str,
+ data: Json<api_model::TranslationReviewData<'_>>,
+) -> Result<Json<api_model::TranslationReview>, Custom<String>> {
+ let title = if data.title == "" {
+ "Unnamed"
+ } else {
+ data.title.as_str()
+ };
+ let mut base = data.base.unwrap_or("").to_string();
+ let translation_reviewid: u64;
+
+ {
+ let mut tx = conn.begin().await.unwrap();
+
+ if base == "" {
+ let main_branch =
+ sqlx::query!("SELECT main_branch FROM projects WHERE id=?", projectid)
+ .fetch_one(&mut *tx)
+ .map_ok(|r| r.main_branch)
+ .map_err(|_| Custom(Status::NotFound, "No such project".to_string()))
+ .await?;
+
+ base = roots_state
+ .fetch_branch(projectid, main_branch.as_str())
+ .map_err(|_| Custom(Status::InternalServerError, "git error".to_string()))
+ .await?;
+ }
+
+ let r = sqlx::query!(
+ "INSERT INTO translation_reviews (project, owner, title, description, base, head) VALUES (?, ?, ?, ?, ?, ?)",
+ projectid,
+ session.user_id,
+ title,
+ data.description,
+ base,
+ base,
+ )
+ .execute(&mut *tx)
+ .map_err(|e| Custom(Status::InternalServerError, format!("Database error: {e:?}")))
+ .await?;
+
+ translation_reviewid = r.last_insert_id();
+
+ tx.commit().await.unwrap();
+ }
+
+ roots_state
+ .new_translation_review(db, projectid, translation_reviewid, base.as_str())
+ .map_err(|e| Custom(Status::InternalServerError, format!("{e}")))
+ .await?;
+
+ Ok(
+ get_translation_review(conn, projectid, translation_reviewid)
+ .await
+ .unwrap(),
+ )
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Get all translation reviews for project", body = api_model::TranslationReviews),
+ (status = 404, description = "No such project"),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[get("/project/<projectid>/translations?<limit>&<offset>")]
+async fn translation_reviews(
+ mut db: Connection<Db>,
+ _session: auth::Session,
+ projectid: &str,
+ limit: Option<u32>,
+ offset: Option<u32>,
+) -> Result<Json<api_model::TranslationReviews>, NotFound<&'static str>> {
+ let uw_offset = offset.unwrap_or(0);
+ let uw_limit = limit.unwrap_or(10);
+ let entries = sqlx::query!(
+ "SELECT translation_reviews.id AS id,title,state,progress,base,head,users.id AS user_id,users.name AS name,users.dn AS user_dn FROM translation_reviews JOIN users ON users.id=owner WHERE project=? ORDER BY id DESC LIMIT ? OFFSET ?",
+ projectid, uw_limit, uw_offset)
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::TranslationReviewEntry {
+ id: r.id,
+ title: r.title,
+ owner: api_model::User {
+ id: r.user_id,
+ name: r.name,
+ active: r.user_dn.is_some(),
+ },
+ state: api_model::ReviewState::try_from(r.state).unwrap(),
+ progress: r.progress,
+ base: r.base,
+ head: r.head,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+
+ let count = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM translation_reviews WHERE project=?",
+ projectid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+
+ if count == 0 {
+ let projects = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM projects WHERE id=?",
+ projectid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+ if projects == 0 {
+ return Err(NotFound("No such project"));
+ }
+ }
+
+ let u32_count = u32::try_from(count).unwrap();
+
+ Ok(Json(api_model::TranslationReviews {
+ offset: uw_offset,
+ limit: uw_limit,
+ total_count: u32_count,
+ more: uw_offset + uw_limit < u32_count,
+ reviews: entries,
+ }))
+}
+
+#[utoipa::path(
+ responses(
+ (status = 200, description = "Get all strings for a translation review", body = api_model::LocalizationStrings),
+ (status = 404, description = "No such translation review"),
+ ),
+ security(
+ ("session" = []),
+ ),
+)]
+#[get("/translation/<translation_reviewid>/strings?<limit>&<offset>")]
+async fn translation_review_strings(
+ mut db: Connection<Db>,
+ _session: auth::Session,
+ translation_reviewid: u64,
+ limit: Option<u32>,
+ offset: Option<u32>,
+) -> Result<Json<api_model::LocalizationStrings>, NotFound<&'static str>> {
+ let uw_offset = offset.unwrap_or(0);
+ let uw_limit = limit.unwrap_or(10);
+ let (ids, mut entries) = sqlx::query!(
+ "SELECT id,name,file,description,meaning,source,placeholder_offsets FROM localization_strings WHERE translation_review=? ORDER BY id ASC LIMIT ? OFFSET ?",
+ translation_reviewid, uw_limit, uw_offset)
+ .fetch(&mut **db)
+ .try_fold((Vec::new(), Vec::new()), async move |mut vecs, r| {
+ vecs.0.push(r.id);
+ vecs.1.push(api_model::LocalizationString {
+ id: r.name,
+ file: r.file,
+ description: r.description,
+ meaning: r.meaning,
+ source: r.source,
+ placeholders: Vec::new(),
+ placeholder_offset: r.placeholder_offsets.split_terminator(',').map(|x| x.parse::<usize>().unwrap()).collect(),
+ translations: Vec::new(),
+ });
+ Ok(vecs)
+ })
+ .await
+ .unwrap();
+
+ for (i, entry) in entries.iter_mut().enumerate() {
+ if !entry.placeholder_offset.is_empty() {
+ entry.placeholders = sqlx::query!(
+ "SELECT name,content,example FROM localization_placeholders WHERE localization_string=? ORDER BY id ASC",
+ ids[i])
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::LocalizationPlaceholder {
+ id: r.name,
+ content: r.content,
+ example: r.example,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+ }
+
+ entry.translations = sqlx::query!(
+ "SELECT language,head_translation,head_placeholder_offsets,state,comment FROM translation_strings WHERE localization_string=? ORDER BY language ASC",
+ ids[i])
+ .fetch(&mut **db)
+ .map_ok(|r| api_model::TranslationString {
+ language: r.language,
+ translation: r.head_translation,
+ placeholder_offset: r.head_placeholder_offsets.split_terminator(',').map(|x| x.parse::<usize>().unwrap()).collect(),
+ state: api_model::TranslationState::try_from(r.state).unwrap(),
+ comment: r.comment,
+ // TODO
+ reviewer: None,
+ })
+ .try_collect::<Vec<_>>()
+ .await
+ .unwrap();
+ }
+
+ let count = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM localization_strings WHERE translation_review=?",
+ translation_reviewid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+
+ if count == 0 {
+ let reviews = sqlx::query!(
+ "SELECT COUNT(id) AS count FROM translation_reviews WHERE id=?",
+ translation_reviewid
+ )
+ .fetch_one(&mut **db)
+ .map_ok(|r| r.count)
+ .await
+ .unwrap();
+ if reviews == 0 {
+ return Err(NotFound("No such translation review"));
+ }
+ }
+
+ let u32_count = u32::try_from(count).unwrap();
+
+ Ok(Json(api_model::LocalizationStrings {
+ offset: uw_offset,
+ limit: uw_limit,
+ total_count: u32_count,
+ more: uw_offset + uw_limit < u32_count,
+ strings: entries,
+ }))
+}
+
async fn run_migrations(rocket: Rocket<Build>) -> fairing::Result {
match Db::fetch(&rocket) {
Some(db) => match sqlx::migrate!().run(&**db).await {
@@ -1158,6 +1494,9 @@ fn rocket_from_config(figment: Figment) -> Rocket<Build> {
user_key_get,
user_key_del,
user_keys,
+ translation_review_new,
+ translation_review_strings,
+ translation_reviews,
],
)
.attach(auth::stage(basepath))
diff --git a/server/src/tests.rs b/server/src/tests.rs
index c7c1d8a..44d56d5 100644
--- a/server/src/tests.rs
+++ b/server/src/tests.rs
@@ -995,7 +995,6 @@ async fn test_collect_strings() {
source: "Extra title".to_string(),
placeholders: vec![],
placeholder_offset: vec![],
- translation_id: 3567801202192813433,
translations: vec![],
},
api_model::LocalizationString {
@@ -1006,27 +1005,38 @@ async fn test_collect_strings() {
source: "Bookmarks".to_string(),
placeholders: vec![],
placeholder_offset: vec![],
- translation_id: 8820817407110198400,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "Bookmarks".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: "ဝက်ဘ်လိပ်စာ မှတ်ထားမှုများ".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: "ဝက္ဘ္လိပ္စာ မွတ္ထားမွုမ်ား".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "Bokmärken".to_string(),
placeholder_offset: vec![],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1044,27 +1054,38 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![11],
- translation_id: 8443102241046796905,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "Welcome to ".to_string(),
placeholder_offset: vec![11],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: " မှ ကြိုဆိုပါသည်".to_string(),
placeholder_offset: vec![0],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: " မွ ႀကိဳဆိုပါသည္".to_string(),
placeholder_offset: vec![0],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "Välkommen till ".to_string(),
placeholder_offset: vec![16],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1097,27 +1118,38 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![54, 70, 140, 157],
- translation_id: 2466140279568640908,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "By using this application you are agreeing to Opera's Terms of Service. Also, you can learn how Opera handles and protects your data in our Privacy Statement.".to_string(),
placeholder_offset: vec![54, 70, 140, 157],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my".to_string(),
translation: "ဤအပလီကေးရှင်းကို အသုံးပြုခြင်းဖြင့် သင်သည် Opera ၏ ဝန်ဆောင်မှုစည်းမျဉ်းများ ကို သဘောတူရာ ရောက်ပါသည်။ ထို့အပြင် ကျွန်ုပ်တို့၏ကိုယ်ရေးလုံခြုံမှု ထုတ်ပြန်ချက် ထဲတွင် သင့်ဒေတာများကို Opera ၏ ကိုင်တွယ်ပုံနှင့် ကာကွယ်ပုံတို့ကိုလည်း လေ့လာနိုင်သည်။".to_string(),
placeholder_offset: vec![133, 205, 342, 433],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "my-zg".to_string(),
translation: "ဤအပလီေကးရွင္းကို အသုံးျပဳျခင္းျဖင့္ သင္သည္ Opera ၏ ဝန္ေဆာင္မွုစည္းမ်ဥ္းမ်ား ကို သေဘာတူရာ ေရာက္ပါသည္။ ထို႔အျပင္ ကၽြန္ုပ္တို႔၏ကိုယ္ေရးလုံျခဳံမွု ထုတ္ျပန္ခ်က္ ထဲတြင္ သင့္ေဒတာမ်ားကို Opera ၏ ကိုင္တြယ္ပုံႏွင့္ ကာကြယ္ပုံတို႔ကိုလည္း ေလ့လာနိုင္သည္။".to_string(),
placeholder_offset: vec![133, 205, 342, 433],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
api_model::TranslationString {
language: "sv".to_string(),
translation: "I och med din användning av det här programmet samtycker du till Operas Licensvillkor. Du kan också läsa om hur Opera hanterar och skyddar dina data i vårt Sekretessmeddelande.".to_string(),
placeholder_offset: vec![74, 87, 161, 180],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
@@ -1150,12 +1182,14 @@ async fn test_collect_strings() {
},
],
placeholder_offset: vec![34, 65, 98, 132],
- translation_id: 7770247413830876286,
translations: vec![
api_model::TranslationString {
language: "en-gb".to_string(),
translation: "{BOOKMARKS, plural,\n one { folder deleted}\n few { folders deleted}\n many { folders deleted}\n other { folders deleted}}".to_string(),
placeholder_offset: vec![35, 67, 101, 136],
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
},
],
},
diff --git a/server/src/trans.rs b/server/src/trans.rs
index 6a18e27..c4e3b45 100644
--- a/server/src/trans.rs
+++ b/server/src/trans.rs
@@ -1,27 +1,46 @@
+#![allow(dead_code)]
+
use anyhow;
+use futures::stream::TryStreamExt;
+use rocket_db_pools::{sqlx, Database, Pool};
+use sorted_insert::SortedInsertByKey;
+use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
+use std::fs::File;
+use std::io::{BufReader, Read};
use std::iter::{repeat, IntoIterator};
-use std::path::{Path, PathBuf};
+use std::path::Path;
use tokio::task::JoinSet;
+use crate::Db;
use eyeballs_api::api_model;
use eyeballs_common::grit;
-fn schedule_translations(
+type DbPool = <Db as Database>::Pool;
+type DbConnection = <DbPool as Pool>::Connection;
+
+fn schedule_translations<F, R>(
tasks: &mut JoinSet<anyhow::Result<grit::TranslationFile>>,
known: &mut HashSet<String>,
- path: &Path,
+ opener: &F,
files: &Vec<grit::IfFile>,
-) {
+) where
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + 'static,
+ R: Read,
+{
for file in files {
match file {
grit::IfFile::File(file) => {
if known.insert(file.path.to_string()) {
- tasks.spawn(grit::parse_xlf(path.join(file.path.as_str())));
+ let path = file.path.to_string();
+ let opener = opener.clone();
+ tasks.spawn(async {
+ grit::parse_xlf_with_opener(move || opener(path.as_str())).await
+ });
}
}
grit::IfFile::If { expr: _, file } => {
- schedule_translations(tasks, known, path, file);
+ schedule_translations(tasks, known, opener, file);
}
}
}
@@ -29,6 +48,7 @@ fn schedule_translations(
fn push_strings(
strings: &mut Vec<api_model::LocalizationString>,
+ translation_ids: &mut Vec<i64>,
file: &String,
messages: Vec<grit::IfMessagePart>,
) {
@@ -71,15 +91,15 @@ fn push_strings(
source,
placeholders,
placeholder_offset,
- translation_id,
translations: Vec::<api_model::TranslationString>::new(),
});
+ translation_ids.push(translation_id);
}
grit::IfMessagePart::If { expr: _, message } => {
- push_strings(strings, file, message);
+ push_strings(strings, translation_ids, file, message);
}
grit::IfMessagePart::Part { file, messages } => {
- push_strings(strings, &file, messages);
+ push_strings(strings, translation_ids, &file, messages);
}
}
}
@@ -126,52 +146,87 @@ fn push_translation(
}
}
- string.translations.push(api_model::TranslationString {
- language: language.to_string(),
- translation,
- placeholder_offset,
- })
+ string.translations.sorted_insert_asc_by_key(
+ api_model::TranslationString {
+ language: language.to_string(),
+ translation,
+ placeholder_offset,
+ state: api_model::TranslationState::Unreviewed,
+ comment: "".to_string(),
+ reviewer: None,
+ },
+ |e| &e.language,
+ );
}
pub async fn collect_strings(
base: impl AsRef<Path>,
grits: impl IntoIterator<Item = String>,
) -> anyhow::Result<Vec<api_model::LocalizationString>> {
+ let base = base.as_ref().to_path_buf();
+ collect_strings_with_opener(grits, move |x| {
+ let path = base.join(x);
+ Ok(BufReader::new(File::open(path)?))
+ })
+ .await
+}
+
+pub async fn collect_strings_with_opener<F, R>(
+ grits: impl IntoIterator<Item = String>,
+ opener: F,
+) -> anyhow::Result<Vec<api_model::LocalizationString>>
+where
+ // TODO: Would like to avoid Sync here, it was possible in grit but not here
+ // for some reason.
+ F: Fn(&str) -> anyhow::Result<BufReader<R>> + Clone + Send + Sync + 'static,
+ R: Read,
+{
let mut grit_tasks = JoinSet::new();
for grit_name in grits {
- let grit_path = base.as_ref().join(grit_name.as_str());
+ let opener_copy = opener.clone();
+ let grit_name_copy = grit_name.clone();
+ let grit_opener = move || opener_copy(grit_name_copy.as_str());
+
+ let part_opener = opener.clone();
grit_tasks.spawn(async move {
- let tmp = grit::parse_grit_with_parts(grit_path.as_path()).await;
- (grit_path, grit_name, tmp)
+ let tmp = grit::parse_grit_with_parts_and_opener(grit_opener, part_opener).await;
+ (grit_name, tmp)
});
}
let mut parsed_grits =
- Vec::<(PathBuf, String, anyhow::Result<grit::Grit>)>::with_capacity(grit_tasks.len());
+ Vec::<(String, anyhow::Result<grit::Grit>)>::with_capacity(grit_tasks.len());
while let Some(res) = grit_tasks.join_next().await {
parsed_grits.push(res?);
}
let mut strings = Vec::<api_model::LocalizationString>::new();
+ let mut translation_ids = Vec::<i64>::new();
let mut translation_tasks = JoinSet::new();
let mut known_translations = HashSet::<String>::new();
- for (grit_path, grit_name, maybe_grit) in parsed_grits {
+ for (grit_name, maybe_grit) in parsed_grits {
let grit = maybe_grit?;
schedule_translations(
&mut translation_tasks,
&mut known_translations,
- grit_path.parent().unwrap(),
+ &opener,
&grit.translations.file,
);
let first_index = strings.len();
- push_strings(&mut strings, &grit_name, grit.release.messages.messages);
+ push_strings(
+ &mut strings,
+ &mut translation_ids,
+ &grit_name,
+ grit.release.messages.messages,
+ );
- let mut id_to_string = HashMap::<i64, usize>::with_capacity(strings.len() - first_index);
- for i in first_index..strings.len() {
- id_to_string.insert(strings[i].translation_id, i);
+ let mut id_to_string =
+ HashMap::<i64, usize>::with_capacity(translation_ids.len() - first_index);
+ for i in first_index..translation_ids.len() {
+ id_to_string.insert(translation_ids[i], i);
}
while let Some(res) = translation_tasks.join_next().await {
@@ -190,3 +245,212 @@ pub async fn collect_strings(
Ok(strings)
}
+
+#[derive(Hash, PartialEq, Eq)]
+struct LocalizationStringKey<'a> {
+ file: Cow<'a, str>,
+ name: Cow<'a, str>,
+ meaning: Cow<'a, str>,
+}
+
+struct TranslationString {
+ base_translation: Option<String>,
+ base_placeholder_offsets: Option<String>,
+ head_translation: String,
+ head_placeholder_offsets: String,
+ state: api_model::TranslationState,
+}
+
+pub async fn review_add_strings(
+ db: &Db,
+ translation_reviewid: u64,
+ strings: Vec<api_model::LocalizationString>,
+ base: bool,
+) -> anyhow::Result<()> {
+ {
+ let mut tx = db.begin().await?;
+
+ let existing = sqlx::query!(
+ "SELECT id, name, file, meaning FROM localization_strings WHERE translation_review=?",
+ translation_reviewid
+ )
+ .fetch(&mut *tx)
+ .try_fold(HashMap::new(), async move |mut ex, r| {
+ ex.insert(
+ LocalizationStringKey {
+ file: r.file.into(),
+ name: r.name.into(),
+ meaning: r.meaning.into(),
+ },
+ r.id,
+ );
+ Ok(ex)
+ })
+ .await
+ .unwrap();
+
+ for string in strings {
+ let key = LocalizationStringKey {
+ file: string.file.as_str().into(),
+ name: string.id.as_str().into(),
+ meaning: string.meaning.as_str().into(),
+ };
+ let id: u64;
+ let placeholder_offsets = string
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+ if let Some(existing_id) = existing.get(&key) {
+ sqlx::query!(
+ "UPDATE localization_strings SET description=?, source=?, placeholder_offsets=? WHERE id=?",
+ string.description,
+ string.source,
+ placeholder_offsets,
+ existing_id)
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+ // TODO: Might be worth checking what needs updating but meh.
+ sqlx::query!(
+ "DELETE FROM localization_placeholders WHERE localization_string=?",
+ existing_id
+ )
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+ id = *existing_id;
+ } else {
+ let result = sqlx::query!(
+ "INSERT INTO localization_strings (translation_review, name, file, description, meaning, source, placeholder_offsets) VALUES (?, ?, ?, ?, ?, ?, ?)",
+ translation_reviewid,
+ string.id,
+ string.file,
+ string.description,
+ string.meaning,
+ string.source,
+ placeholder_offsets)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ id = result.last_insert_id();
+ }
+
+ for placeholder in string.placeholders {
+ sqlx::query!(
+ "INSERT INTO localization_placeholders (localization_string, name, content, example) VALUES (?, ?, ?, ?)",
+ id,
+ placeholder.id,
+ placeholder.content,
+ placeholder.example)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+
+ if base {
+ sqlx::query!(
+ "DELETE FROM translation_strings WHERE localization_string=?",
+ id
+ )
+ .execute(&mut *tx)
+ .await
+ .unwrap();
+
+ for translation in string.translations {
+ let placeholder_offsets = translation
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+ // Mark all as Unchanged as base == head here.
+ sqlx::query!(
+ "INSERT INTO translation_strings (localization_string, language, base_translation, base_placeholder_offsets, head_translation, head_placeholder_offsets, state) VALUES (?, ?, ?, ?, ?, ?, ?)",
+ id,
+ translation.language,
+ translation.translation,
+ placeholder_offsets,
+ translation.translation,
+ placeholder_offsets,
+ u8::from(api_model::TranslationState::Unchanged))
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ } else {
+ let existing = sqlx::query!("SELECT language, base_translation, base_placeholder_offsets, head_translation, head_placeholder_offsets, state FROM translation_strings WHERE localization_string=?", id)
+ .fetch(&mut *tx)
+ .try_fold(HashMap::new(), async move |mut ex, r| {
+ ex.insert(r.language,
+ TranslationString {
+ base_translation: r.base_translation,
+ base_placeholder_offsets: r.base_placeholder_offsets,
+ head_translation: r.head_translation,
+ head_placeholder_offsets: r.head_placeholder_offsets,
+ state: api_model::TranslationState::try_from(r.state).unwrap_or(api_model::TranslationState::Unreviewed),
+ });
+ Ok(ex)
+ })
+ .await
+ .unwrap();
+
+ for translation in string.translations {
+ let placeholder_offsets = translation
+ .placeholder_offset
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect::<Vec<String>>()
+ .join(",");
+
+ if let Some(existing_translation) = existing.get(translation.language.as_str())
+ {
+ if existing_translation.head_translation != translation.translation
+ || existing_translation.head_placeholder_offsets != placeholder_offsets
+ {
+ // Reset state whenever translation changes
+ let new_state = if existing_translation
+ .base_translation
+ .as_ref()
+ .is_some_and(|x| *x == translation.translation)
+ && existing_translation
+ .base_placeholder_offsets
+ .as_ref()
+ .is_some_and(|x| *x == placeholder_offsets)
+ {
+ api_model::TranslationState::Unchanged
+ } else {
+ api_model::TranslationState::Unreviewed
+ };
+ sqlx::query!(
+ "UPDATE translation_strings SET head_translation=?, head_placeholder_offsets=?, state=? WHERE localization_string = ? AND language = ?",
+ translation.translation,
+ placeholder_offsets,
+ u8::from(new_state),
+ id,
+ translation.language)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ } else {
+ sqlx::query!(
+ "INSERT INTO translation_strings (localization_string, language, head_translation, head_placeholder_offsets) VALUES (?, ?, ?, ?)",
+ id,
+ translation.language,
+ translation.translation,
+ placeholder_offsets)
+ .execute(&mut* tx)
+ .await
+ .unwrap();
+ }
+ }
+ }
+ }
+
+ tx.commit().await?;
+ }
+
+ Ok(())
+}
diff --git a/server/tests/common/mod.rs b/server/tests/common/mod.rs
index 5a7e30d..0a9556a 100644
--- a/server/tests/common/mod.rs
+++ b/server/tests/common/mod.rs
@@ -91,6 +91,75 @@ async fn git_cmd(base: impl AsRef<Path>, args: &[&str]) -> Result<(), anyhow::Er
run(&mut cmd, "git command").await
}
+const STRINGS_GRD: &str = r#"<?xml version="1.0" encoding="UTF-8"?>
+<grit current_release="1" latest_public_release="0">
+ <outputs>
+ <output filename="values/strings.xml" type="android" lang="en" />
+ <output filename="values-en-rGB/strings.xml" type="android" lang="en-GB" />
+ <output filename="values-sv/strings.xml" type="android" lang="sv" />
+ </outputs>
+ <translations>
+ <file path="translations/strings_en_gb.xlf" lang="en-GB" />
+ <file path="translations/strings_sv.xlf" lang="sv" />
+ </translations>
+ <release allow_pseudo="false" seq="1">
+ <messages fallback_to_english="true">
+ <part file="extra.grdp" />
+ <message desc="Description" name="MAIN_STRING">
+ Main test string
+ </message>
+ </messages>
+ </release>
+</grit>"#;
+const EXTRA_GRDP: &str = r#"<?xml version="1.0" encoding="utf-8"?>
+<grit-part>
+ <message desc="Some description" name="EXTRA_STRING">
+ Extra string, gray
+ </message>
+</grit-part>"#;
+const STRINGS_EN_GB_XLF: &str = r#"<?xml version="1.0" encoding="UTF-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" version="1.2">
+<file datatype="xml" source-language="en-US" original="strings.grd" target-language="en-gb">
+<body>
+<trans-unit id="759906012366268261">
+ <source>Main test string</source>
+ <target>Main test string</target>
+ <note>MAIN_STRING
+ Description
+ </note>
+</trans-unit>
+<trans-unit id="3195503415604121324">
+ <source>Extra string, gray</source>
+ <target>Extra string, grey</target>
+ <note>EXTRA_STRING
+ Some description
+ </note>
+</trans-unit>
+</body>
+</file>
+</xliff>"#;
+const STRINGS_SV_XLF: &str = r#"<?xml version="1.0" encoding="UTF-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" version="1.2">
+<file datatype="xml" source-language="en-US" original="strings.grd" target-language="sv">
+<body>
+<trans-unit id="759906012366268261">
+ <source>Main test string</source>
+ <target>Primära teststrängen</target>
+ <note>MAIN_STRING
+ Description
+ </note>
+</trans-unit>
+<trans-unit id="3195503415604121324">
+ <source>Extra string, gray</source>
+ <target>Extra sträng, grå</target>
+ <note>EXTRA_STRING
+ Some description
+ </note>
+</trans-unit>
+</body>
+</file>
+</xliff>"#;
+
impl DockerComposeContext {
pub fn url(&self) -> &str {
self.url.as_str()
@@ -235,6 +304,47 @@ impl AsyncTestContext for DockerComposeContext {
.await
.expect("git push");
+ fs::write(mod_path.join("fake/strings.grd"), STRINGS_GRD)
+ .await
+ .expect("Write strings.grd");
+ fs::write(mod_path.join("fake/extra.grdp"), EXTRA_GRDP)
+ .await
+ .expect("Write extra.grdp");
+ fs::create_dir(mod_path.join("fake/translations"))
+ .await
+ .expect("mkdir translations");
+ fs::write(
+ mod_path.join("fake/translations/strings_en_gb.xlf"),
+ STRINGS_EN_GB_XLF,
+ )
+ .await
+ .expect("Write strings_en_gb.xlf");
+ fs::write(
+ mod_path.join("fake/translations/strings_sv.xlf"),
+ STRINGS_SV_XLF,
+ )
+ .await
+ .expect("Write strings_sv");
+
+ git_cmd(
+ &mod_path,
+ &[
+ "add",
+ "strings.grd",
+ "extra.grdp",
+ "translations/strings_en_gb.xlf",
+ "translations/strings_sv.xlf",
+ ],
+ )
+ .await
+ .expect("git add");
+ git_cmd(&mod_path, &["commit", "-m", "Add strings"])
+ .await
+ .expect("git commit");
+ git_cmd(&mod_path, &["push", "origin", "HEAD:main"])
+ .await
+ .expect("git push");
+
ctx
}
@@ -351,3 +461,48 @@ pub async fn list_reviews(
Err(anyhow::Error::msg(content))
}
}
+
+pub async fn create_translation_review(
+ ctx: &mut DockerComposeContext,
+ client: &mut Client,
+ projectid: &str,
+) -> Result<api_model::TranslationReview, anyhow::Error> {
+ let data = api_model::TranslationReviewData {
+ title: "Test".to_string(),
+ description: "Some test".to_string(),
+ base: None,
+ };
+ let result = client
+ .post(format!("{}/api/v1/translation/{projectid}/new", ctx.url()))
+ .json(&data)
+ .send()
+ .await?;
+ if result.status().is_success() {
+ let review = result.json::<api_model::TranslationReview>().await?;
+ Ok(review)
+ } else {
+ let content = result.text().await?;
+ Err(anyhow::Error::msg(content))
+ }
+}
+
+pub async fn list_translation_strings(
+ ctx: &mut DockerComposeContext,
+ client: &mut Client,
+ translation_reviewid: u64,
+) -> Result<api_model::LocalizationStrings, anyhow::Error> {
+ let result = client
+ .get(format!(
+ "{}/api/v1/translation/{translation_reviewid}/strings",
+ ctx.url()
+ ))
+ .send()
+ .await?;
+ if result.status().is_success() {
+ let strings = result.json::<api_model::LocalizationStrings>().await?;
+ Ok(strings)
+ } else {
+ let content = result.text().await?;
+ Err(anyhow::Error::msg(content))
+ }
+}
diff --git a/server/tests/integration_test.rs b/server/tests/integration_test.rs
index 200657e..6becf61 100644
--- a/server/tests/integration_test.rs
+++ b/server/tests/integration_test.rs
@@ -1,9 +1,12 @@
+use pretty_assertions::assert_eq;
use std::path::PathBuf;
use std::thread::sleep;
use std::time::Duration;
use test_context::test_context;
use tokio::fs;
+use eyeballs_api::api_model;
+
mod common;
const TESTKEY1: &str = "-----BEGIN OPENSSH PRIVATE KEY-----
@@ -128,3 +131,98 @@ async fn test_sanity(ctx: &mut common::DockerComposeContext) {
sleep(Duration::from_millis(500));
}
}
+
+/// End-to-end test: create a project, open a translation review on it,
+/// and verify that the server extracts the expected localization strings
+/// from the repository's .grd/.grdp/.xlf files.
+#[test_context(common::DockerComposeContext)]
+#[tokio::test]
+async fn test_translation_review_create(ctx: &mut common::DockerComposeContext) {
+    // Authenticate and register an SSH key so the server can pull the
+    // test project's git repository.
+    let mut client1 = common::create_client().expect("client1");
+    common::login(ctx, &mut client1, "user01", "password1")
+        .await
+        .expect("user01 login");
+
+    common::user_key_add(ctx, &mut client1, "ssh-rsa", TESTKEY1_PUB)
+        .await
+        .expect("user01 key add");
+    ctx.setup_ssh_key("client1", TESTKEY1)
+        .await
+        .expect("user01 ssh_config setup");
+
+    let remote_git = String::from(ctx.remote_git());
+    let remote_git_key = PathBuf::from(ctx.remote_git_key());
+    common::create_project(ctx, &mut client1, "fake", &remote_git, &remote_git_key)
+        .await
+        .expect("create fake project");
+
+    let review = common::create_translation_review(ctx, &mut client1, "fake")
+        .await
+        .expect("create translation review");
+
+    // String extraction runs asynchronously on the server, so poll for it.
+    // The previous version fell out of the retry loop silently when the
+    // strings never appeared, letting the test pass without asserting
+    // anything; the assertion below now always runs, so a timeout fails.
+    let mut strings = Vec::new();
+    for _ in 0..5 {
+        strings = common::list_translation_strings(ctx, &mut client1, review.id)
+            .await
+            .expect("list strings")
+            .strings;
+        if !strings.is_empty() {
+            break;
+        }
+        // Async sleep: a blocking std::thread::sleep would stall the
+        // current-thread tokio test runtime (needs tokio's "time" feature,
+        // which the server crate already pulls in).
+        tokio::time::sleep(Duration::from_millis(500)).await;
+    }
+    assert_eq!(
+        strings,
+        vec![
+            api_model::LocalizationString {
+                id: "EXTRA_STRING".to_string(),
+                file: "extra.grdp".to_string(),
+                description: "Some description".to_string(),
+                meaning: "".to_string(),
+                source: "Extra string, gray".to_string(),
+                placeholders: vec![],
+                placeholder_offset: vec![],
+                translations: vec![
+                    api_model::TranslationString {
+                        language: "en-gb".to_string(),
+                        translation: "Extra string, grey".to_string(),
+                        placeholder_offset: vec![],
+                        state: api_model::TranslationState::Unchanged,
+                        comment: "".to_string(),
+                        reviewer: None,
+                    },
+                    api_model::TranslationString {
+                        language: "sv".to_string(),
+                        translation: "Extra sträng, grå".to_string(),
+                        placeholder_offset: vec![],
+                        state: api_model::TranslationState::Unchanged,
+                        comment: "".to_string(),
+                        reviewer: None,
+                    }
+                ],
+            },
+            api_model::LocalizationString {
+                id: "MAIN_STRING".to_string(),
+                file: "strings.grd".to_string(),
+                description: "Description".to_string(),
+                meaning: "".to_string(),
+                source: "Main test string".to_string(),
+                placeholders: vec![],
+                placeholder_offset: vec![],
+                translations: vec![
+                    api_model::TranslationString {
+                        language: "en-gb".to_string(),
+                        translation: "Main test string".to_string(),
+                        placeholder_offset: vec![],
+                        state: api_model::TranslationState::Unchanged,
+                        comment: "".to_string(),
+                        reviewer: None,
+                    },
+                    api_model::TranslationString {
+                        language: "sv".to_string(),
+                        translation: "Primära teststrängen".to_string(),
+                        placeholder_offset: vec![],
+                        state: api_model::TranslationState::Unchanged,
+                        comment: "".to_string(),
+                        reviewer: None,
+                    }
+                ],
+            },
+        ]
+    );
+}