diff --git a/.github/workflows/weblate-merge-po.yml b/.github/workflows/weblate-merge-po.yml
new file mode 100644
index 0000000000..0b54d95ac8
--- /dev/null
+++ b/.github/workflows/weblate-merge-po.yml
@@ -0,0 +1,109 @@
+name: Weblate Merge PO
+
+on:
+ schedule:
+ # run every Monday at 2:42AM UTC
+ - cron: "42 2 * * 0"
+
+ # allow running manually
+ workflow_dispatch:
+
+jobs:
+ merge-po:
+ # allow pushing and creating pull requests
+ permissions:
+ contents: write
+ pull-requests: write
+
+ # do not run in forks
+ if: github.repository == 'openSUSE/agama'
+
+ runs-on: ubuntu-latest
+
+ container:
+ image: registry.opensuse.org/opensuse/tumbleweed:latest
+
+ steps:
+ - name: Configure and refresh repositories
+ run: |
+ # install the GitHub command line tool "gh"
+ zypper addrepo https://cli.github.com/packages/rpm/gh-cli.repo
+ # disable unused repositories to have a faster refresh
+ zypper modifyrepo -d repo-non-oss repo-openh264 repo-update && \
+ zypper --non-interactive --gpg-auto-import-keys ref
+
+ - name: Install tools
+ run: zypper --non-interactive install --no-recommends gh git gettext-tools
+
+ - name: Configure Git
+ run: |
+ git config --global user.name "YaST Bot"
+ git config --global user.email "yast-devel@opensuse.org"
+
+ - name: Checkout sources
+ uses: actions/checkout@v3
+ with:
+ path: agama
+
+ - name: Checkout Agama-weblate sources
+ uses: actions/checkout@v3
+ with:
+ path: agama-weblate
+ repository: openSUSE/agama-weblate
+
+ - name: Update PO files
+ working-directory: ./agama
+ run: |
+ mkdir -p web/po
+ # delete the current translations
+ find web/po -name '*.po' -exec git rm '{}' ';'
+
+ # copy the new ones
+ cp -a ../agama-weblate/web/*.po web/po
+ git add web/po/*.po
+
+ - name: Validate the PO files
+ working-directory: ./agama
+ run: msgfmt --check-format -o /dev/null web/po/*.po
+
+ # any changes besides the timestamps in the PO files?
+ - name: Check changes
+ id: check_changes
+ working-directory: ./agama
+ run: |
+ git diff --staged --ignore-matching-lines="POT-Creation-Date:" \
+ --ignore-matching-lines="PO-Revision-Date:" web/po > po.diff
+
+ if [ -s po.diff ]; then
+ echo "PO files updated"
+ # this is an Output Parameter
+ # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
+ echo "po_updated=true" >> $GITHUB_OUTPUT
+ else
+ echo "PO files unchanged"
+ echo "po_updated=false" >> $GITHUB_OUTPUT
+ fi
+
+ rm po.diff
+
+ - name: Push updated PO files
+ # run only when a PO file has been updated
+ if: steps.check_changes.outputs.po_updated == 'true'
+ working-directory: ./agama
+ run: |
+ # use a unique branch to avoid possible conflicts with already existing branches
+ git checkout -b "po_merge_${GITHUB_RUN_ID}"
+ git commit -a -m "Update PO files"$'\n\n'"Agama-weblate commit: `git -C ../agama-weblate rev-parse HEAD`"
+ git push origin "po_merge_${GITHUB_RUN_ID}"
+
+ - name: Create pull request
+ # run only when a PO file has been updated
+ if: steps.check_changes.outputs.po_updated == 'true'
+ working-directory: ./agama
+ run: |
+ gh pr create -B master -H "po_merge_${GITHUB_RUN_ID}" \
+ --label translations --label bot \
+ --title "Update PO files" \
+ --body "Updating the translation files from the agama-weblate repository"
+ env:
+ GH_TOKEN: ${{ github.token }}
diff --git a/.github/workflows/weblate-update-pot.yml b/.github/workflows/weblate-update-pot.yml
new file mode 100644
index 0000000000..6d4db70d63
--- /dev/null
+++ b/.github/workflows/weblate-update-pot.yml
@@ -0,0 +1,88 @@
+name: Weblate Update POT
+
+on:
+ schedule:
+ # run every working day (Monday-Friday) at 1:42AM UTC
+ - cron: "42 1 * * 0-4"
+
+ # allow running manually
+ workflow_dispatch:
+
+jobs:
+ update-pot:
+ # do not run in forks
+ if: github.repository == 'openSUSE/agama'
+
+ runs-on: ubuntu-latest
+
+ container:
+ image: registry.opensuse.org/opensuse/tumbleweed:latest
+
+ steps:
+ - name: Configure and refresh repositories
+ # disable unused repositories to have a faster refresh
+ run: zypper modifyrepo -d repo-non-oss repo-openh264 repo-update && zypper ref
+
+ - name: Install tools
+ run: zypper --non-interactive install --no-recommends diffutils git gettext-tools
+
+ - name: Checkout Agama sources
+ uses: actions/checkout@v3
+ with:
+ path: agama
+
+ - name: Generate POT file
+ # TODO: use a shared script for this
+ run: |
+ cd agama/web
+ xgettext --default-domain=agama --output=- --language=C --keyword= \
+ --keyword=_:1,1t --keyword=_:1c,2,2t --keyword=C_:1c,2 \
+ --keyword=N_ --keyword=NC_:1c,2 --foreign-user \
+ --copyright-holder="SuSE Linux Products GmbH, Nuernberg" \
+ --from-code=UTF-8 --add-comments=TRANSLATORS --sort-by-file \
+ $(find . ! -name cockpit.js -name '*.js' -o ! -name '*.test.jsx' -name '*.jsx') | \
+ sed '/^#/ s/, c-format//' > agama.pot
+ msgfmt --statistics agama.pot
+
+ - name: Validate the generated POT file
+ run: msgfmt --check-format agama/web/agama.pot
+
+ - name: Checkout Weblate sources
+ uses: actions/checkout@v3
+ with:
+ path: agama-weblate
+ repository: openSUSE/agama-weblate
+ token: ${{ secrets.GH_TOKEN }}
+
+ - name: Configure Git
+ run: |
+ git config --global user.name "YaST Bot"
+ git config --global user.email "yast-devel@opensuse.org"
+
+ - name: Update POT file
+ run: |
+ mkdir -p agama-weblate/web
+ cp agama/web/agama.pot agama-weblate/web/agama.pot
+
+ # any change besides the timestamp in the POT file?
+ - name: Check changes
+ id: check_changes
+ run: |
+ git -C agama-weblate diff --ignore-matching-lines="POT-Creation-Date:" web/agama.pot > pot.diff
+
+ if [ -s pot.diff ]; then
+ echo "POT file updated"
+ echo "pot_updated=true" >> $GITHUB_OUTPUT
+ else
+ echo "POT file unchanged"
+ echo "pot_updated=false" >> $GITHUB_OUTPUT
+ fi
+
+ - name: Push updated POT file
+ # run only when the POT file has been updated
+ if: steps.check_changes.outputs.pot_updated == 'true'
+ run: |
+ cd agama-weblate
+ git add web/agama.pot
+ git commit -m "Update POT file"$'\n\n'"Agama commit: $GITHUB_SHA"
+ git push
diff --git a/README.md b/README.md
index 36a4d84087..b12c27f19e 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,15 @@
+**Checks**
+
[![CI Status](https://github.com/openSUSE/agama/actions/workflows/ci.yml/badge.svg)](https://github.com/openSUSE/agama/actions/workflows/ci.yml)
[![Coverage Status](https://coveralls.io/repos/github/openSUSE/agama/badge.svg?branch=master)](https://coveralls.io/github/openSUSE/agama?branch=master)
[![GitHub Pages](https://github.com/openSUSE/agama/actions/workflows/github-pages.yml/badge.svg)](https://github.com/openSUSE/agama/actions/workflows/github-pages.yml)
+**Translations**
+
+[![Weblate Update POT](https://github.com/openSUSE/agama/actions/workflows/weblate-update-pot.yml/badge.svg)](https://github.com/openSUSE/agama/actions/workflows/weblate-update-pot.yml)
+[![Weblate Merge PO](https://github.com/openSUSE/agama/actions/workflows/weblate-merge-po.yml/badge.svg)](https://github.com/openSUSE/agama/actions/workflows/weblate-merge-po.yml)
+[![Translation Status](https://l10n.opensuse.org/widgets/agama/-/agama-web/svg-badge.svg)](https://l10n.opensuse.org/engage/agama/)
+
**[OBS systemsmanagement:Agama:Staging](https://build.opensuse.org/project/show/systemsmanagement:Agama:Staging)**
[![Submit agama-cli](https://github.com/openSUSE/agama/actions/workflows/obs-staging-rust.yml/badge.svg)](https://github.com/openSUSE/agama/actions/workflows/obs-staging-rust.yml)
diff --git a/doc/answers_example.yaml b/doc/answers_example.yaml
new file mode 100644
index 0000000000..581f39ff1d
--- /dev/null
+++ b/doc/answers_example.yaml
@@ -0,0 +1,3 @@
+answers:
+ - class: storage.luks_activation
+ answer: "skip"
diff --git a/doc/dbus/bus/org.opensuse.Agama.Questions1.bus.xml b/doc/dbus/bus/org.opensuse.Agama.Questions1.bus.xml
index c9d9c8a126..12ed3ad5c5 100644
--- a/doc/dbus/bus/org.opensuse.Agama.Questions1.bus.xml
+++ b/doc/dbus/bus/org.opensuse.Agama.Questions1.bus.xml
@@ -87,10 +87,13 @@
+
+
+
-
-
+
diff --git a/doc/dbus/org.opensuse.Agama.Questions1.doc.xml b/doc/dbus/org.opensuse.Agama.Questions1.doc.xml
index eddea73a4a..e32cdbb56f 100644
--- a/doc/dbus/org.opensuse.Agama.Questions1.doc.xml
+++ b/doc/dbus/org.opensuse.Agama.Questions1.doc.xml
@@ -76,18 +76,30 @@ when the question is answered and the answer is successfully read.
+
-
+
-
-
+
+
+
+
diff --git a/doc/questions.md b/doc/questions.md
index 09f8919882..edd58bb454 100644
--- a/doc/questions.md
+++ b/doc/questions.md
@@ -53,3 +53,14 @@ Sensitive answers or params will be replaced, so the user has to explicitly spec
default answer instead of asking user.
4. I have my own vendor iso and want to pre-configure installer using CLI before showing web UI. And some actions can/will raise
questions that I want to answer before user sees UI -> Use answers.yml file
+
+### Question Types
+
+| class | description | possible answers | available data | notes |
+|--- |--- |--- |--- |--- |
+| `software.medium_error` | There is a problem accessing the installation medium | `Retry` `Skip` | `url`: the URL where the access failed | |
+| `software.unsigned_file` | A file from a repository is not digitally signed, should it be used anyway? | `Yes` `No` | `filename`: the name of the file | |
+| `software.import_gpg` | A signature was made with an unknown GPG key, should the key be trusted? | `Trust` `Skip` | `id`, `name` and `fingerprint` of the key | |
+| `storage.activate_multipath` | The system seems to contain a multipath device, should it be activated? | `yes` `no` | | The answers are lowercase here, this should be unified. |
+| `storage.commit_error` | A storage action failed, should the installation continue? | `yes` `no` | | Also lowercase here. |
+| `storage.luks_activation` | A LUKS-encrypted device was detected and a password is needed to probe it | `skip` `decrypt` | `device` name, `label` and `size` of the device, and the `attempt` number | The answer contains an additional `password` field which has to be filled when the answer is `decrypt`. The `attempt` value can be used to stop asking after too many wrong passwords. |
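+
+For example, an answers file in the format of `doc/answers_example.yaml` could look like the following sketch; the `device` and `password` values are only illustrations, the matcher data has to match the data of the real question:
+
+```yaml
+answers:
+  - class: storage.luks_activation
+    data:
+      device: "/dev/vda1"
+    answer: "decrypt"
+    password: "secret"
+  - class: storage.activate_multipath
+    answer: "no"
+```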
diff --git a/rust/Cargo.lock b/rust/Cargo.lock
index 1260d60b6b..6afaee1473 100644
--- a/rust/Cargo.lock
+++ b/rust/Cargo.lock
@@ -37,6 +37,8 @@ dependencies = [
"async-std",
"log",
"parking_lot",
+ "serde",
+ "serde_yaml",
"simplelog",
"systemd-journal-logger",
"thiserror",
@@ -674,6 +676,12 @@ dependencies = [
"syn 2.0.26",
]
+[[package]]
+name = "equivalent"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+
[[package]]
name = "errno"
version = "0.3.1"
@@ -892,6 +900,12 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+[[package]]
+name = "hashbrown"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+
[[package]]
name = "heck"
version = "0.4.1"
@@ -936,7 +950,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
- "hashbrown",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.14.0",
]
[[package]]
@@ -1614,11 +1638,11 @@ dependencies = [
[[package]]
name = "serde_yaml"
-version = "0.9.21"
+version = "0.9.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9d684e3ec7de3bf5466b32bd75303ac16f0736426e5a4e0d6e489559ce1249c"
+checksum = "bd5f51e3fdb5b9cdd1577e1cb7a733474191b1aca6a72c2e50913241632c1180"
dependencies = [
- "indexmap",
+ "indexmap 2.0.0",
"itoa",
"ryu",
"serde",
@@ -1856,7 +1880,7 @@ version = "0.19.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13"
dependencies = [
- "indexmap",
+ "indexmap 1.9.3",
"toml_datetime",
"winnow",
]
diff --git a/rust/agama-cli/src/questions.rs b/rust/agama-cli/src/questions.rs
index d0a22c1c00..8bce0cb928 100644
--- a/rust/agama-cli/src/questions.rs
+++ b/rust/agama-cli/src/questions.rs
@@ -1,12 +1,15 @@
use agama_lib::connection;
use agama_lib::proxies::Questions1Proxy;
-use anyhow::{Context, Ok};
+use anyhow::Context;
use clap::{Args, Subcommand, ValueEnum};
#[derive(Subcommand, Debug)]
pub enum QuestionsCommands {
/// Set mode for answering questions.
Mode(ModesArgs),
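+ /// Set a YAML file with predefined answers to the questions.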
+ Answers {
+ path: String,
+ },
}
#[derive(Args, Debug)]
@@ -20,29 +23,31 @@ pub enum Modes {
Interactive,
NonInteractive,
}
-// TODO when more commands is added, refactor and add it to agama-lib and share a bit of functionality
-async fn set_mode(value: Modes) -> anyhow::Result<()> {
- match value {
- Modes::NonInteractive => {
- let connection = connection().await?;
- let proxy = Questions1Proxy::new(&connection)
- .await
- .context("Failed to connect to Questions service")?;
- // TODO: how to print dbus error in that anyhow?
- proxy
- .use_default_answer()
- .await
- .context("Failed to set default answer")?;
- }
- Modes::Interactive => log::info!("not implemented"), //TODO do it
- }
+async fn set_mode(proxy: Questions1Proxy<'_>, value: Modes) -> anyhow::Result<()> {
+ // TODO: how to print dbus error in that anyhow?
+ proxy
+ .set_interactive(value == Modes::Interactive)
+ .await
+ .context("Failed to set mode for answering questions.")
+}
- Ok(())
+async fn set_answers(proxy: Questions1Proxy<'_>, path: String) -> anyhow::Result<()> {
+ // TODO: how to print dbus error in that anyhow?
+ proxy
+ .add_answer_file(path.as_str())
+ .await
+ .context("Failed to set answers from answers file")
}
pub async fn run(subcommand: QuestionsCommands) -> anyhow::Result<()> {
+ let connection = connection().await?;
+ let proxy = Questions1Proxy::new(&connection)
+ .await
+ .context("Failed to connect to Questions service")?;
+
match subcommand {
- QuestionsCommands::Mode(value) => set_mode(value.value).await,
+ QuestionsCommands::Mode(value) => set_mode(proxy, value.value).await,
+ QuestionsCommands::Answers { path } => set_answers(proxy, path).await,
}
}
diff --git a/rust/agama-dbus-server/Cargo.toml b/rust/agama-dbus-server/Cargo.toml
index dff9b917f1..f74f49d0d2 100644
--- a/rust/agama-dbus-server/Cargo.toml
+++ b/rust/agama-dbus-server/Cargo.toml
@@ -18,3 +18,5 @@ async-std = { version = "1.12.0", features = ["attributes"]}
uuid = { version = "1.3.4", features = ["v4"] }
parking_lot = "0.12.1"
thiserror = "1.0.40"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_yaml = "0.9.24"
\ No newline at end of file
diff --git a/rust/agama-dbus-server/src/network/action.rs b/rust/agama-dbus-server/src/network/action.rs
index e0592e7fa4..9f935f70f1 100644
--- a/rust/agama-dbus-server/src/network/action.rs
+++ b/rust/agama-dbus-server/src/network/action.rs
@@ -3,7 +3,7 @@ use agama_lib::network::types::DeviceType;
/// Networking actions, like adding, updating or removing connections.
///
-/// These actions are meant to be processed by [crate::system::NetworkSystem], updating the model
+/// These actions are meant to be processed by [crate::network::system::NetworkSystem], updating the model
/// and the D-Bus tree as needed.
#[derive(Debug)]
pub enum Action {
diff --git a/rust/agama-dbus-server/src/network/dbus/interfaces.rs b/rust/agama-dbus-server/src/network/dbus/interfaces.rs
index e85f1d188e..db5e2ad118 100644
--- a/rust/agama-dbus-server/src/network/dbus/interfaces.rs
+++ b/rust/agama-dbus-server/src/network/dbus/interfaces.rs
@@ -82,7 +82,7 @@ impl Device {
///
/// Possible values: 0 = loopback, 1 = ethernet, 2 = wireless.
///
- /// See [crate::model::DeviceType].
+ /// See [agama_lib::network::types::DeviceType].
#[dbus_interface(property, name = "Type")]
pub fn device_type(&self) -> u8 {
self.device.type_ as u8
@@ -124,7 +124,7 @@ impl Connections {
/// Adds a new network connection.
///
/// * `id`: connection name.
- /// * `ty`: connection type (see [crate::model::DeviceType]).
+ /// * `ty`: connection type (see [agama_lib::network::types::DeviceType]).
pub async fn add_connection(&mut self, id: String, ty: u8) -> zbus::fdo::Result<()> {
let actions = self.actions.lock();
actions
@@ -274,7 +274,7 @@ impl Ipv4 {
///
/// Possible values: "disabled", "auto", "manual" or "link-local".
///
- /// See [crate::model::IpMethod].
+ /// See [crate::network::model::IpMethod].
#[dbus_interface(property)]
pub fn method(&self) -> String {
let connection = self.get_connection();
@@ -401,7 +401,7 @@ impl Wireless {
///
/// Possible values: "unknown", "adhoc", "infrastructure", "ap" or "mesh".
///
- /// See [crate::model::WirelessMode].
+ /// See [crate::network::model::WirelessMode].
#[dbus_interface(property)]
pub fn mode(&self) -> String {
let connection = self.get_wireless();
@@ -442,7 +442,7 @@ impl Wireless {
/// Possible values: "none", "owe", "ieee8021x", "wpa-psk", "sae", "wpa-eap",
/// "wpa-eap-suite-b192".
///
- /// See [crate::model::SecurityProtocol].
+ /// See [crate::network::model::SecurityProtocol].
#[dbus_interface(property)]
pub fn security(&self) -> String {
let connection = self.get_wireless();
diff --git a/rust/agama-dbus-server/src/questions.rs b/rust/agama-dbus-server/src/questions.rs
index b84a3a834d..21afb6606c 100644
--- a/rust/agama-dbus-server/src/questions.rs
+++ b/rust/agama-dbus-server/src/questions.rs
@@ -9,6 +9,8 @@ use anyhow::Context;
use log;
use zbus::{dbus_interface, fdo::ObjectManager, zvariant::ObjectPath, Connection};
+mod answers;
+
#[derive(Clone, Debug)]
struct GenericQuestionObject(questions::GenericQuestion);
@@ -81,7 +83,11 @@ enum QuestionType {
}
/// Trait for objects that can provide answers to all kind of Question.
+///
+/// If no strategy is selected or the answer is unknown, then ask the user.
trait AnswerStrategy {
+ /// Id for quick runtime inspection of strategy type
+ fn id(&self) -> u8;
/// Provides answer for generic question
///
/// It gets as argument the question to answer. Returned value is `answer`
@@ -103,7 +109,17 @@ trait AnswerStrategy {
/// AnswerStrategy that provides as answer the default option.
struct DefaultAnswers;
+impl DefaultAnswers {
+ pub fn id() -> u8 {
+ 1
+ }
+}
+
impl AnswerStrategy for DefaultAnswers {
+ fn id(&self) -> u8 {
+ DefaultAnswers::id()
+ }
+
fn answer(&self, question: &GenericQuestion) -> Option<String> {
Some(question.default_option.clone())
}
@@ -227,11 +243,43 @@ impl Questions {
Ok(())
}
- /// sets questions to be answered by default answer instead of asking user
- async fn use_default_answer(&mut self) -> Result<(), Error> {
- log::info!("Answer questions with default option");
- self.answer_strategies.push(Box::new(DefaultAnswers {}));
- Ok(())
+ /// Property that defines whether questions are answered interactively by the user or
+ /// automatically with the default answer
+ #[dbus_interface(property)]
+ fn interactive(&self) -> bool {
+ let last = self.answer_strategies.last();
+ if let Some(real_strategy) = last {
+ real_strategy.id() != DefaultAnswers::id()
+ } else {
+ true
+ }
+ }
+
+ #[dbus_interface(property)]
+ fn set_interactive(&mut self, value: bool) {
+ if value == self.interactive() {
+ log::info!("interactive value unchanged - {}", value);
+ return;
+ }
+
+ log::info!("set interactive to {}", value);
+ if value {
+ self.answer_strategies.pop();
+ } else {
+ self.answer_strategies.push(Box::new(DefaultAnswers {}));
+ }
+ }
+
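+ /// Adds an answer strategy that uses predefined answers loaded from the given YAML file.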
+ fn add_answer_file(&mut self, path: String) -> Result<(), Error> {
+ log::info!("Adding answer file {}", path);
+ let answers = answers::Answers::new_from_file(path.as_str());
+ match answers {
+ Ok(answers) => {
+ self.answer_strategies.push(Box::new(answers));
+ Ok(())
+ }
+ Err(e) => Err(e.into()),
+ }
}
}
diff --git a/rust/agama-dbus-server/src/questions/answers.rs b/rust/agama-dbus-server/src/questions/answers.rs
new file mode 100644
index 0000000000..a955093fdd
--- /dev/null
+++ b/rust/agama-dbus-server/src/questions/answers.rs
@@ -0,0 +1,291 @@
+use std::collections::HashMap;
+
+use anyhow::Context;
+use serde::{Deserialize, Serialize};
+
+/// Data structure for a single YAML answer. For the specification of the variables see the
+/// corresponding [agama_lib::questions::GenericQuestion] fields.
+/// The *matcher* part is: `class`, `text`, `data`.
+/// The *answer* part is: `answer`, `password`.
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+struct Answer {
+ pub class: Option<String>,
+ pub text: Option<String>,
+ /// A matching GenericQuestion can have other data fields too
+ pub data: Option<HashMap<String, String>>,
+ /// The answer text is the only mandatory part of an Answer
+ pub answer: String,
+ /// All possible mixins have to be here, so they can be specified in an Answer
+ pub password: Option<String>,
+}
+
+/// Data structure holding a list of [Answer] entries.
+/// The first matching Answer is used, even if there is
+/// a better (more specific) match later in the list.
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+pub struct Answers {
+ answers: Vec<Answer>,
+}
+
+impl Answers {
+ pub fn new_from_file(path: &str) -> anyhow::Result<Self> {
+ let f = std::fs::File::open(path).context(format!("Failed to open {}", path))?;
+ let result: Self =
+ serde_yaml::from_reader(f).context(format!("Failed to parse values at {}", path))?;
+
+ Ok(result)
+ }
+
+ pub fn id() -> u8 {
+ 2
+ }
+
+ fn find_answer(&self, question: &agama_lib::questions::GenericQuestion) -> Option<&Answer> {
+ 'main: for answerd in self.answers.iter() {
+ if let Some(v) = &answerd.class {
+ if !question.class.eq(v) {
+ continue;
+ }
+ }
+ if let Some(v) = &answerd.text {
+ if !question.text.eq(v) {
+ continue;
+ }
+ }
+ if let Some(v) = &answerd.data {
+ for (key, value) in v {
+ // all keys defined in the answer have to match
+ let entry = question.data.get(key);
+ if let Some(e_val) = entry {
+ if !e_val.eq(value) {
+ continue 'main;
+ }
+ } else {
+ continue 'main;
+ }
+ }
+ }
+
+ return Some(answerd);
+ }
+
+ None
+ }
+}
+
+impl crate::questions::AnswerStrategy for Answers {
+ fn id(&self) -> u8 {
+ Answers::id()
+ }
+
+ fn answer(&self, question: &agama_lib::questions::GenericQuestion) -> Option<String> {
+ let answer = self.find_answer(question);
+ answer.map(|answer| answer.answer.clone())
+ }
+
+ fn answer_with_password(
+ &self,
+ question: &agama_lib::questions::WithPassword,
+ ) -> (Option<String>, Option<String>) {
+ // use the fact that WithPassword shares the same matchers as the generic question
+ let answer = self.find_answer(&question.base);
+ if let Some(answer) = answer {
+ (Some(answer.answer.clone()), answer.password.clone())
+ } else {
+ (None, None)
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use agama_lib::questions::{GenericQuestion, WithPassword};
+
+ use crate::questions::AnswerStrategy;
+
+ use super::*;
+
+ // set of fixtures for test
+ fn get_answers() -> Answers {
+ Answers {
+ answers: vec![
+ Answer {
+ class: Some("without_data".to_string()),
+ data: None,
+ text: None,
+ answer: "Ok".to_string(),
+ password: Some("testing pwd".to_string()), // ignored for generic question
+ },
+ Answer {
+ class: Some("with_data".to_string()),
+ data: Some(HashMap::from([
+ ("data1".to_string(), "value1".to_string()),
+ ("data2".to_string(), "value2".to_string()),
+ ])),
+ text: None,
+ answer: "Maybe".to_string(),
+ password: None,
+ },
+ Answer {
+ class: Some("with_data".to_string()),
+ data: Some(HashMap::from([(
+ "data1".to_string(),
+ "another_value1".to_string(),
+ )])),
+ text: None,
+ answer: "Ok2".to_string(),
+ password: None,
+ },
+ ],
+ }
+ }
+
+ #[test]
+ fn test_class_match() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "without_data".to_string(),
+ text: "JFYI we will kill all bugs during installation.".to_string(),
+ options: vec!["Ok".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::new(),
+ answer: "".to_string(),
+ };
+ assert_eq!(Some("Ok".to_string()), answers.answer(&question));
+ }
+
+ #[test]
+ fn test_no_match() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "non-existing".to_string(),
+ text: "Hard question?".to_string(),
+ options: vec!["Ok".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::new(),
+ answer: "".to_string(),
+ };
+ assert_eq!(None, answers.answer(&question));
+ }
+
+ #[test]
+ fn test_with_password() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "without_data".to_string(),
+ text: "Please provide password for dooms day.".to_string(),
+ options: vec!["Ok".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::new(),
+ answer: "".to_string(),
+ };
+ let with_password = WithPassword {
+ password: "".to_string(),
+ base: question,
+ };
+ let expected = (Some("Ok".to_string()), Some("testing pwd".to_string()));
+ assert_eq!(expected, answers.answer_with_password(&with_password));
+ }
+
+ /// An Answer matches on *data* if all its keys and values are in the GenericQuestion *data*.
+ /// The GenericQuestion can have other *data* keys.
+ #[test]
+ fn test_partial_data_match() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "with_data".to_string(),
+ text: "Hard question?".to_string(),
+ options: vec!["Ok2".to_string(), "Maybe".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::from([
+ ("data1".to_string(), "value1".to_string()),
+ ("data2".to_string(), "value2".to_string()),
+ ("data3".to_string(), "value3".to_string()),
+ ]),
+ answer: "".to_string(),
+ };
+ assert_eq!(Some("Maybe".to_string()), answers.answer(&question));
+ }
+
+ #[test]
+ fn test_full_data_match() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "with_data".to_string(),
+ text: "Hard question?".to_string(),
+ options: vec!["Ok2".to_string(), "Maybe".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::from([
+ ("data1".to_string(), "another_value1".to_string()),
+ ("data2".to_string(), "value2".to_string()),
+ ("data3".to_string(), "value3".to_string()),
+ ]),
+ answer: "".to_string(),
+ };
+ assert_eq!(Some("Ok2".to_string()), answers.answer(&question));
+ }
+
+ #[test]
+ fn test_no_data_match() {
+ let answers = get_answers();
+ let question = GenericQuestion {
+ id: 1,
+ class: "with_data".to_string(),
+ text: "Hard question?".to_string(),
+ options: vec!["Ok2".to_string(), "Maybe".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::from([
+ ("data1".to_string(), "different value".to_string()),
+ ("data2".to_string(), "value2".to_string()),
+ ("data3".to_string(), "value3".to_string()),
+ ]),
+ answer: "".to_string(),
+ };
+ assert_eq!(None, answers.answer(&question));
+ }
+
+ // A "universal answer" with unspecified class+text+data is possible
+ #[test]
+ fn test_universal_match() {
+ let answers = Answers {
+ answers: vec![Answer {
+ class: None,
+ text: None,
+ data: None,
+ answer: "Yes".into(),
+ password: None,
+ }],
+ };
+ let question = GenericQuestion {
+ id: 1,
+ class: "without_data".to_string(),
+ text: "JFYI we will kill all bugs during installation.".to_string(),
+ options: vec!["Ok".to_string(), "Cancel".to_string()],
+ default_option: "Cancel".to_string(),
+ data: HashMap::new(),
+ answer: "".to_string(),
+ };
+ assert_eq!(Some("Yes".to_string()), answers.answer(&question));
+ }
+
+ #[test]
+ fn test_loading_yaml() {
+ let file = r#"
+ answers:
+ - class: "without_data"
+ answer: "OK"
+ - class: "with_data"
+ data:
+ testk: testv
+ testk2: testv2
+ answer: "Cancel"
+ "#;
+ let result: Answers = serde_yaml::from_str(file).expect("failed to load yaml string");
+ assert_eq!(result.answers.len(), 2);
+ }
+}
diff --git a/rust/agama-lib/src/proxies.rs b/rust/agama-lib/src/proxies.rs
index d6678c0701..9d6371eac0 100644
--- a/rust/agama-lib/src/proxies.rs
+++ b/rust/agama-lib/src/proxies.rs
@@ -111,14 +111,21 @@ trait Locale1 {
fn set_vconsole_keyboard(&self, value: &str) -> zbus::Result<()>;
}
-#[dbus_proxy(interface = "org.opensuse.Agama.Questions1", assume_defaults = true)]
+#[dbus_proxy(
+ interface = "org.opensuse.Agama.Questions1",
+ default_service = "org.opensuse.Agama.Questions1",
+ default_path = "/org/opensuse/Agama/Questions1"
+)]
trait Questions1 {
+ /// AddAnswerFile method
+ fn add_answer_file(&self, path: &str) -> zbus::Result<()>;
+
/// Delete method
fn delete(&self, question: &zbus::zvariant::ObjectPath<'_>) -> zbus::Result<()>;
/// New method
#[dbus_proxy(name = "New")]
- fn new_generic(
+ fn new_quetion(
&self,
class: &str,
text: &str,
@@ -137,6 +144,8 @@ trait Questions1 {
data: std::collections::HashMap<&str, &str>,
) -> zbus::Result;
- /// UseDefaultAnswer method
- fn use_default_answer(&self) -> zbus::Result<()>;
+ /// Interactive property
+ #[dbus_proxy(property)]
+ fn interactive(&self) -> zbus::Result<bool>;
+ fn set_interactive(&self, value: bool) -> zbus::Result<()>;
}
diff --git a/rust/agama-lib/src/questions.rs b/rust/agama-lib/src/questions.rs
index b5e6b75970..34094b4371 100644
--- a/rust/agama-lib/src/questions.rs
+++ b/rust/agama-lib/src/questions.rs
@@ -1,11 +1,11 @@
-use std::collections::HashMap;
+//! Data model for Agama questions
-/// module holdings data model for agama questions
+use std::collections::HashMap;
/// Basic generic question that fits question without special needs
#[derive(Clone, Debug)]
pub struct GenericQuestion {
- /// numeric id used to indetify question on dbus
+ /// numeric id used to identify question on D-Bus
pub id: u32,
/// class of questions. Similar kinds of questions share same class.
/// It is dot separated list of elements. Examples are
@@ -74,11 +74,13 @@ impl GenericQuestion {
/// mixins arise to convert it to Question Struct that have optional mixins
/// inside like
///
+/// ```no_compile
/// struct Question {
/// base: GenericQuestion,
/// with_password: Option<WithPassword>,
/// another_mixin: Option<AnotherMixin>
/// }
+/// ```
///
/// This way all handling code can check if given mixin is used and
/// act appropriately.
diff --git a/rust/package/agama-cli.changes b/rust/package/agama-cli.changes
index b7acc9f760..fa8fb57f97 100644
--- a/rust/package/agama-cli.changes
+++ b/rust/package/agama-cli.changes
@@ -1,5 +1,5 @@
-------------------------------------------------------------------
-Tue Jul 18 15:42:30 UTC 2023 - Imobach Gonzalez Sosa
+Mon Jul 31 06:58:15 UTC 2023 - Imobach Gonzalez Sosa
- Move the settings functionality to a separate package,
agama-settings (gh#openSUSE/agama#666).
@@ -7,6 +7,14 @@ Tue Jul 18 15:42:30 UTC 2023 - Imobach Gonzalez Sosa
- Extend the "Settings" derive macro to generate code for
InstallSettings and NetworkSettings.
+-------------------------------------------------------------------
+Wed Jul 26 11:08:09 UTC 2023 - Josef Reidinger
+
+- CLI: add to "questions" command "answers" subcommand to set
+ file with predefined answers
+- dbus-server: add "AddAnswersFile" method to Questions service
+ (gh#openSUSE/agama#669)
+
-------------------------------------------------------------------
Tue Jul 18 13:32:04 UTC 2023 - Josef Reidinger
diff --git a/service/etc/agama.yaml b/service/etc/agama.yaml
index b2a014c9f7..a867b62b59 100644
--- a/service/etc/agama.yaml
+++ b/service/etc/agama.yaml
@@ -1,13 +1,7 @@
products:
- ALP-Bedrock:
- name: SUSE ALP Server
- description: 'SUSE ALP Server is a flexible, secure, customizable and
- modular Server allowing an enterprise to run a variety of services,
- workloads and application in a compartmentalized form. Based on an
- immutable root filesystem, security has been built into it from the ground.'
- ALP-Micro:
- name: SUSE ALP Micro
- description: 'SUSE ALP Micro is a minimum immutable OS core, focused on
+ ALP-Dolomite:
+ name: SUSE ALP Dolomite
+ description: 'SUSE ALP Dolomite is a minimum immutable OS core, focused on
security to provide the bare minimum to run workloads and services as
containers or virtual machines.'
Tumbleweed:
@@ -143,99 +137,22 @@ Tumbleweed:
proposed_configurable: true
disable_order: 2
-ALP-Bedrock:
+ALP-Dolomite:
software:
installation_repositories:
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Bedrock:/0.1/images/repo/ALP-Bedrock-0.1-x86_64-Media1/
+ - url: https://updates.suse.com/SUSE/Products/ALP-Dolomite/1.0/x86_64/product/
archs: x86_64
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Bedrock:/0.1/images/repo/ALP-Bedrock-0.1-aarch64-Media1/
+ - url: https://updates.suse.com/SUSE/Products/ALP-Dolomite/1.0/aarch64/product/
archs: aarch64
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Bedrock:/0.1/images/repo/ALP-Bedrock-0.1-s390x-Media1/
+ - url: https://updates.suse.com/SUSE/Products/ALP-Dolomite/1.0/s390x/product/
archs: s390
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Bedrock:/0.1/images/repo/ALP-Bedrock-0.1-ppc64le-Media1/
+ - url: https://updates.suse.com/SUSE/Products/ALP-Dolomite/1.0/ppc64le/product/
archs: ppc
mandatory_patterns:
- - alp-bedrock-base
- - alp-bedrock-cockpit
- - alp-bedrock-hardware
- - alp-bedrock-container_runtime
- optional_patterns: null # no optional pattern shared
- mandatory_packages:
- - package: device-mapper # Apparently needed if devices at /dev/mapper are used at boot (eg. FDE)
- - package: fde-tools # Needed for FDE with TPM, hardcoded here temporarily (aarch64, x86_64 specific)
- archs: aarch64, x86_64
- - package: libtss2-tcti-device0 # Same than fde-tools
- optional_packages: null
- base_product: ALP-Bedrock
-
- security:
- tpm_luks_open: true
- lsm: selinux
- available_lsms:
- # apparmor:
- # patterns:
- # - apparmor
- selinux:
- patterns:
- - alp-bedrock-selinux
- policy: enforcing
- none:
- patterns: null
-
- storage:
- encryption:
- method: luks2
- pbkdf: pbkdf2
- volumes:
- - mount_point: "/"
- fs_type: btrfs
- min_size: 5 GiB
- fs_types:
- - btrfs
- weight: 1
- snapshots: true
- snapshots_configurable: false
- proposed_configurable: false
- btrfs_default_subvolume: "@"
- btrfs_read_only: true
- subvolumes:
- - path: root
- - path: home
- - path: opt
- - path: srv
- - path: boot/writable
- - path: usr/local
- - path: boot/grub2/arm64-efi
- archs: aarch64
- - path: boot/grub2/i386-pc
- archs: x86_64
- - path: boot/grub2/powerpc-ieee1275
- archs: ppc,!board_powernv
- - path: boot/grub2/s390x-emu
- archs: s390
- - path: boot/grub2/x86_64-efi
- archs: x86_64
- - path: var
- copy_on_write: false
-
-ALP-Micro:
- software:
- installation_repositories:
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Micro:/0.1/images/repo/ALP-Micro-0.1-x86_64-Media1/
- archs: x86_64
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Micro:/0.1/images/repo/ALP-Micro-0.1-aarch64-Media1/
- archs: aarch64
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Micro:/0.1/images/repo/ALP-Micro-0.1-s390x-Media1/
- archs: s390
- - url: https://download.opensuse.org/repositories/SUSE:/ALP:/Products:/Micro:/0.1/images/repo/ALP-Micro-0.1-ppc64le-Media1/
- archs: ppc
-
- mandatory_patterns:
- - alp-micro-base
- - alp-micro-cockpit
- - alp-micro-container_runtime
- - alp-micro-hardware
+ - patterns-alp-base
+ - patterns-alp-cockpit
+ - patterns-alp-hardware
optional_patterns: null # no optional pattern shared
mandatory_packages:
- package: device-mapper # Apparently needed if devices at /dev/mapper are used at boot (eg. FDE)
@@ -243,7 +160,7 @@ ALP-Micro:
archs: aarch64, x86_64
- package: libtss2-tcti-device0 # Same than fde-tools
optional_packages: null
- base_product: ALP-Micro
+ base_product: ALP-Dolomite
security:
tpm_luks_open: true
@@ -254,7 +171,7 @@ ALP-Micro:
# - apparmor
selinux:
patterns:
- - alp-micro-selinux
+ - patterns-alp-selinux
policy: enforcing
none:
patterns: null
diff --git a/service/package/rubygem-agama.changes b/service/package/rubygem-agama.changes
index 5bdcc703c4..035d15fc98 100644
--- a/service/package/rubygem-agama.changes
+++ b/service/package/rubygem-agama.changes
@@ -1,3 +1,9 @@
+-------------------------------------------------------------------
+Wed Jul 26 10:00:39 UTC 2023 - José Iván López González
+
+- Adapt config file to install ALP Dolomite instead of ALP Micro
+ and remove ALP Bedrock (gh#openSUSE/agama#674).
+
-------------------------------------------------------------------
Mon Jul 17 09:16:38 UTC 2023 - Josef Reidinger
diff --git a/web/cspell.json b/web/cspell.json
index 51faca87f9..940b7fae66 100644
--- a/web/cspell.json
+++ b/web/cspell.json
@@ -32,6 +32,7 @@
"filename",
"fullname",
"freedesktop",
+ "gettext",
"ibft",
"ifaces",
"ipaddr",
diff --git a/web/po/cs.po b/web/po/cs.po
new file mode 100644
index 0000000000..b07094de5c
--- /dev/null
+++ b/web/po/cs.po
@@ -0,0 +1,33 @@
+# Czech translations
+#
+# Copyright (C) YEAR SuSE Linux Products GmbH, Nuernberg
+# This file is distributed under the same license as the Agama package.
+# FIRST AUTHOR , YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-07-19 12:50+0000\n"
+"PO-Revision-Date: 2023-07-26 15:03+0000\n"
+"Last-Translator: Ladislav Slezák \n"
+"Language-Team: Czech "
+"\n"
+"Language: cs\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
+"X-Generator: Weblate 4.9.1\n"
+
+#: src/components/core/FileViewer.jsx:67
+msgid "Reading file..."
+msgstr "Soubor se načítá…"
+
+#: src/components/core/FileViewer.jsx:73
+msgid "Cannot read the file"
+msgstr "Soubor nelze přečíst"
+
+#: src/components/core/FileViewer.jsx:82
+msgid "Close"
+msgstr "Zavřít"
diff --git a/web/po/de.po b/web/po/de.po
deleted file mode 100644
index 0394e917d0..0000000000
--- a/web/po/de.po
+++ /dev/null
@@ -1,39 +0,0 @@
-# starter-kit German translations
-#, fuzzy
-msgid ""
-msgstr ""
-"Project-Id-Version: starter-kit 1.0\n"
-"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2022-03-09 16:09+0100\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
-"Last-Translator: FULL NAME \n"
-"Language-Team: LANGUAGE \n"
-"Language: de\n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=2; plural=n != 1\n"
-
-#: src/index.html:20
-msgid "Cockpit Starter Kit"
-msgstr "Cockpit Bausatz"
-
-#: src/app.jsx:43
-msgid "Running on $0"
-msgstr "Läuft auf $0"
-
-#: org.cockpit-project.starter-kit.metainfo.xml:6
-msgid "Scaffolding for a cockpit module"
-msgstr "Gerüst für ein Cockpit-Modul"
-
-#: org.cockpit-project.starter-kit.metainfo.xml:8
-msgid "Scaffolding for a cockpit module."
-msgstr "Gerüst für ein Cockpit-Modul."
-
-#: src/manifest.json:0 org.cockpit-project.starter-kit.metainfo.xml:5
-msgid "Starter Kit"
-msgstr "Bausatz"
-
-#: src/app.jsx:29
-msgid "Unknown"
-msgstr "Unbekannt"
diff --git a/web/po/html2po b/web/po/html2po
deleted file mode 100755
index 8b34fa05dd..0000000000
--- a/web/po/html2po
+++ /dev/null
@@ -1,264 +0,0 @@
-#!/usr/bin/env node
-
-/*
- * Extracts translatable strings from HTML files in the following forms:
- *
- * String
- * String
- * String
- *
- *
- * Supports the following Glade compatible forms:
- *
- * String
- * String
- *
- * Supports the following angular-gettext compatible forms:
- *
- * String
- * Singular
- *
- * Note that some of the use of the translated may not support all the strings
- * depending on the code actually using these strings to translate the HTML.
- */
-
-
-function fatal(message, code) {
- console.log((filename || "html2po") + ": " + message);
- process.exit(code || 1);
-}
-
-function usage() {
- console.log("usage: html2po input output");
- process.exit(2);
-}
-
-var fs, htmlparser, path, stdio;
-
-try {
- fs = require('fs');
- path = require('path');
- htmlparser = require('htmlparser');
- stdio = require('stdio');
-} catch (ex) {
- fatal(ex.message, 127); /* missing looks for this */
-}
-
-var opts = stdio.getopt({
- directory: { key: "d", args: 1, description: "Base directory for input files", default: "." },
- output: { key: "o", args: 1, description: "Output file" },
- from: { key: "f", args: 1, description: "File containing list of input files", default: "" },
-});
-
-if (!opts.from && opts.args.length < 1) {
- usage();
-}
-
-var input = opts.args;
-var entries = { };
-
-/* Filename being parsed and offset of line number */
-var filename = null;
-var offsets = 0;
-
-/* The HTML parser we're using */
-var handler = new htmlparser.DefaultHandler(function(error, dom) {
- if (error)
- fatal(error);
- else
- walk(dom);
-});
-
-prepare();
-
-/* Decide what input files to process */
-function prepare() {
- if (opts.from) {
- fs.readFile(opts.from, { encoding: "utf-8"}, function(err, data) {
- if (err)
- fatal(err.message);
- input = data.split("\n").filter(function(value) {
- return !!value;
- }).concat(input);
- step();
- });
- } else {
- step();
- }
-}
-
-/* Now process each file in turn */
-function step() {
- filename = input.shift();
- if (filename === undefined) {
- finish();
- return;
- }
-
- /* Qualify the filename if necessary */
- var full = filename;
- if (opts.directory)
- full = path.join(opts.directory, filename);
-
- fs.readFile(full, { encoding: "utf-8"}, function(err, data) {
- if (err)
- fatal(err.message);
-
- var parser = new htmlparser.Parser(handler, { includeLocation: true });
- parser.parseComplete(data);
- step();
- });
-}
-
-/* Process an array of nodes */
-function walk(children) {
- if (!children)
- return;
-
- children.forEach(function(child) {
- var line = (child.location || { }).line || 0;
- var offset = line - 1;
-
- /* Scripts get their text processed as HTML */
- if (child.type == 'script' && child.children) {
- var parser = new htmlparser.Parser(handler, { includeLocation: true });
-
- /* Make note of how far into the outer HTML file we are */
- offsets += offset;
-
- child.children.forEach(function(node) {
- parser.parseChunk(node.raw);
- });
- parser.done();
-
- offsets -= offset;
-
- /* Tags get extracted as usual */
- } else if (child.type == 'tag') {
- tag(child);
- }
- });
-}
-
-/* Process a single loaded tag */
-function tag(node) {
-
- var tasks, line, entry;
- var attrs = node.attribs || { };
- var nest = true;
-
- /* Extract translate strings */
- if ("translate" in attrs || "translatable" in attrs) {
- tasks = (attrs["translate"] || attrs["translatable"] || "yes").split(" ");
-
- /* Calculate the line location taking into account nested parsing */
- line = (node.location || { })["line"] || 0;
- line += offsets;
-
- entry = {
- msgctxt: attrs['translate-context'] || attrs['context'],
- msgid_plural: attrs['translate-plural'],
- locations: [ filename + ":" + line ]
- };
-
- /* For each thing listed */
- tasks.forEach(function(task) {
- var copy = Object.assign({}, entry);
-
- /* The element text itself */
- if (task == "yes" || task == "translate") {
- copy.msgid = extract(node.children);
- nest = false;
-
- /* An attribute */
- } else if (task) {
- copy.msgid = attrs[task];
- }
-
- if (copy.msgid)
- push(copy);
- });
- }
-
- /* Walk through all the children */
- if (nest)
- walk(node.children);
-}
-
-/* Push an entry onto the list */
-function push(entry) {
- var key = entry.msgid + "\0" + entry.msgid_plural + "\0" + entry.msgctxt;
- var prev = entries[key];
- if (prev) {
- prev.locations = prev.locations.concat(entry.locations);
- } else {
- entries[key] = entry;
- }
-}
-
-/* Extract the given text */
-function extract(children) {
- if (!children)
- return null;
-
- var i, len, node, str = [];
- children.forEach(function(node) {
- if (node.type == 'tag' && node.children)
- str.push(extract(node.children))
- else if (node.type == 'text' && node.data)
- str.push(node.data);
- });
-
- return str.join("");
-}
-
-/* Escape a string for inclusion in po file */
-function escape(string) {
- var bs = string.split('\\').join('\\\\').split('"').join('\\"');
- return bs.split("\n").map(function(line) {
- return '"' + line + '"';
- }).join("\n");
-}
-
-/* Finish by writing out the strings */
-function finish() {
- var result = [
- 'msgid ""',
- 'msgstr ""',
- '"Project-Id-Version: PACKAGE_VERSION\\n"',
- '"MIME-Version: 1.0\\n"',
- '"Content-Type: text/plain; charset=UTF-8\\n"',
- '"Content-Transfer-Encoding: 8bit\\n"',
- '"X-Generator: Cockpit html2po\\n"',
- '',
- ];
-
- var msgid, entry;
- for (msgid in entries) {
- entry = entries[msgid];
- result.push('#: ' + entry.locations.join(" "));
- if (entry.msgctxt)
- result.push('msgctxt ' + escape(entry.msgctxt));
- result.push('msgid ' + escape(entry.msgid));
- if (entry.msgid_plural) {
- result.push('msgid_plural ' + escape(entry.msgid_plural));
- result.push('msgstr[0] ""');
- result.push('msgstr[1] ""');
- } else {
- result.push('msgstr ""');
- }
- result.push('');
- }
-
- var data = result.join('\n');
- if (!opts.output) {
- process.stdout.write(data);
- process.exit(0);
- } else {
- fs.writeFile(opts.output, data, function(err) {
- if (err)
- fatal(err.message);
- process.exit(0);
- });
- }
-}
diff --git a/web/po/manifest2po b/web/po/manifest2po
deleted file mode 100755
index 46fa744b51..0000000000
--- a/web/po/manifest2po
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env node
-
-/*
- * Extracts translatable strings from manifest.json files.
- *
- */
-
-function fatal(message, code) {
- console.log((filename || "manifest2po") + ": " + message);
- process.exit(code || 1);
-}
-
-function usage() {
- console.log("usage: manifest2po [-o output] input...");
- process.exit(2);
-}
-
-var fs, path, stdio;
-
-try {
- fs = require('fs');
- path = require('path');
- stdio = require('stdio');
-} catch (ex) {
- fatal(ex.message, 127); /* missing looks for this */
-}
-
-var opts = stdio.getopt({
- directory: { key: "d", args: 1, description: "Base directory for input files", default: "." },
- output: { key: "o", args: 1, description: "Output file" },
- from: { key: "f", args: 1, description: "File containing list of input files", default: "" },
-});
-
-if (!opts.from && opts.args.length < 1) {
- usage();
-}
-
-var input = opts.args;
-var entries = { };
-
-/* Filename being parsed */
-var filename = null;
-
-prepare();
-
-/* Decide what input files to process */
-function prepare() {
- if (opts.from) {
- fs.readFile(opts.from, { encoding: "utf-8"}, function(err, data) {
- if (err)
- fatal(err.message);
- input = data.split("\n").filter(function(value) {
- return !!value;
- }).concat(input);
- step();
- });
- } else {
- step();
- }
-}
-
-/* Now process each file in turn */
-function step() {
- filename = input.shift();
- if (filename === undefined) {
- finish();
- return;
- }
-
- if (path.basename(filename) != "manifest.json")
- return step();
-
- /* Qualify the filename if necessary */
- var full = filename;
- if (opts.directory)
- full = path.join(opts.directory, filename);
-
- fs.readFile(full, { encoding: "utf-8"}, function(err, data) {
- if (err)
- fatal(err.message);
-
- // There are variables which when not substituted can cause JSON.parse to fail
- // Dummy replace them. None variable is going to be translated anyway
- safe_data = data.replace(/\@.+?\@/gi, 1);
- process_manifest(JSON.parse(safe_data));
-
- return step();
- });
-}
-
-function process_manifest(manifest) {
- if (manifest.menu)
- process_menu(manifest.menu);
- if (manifest.tools)
- process_menu(manifest.tools);
-}
-
-function process_keywords(keywords) {
- keywords.forEach(v => {
- v.matches.forEach(keyword =>
- push({
- msgid: keyword,
- locations: [ filename + ":0" ]
- })
- );
- });
-}
-
-function process_docs(docs) {
- docs.forEach(doc => {
- push({
- msgid: doc.label,
- locations: [ filename + ":0" ]
- })
- });
-}
-
-function process_menu(menu) {
- for (var m in menu) {
- if (menu[m].label) {
- push({
- msgid: menu[m].label,
- locations: [ filename + ":0" ]
- });
- }
- if (menu[m].keywords)
- process_keywords(menu[m].keywords);
- if (menu[m].docs)
- process_docs(menu[m].docs);
- }
-}
-
-/* Push an entry onto the list */
-function push(entry) {
- var key = entry.msgid + "\0" + entry.msgid_plural + "\0" + entry.msgctxt;
- var prev = entries[key];
- if (prev) {
- prev.locations = prev.locations.concat(entry.locations);
- } else {
- entries[key] = entry;
- }
-}
-
-/* Escape a string for inclusion in po file */
-function escape(string) {
- var bs = string.split('\\').join('\\\\').split('"').join('\\"');
- return bs.split("\n").map(function(line) {
- return '"' + line + '"';
- }).join("\n");
-}
-
-/* Finish by writing out the strings */
-function finish() {
- var result = [
- 'msgid ""',
- 'msgstr ""',
- '"Project-Id-Version: PACKAGE_VERSION\\n"',
- '"MIME-Version: 1.0\\n"',
- '"Content-Type: text/plain; charset=UTF-8\\n"',
- '"Content-Transfer-Encoding: 8bit\\n"',
- '"X-Generator: Cockpit manifest2po\\n"',
- '',
- ];
-
- var msgid, entry;
- for (msgid in entries) {
- entry = entries[msgid];
- result.push('#: ' + entry.locations.join(" "));
- if (entry.msgctxt)
- result.push('msgctxt ' + escape(entry.msgctxt));
- result.push('msgid ' + escape(entry.msgid));
- if (entry.msgid_plural) {
- result.push('msgid_plural ' + escape(entry.msgid_plural));
- result.push('msgstr[0] ""');
- result.push('msgstr[1] ""');
- } else {
- result.push('msgstr ""');
- }
- result.push('');
- }
-
- var data = result.join('\n');
- if (!opts.output) {
- process.stdout.write(data);
- process.exit(0);
- } else {
- fs.writeFile(opts.output, data, function(err) {
- if (err)
- fatal(err.message);
- process.exit(0);
- });
- }
-}
diff --git a/web/src/components/core/FileViewer.jsx b/web/src/components/core/FileViewer.jsx
index 09eb9474f1..f23aab4f00 100644
--- a/web/src/components/core/FileViewer.jsx
+++ b/web/src/components/core/FileViewer.jsx
@@ -26,6 +26,9 @@ import { Loading } from "~/components/layout";
import cockpit from "../../lib/cockpit";
+// FIXME: replace by a wrapper, this is just for testing
+const _ = cockpit.gettext;
+
export default function FileViewer({ file, title, onCloseCallback }) {
// the popup is visible
const [isOpen, setIsOpen] = useState(true);
@@ -61,13 +64,13 @@ export default function FileViewer({ file, title, onCloseCallback }) {
title={title || file}
className="large"
>
- {state === "loading" && }
+ {state === "loading" && }
{(content === null || error) &&
{error}
}
@@ -76,7 +79,7 @@ export default function FileViewer({ file, title, onCloseCallback }) {
- Close
+ {_("Close")}
);
diff --git a/web/src/components/core/FileViewer.test.jsx b/web/src/components/core/FileViewer.test.jsx
index f64110b886..49955e8182 100644
--- a/web/src/components/core/FileViewer.test.jsx
+++ b/web/src/components/core/FileViewer.test.jsx
@@ -22,7 +22,7 @@
import React from "react";
import { screen, waitFor, within } from "@testing-library/react";
-import { plainRender } from "~/test-utils";
+import { mockGettext, plainRender } from "~/test-utils";
import { FileViewer } from "~/components/core";
import cockpit from "../../lib/cockpit";
@@ -44,6 +44,8 @@ const file_name = "/testfile";
const content = "Read file content";
const title = "YaST Logs";
+mockGettext();
+
describe("FileViewer", () => {
beforeEach(() => {
readFn.mockResolvedValue(content);
diff --git a/web/src/test-utils.js b/web/src/test-utils.js
index 436efee575..51fc16ff0d 100644
--- a/web/src/test-utils.js
+++ b/web/src/test-utils.js
@@ -33,6 +33,7 @@ import { render } from "@testing-library/react";
import { createClient } from "~/client/index";
import { InstallerClientProvider } from "~/context/installer";
import { NotificationProvider } from "~/context/notification";
+import cockpit from "./lib/cockpit";
/**
* Internal mock for manipulating routes, using ["/"] by default
@@ -165,11 +166,25 @@ const withNotificationProvider = (content) => {
);
};
+/**
+ * Mocks the cockpit.gettext() method with an identity function (returns
+ * the original untranslated text)
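+ * Call it before rendering a component that uses cockpit.gettext().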
+ */
+const mockGettext = () => {
+ const gettextFn = jest.fn();
+ gettextFn.mockImplementation((text) => {
+ return text;
+ });
+
+ cockpit.gettext.mockImplementation(gettextFn);
+};
+
export {
plainRender,
installerRender,
createCallbackMock,
mockComponent,
+ mockGettext,
mockLayout,
mockNavigateFn,
mockRoutes,