Skip to content

Commit

Permalink
Merge branch 'main' into more-attributes
Browse files Browse the repository at this point in the history
  • Loading branch information
teolemon authored May 23, 2023
2 parents 6efaf77 + 7953dbc commit aefba0b
Show file tree
Hide file tree
Showing 805 changed files with 195,836 additions and 5,532 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,4 @@ packager-codes/*.csv
packager-codes/*.tsv
packager-codes/*.py
packager-codes/*.json
taxonomies*/**/*.txt
taxonomies*/**/*.sto
1 change: 1 addition & 0 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -399,6 +399,7 @@ packager codes:
- scripts/update_packager_codes_fsa_ratings.pl
- scripts/test_normalize_packaging_codes.pl
- scripts/update_emb_codes_in_mongodb.pl
- packager-codes/**

Packaging:
- lib/ProductOpener/Packaging.pm
Expand Down
10 changes: 8 additions & 2 deletions .github/workflows/container-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -89,14 +89,17 @@ jobs:
proxy_key: ${{ secrets.SSH_PRIVATE_KEY }}
script_stop: false
script: |
# while we would use shallow clone on a normal repo,
# comparison is too costly for this project and takes too much time, so we use a regular clone
# Clone Git repository if not already there
[ ! -d '${{ matrix.env }}' ] && git clone --depth 1 https://github.com/${{ github.repository }} ${{ matrix.env }} --no-single-branch 2>&1
[ ! -d '${{ matrix.env }}' ] && git clone https://github.com/${{ github.repository }} ${{ matrix.env }} 2>&1
# Go to repository directory
cd ${{ matrix.env }}
# Fetch newest commits (in case it wasn't freshly cloned)
git fetch --depth 1
# here again we use full depth instead of usual --depth 1, because of perf problems
git fetch
# Checkout current commit SHA
git checkout -qf ${{ github.sha }}
Expand Down Expand Up @@ -152,6 +155,7 @@ jobs:
echo "INFLUXDB_HOST=${{ env.INFLUXDB_HOST }}" >> .env
echo "LOG_LEVEL_ROOT=ERROR" >> .env
echo "LOG_LEVEL_MONGODB=ERROR" >> .env
echo "BUILD_CACHE_REPO=openfoodfacts/openfoodfacts-build-cache" >> .env
# Override domain name in nginx.conf
sed -i.bak "s/productopener.localhost/${{ env.PRODUCT_OPENER_DOMAIN }}/g" ./conf/nginx.conf
Expand Down Expand Up @@ -180,6 +184,7 @@ jobs:
proxy_host: ${{ env.SSH_PROXY_HOST }}
proxy_username: ${{ env.SSH_USERNAME }}
proxy_key: ${{ secrets.SSH_PRIVATE_KEY }}
command_timeout: 25m
script_stop: false
script: |
cd ${{ matrix.env }} && \
Expand All @@ -194,6 +199,7 @@ jobs:
proxy_host: ${{ env.SSH_PROXY_HOST }}
proxy_username: ${{ env.SSH_USERNAME }}
proxy_key: ${{ secrets.SSH_PRIVATE_KEY }}
command_timeout: 15m
script_stop: false
script: |
cd ${{ matrix.env }} && \
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/crowdin-per-language.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v3
- name: Matrix
uses: crowdin/github-action@v1.7.1
uses: crowdin/github-action@v1.8.1
with:
upload_translations: false # default is false
download_translations: true
Expand Down
3 changes: 2 additions & 1 deletion .github/workflows/crowdin.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v3
- name: crowdin action
uses: crowdin/github-action@v1.7.1
uses: crowdin/github-action@v1.8.1
with:
upload_translations: false # default is false
# Use this option to upload translations for a single specified language
Expand Down Expand Up @@ -48,6 +48,7 @@ jobs:
# create_pull_request: # default is true
# The title of the new pull request
pull_request_title: "l10n: New Crowdin translations to review and merge"
pull_request_body: '### What\n- Automated pull request pulling in new or updated translations from Crowdin (https://translate.openfoodfacts.org).\n## Checklist\n- [ ] Check that there are no bad translations. If there are, correct them directly in Crowdin so that they are not resynced again. Then you can correct them here as well, or wait 24 hours for the sync to happen automatically.\n- [ ] Put extra attention on Acholi, which is used mistakenly as a sandbox by people discovering the self-service translation button on Open Food Facts\n- [ ] Once you are happy, that automated checks pass, you can approve the PR and merge it.\n### Part of\n- Translations'
# The contents of the pull request
# pull_request_body: # optional
# To add labels for created pull request
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ jobs:
steps:
- uses: actions/add-to-project@main
with:
project-url: https://github.com/orgs/openfoodfacts/projects/11 # Add issue to the openfoodfacts-design project
project-url: https://github.com/orgs/openfoodfacts/projects/43 # Add issue to the openfoodfacts-design project
github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
labeled: mockups available, needs mockup
label-operator: OR
Expand Down
133 changes: 133 additions & 0 deletions CHANGELOG.md

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,8 @@ RUN \
done && \
chown www-data:www-data -R /mnt/podata && \
# Create symlinks of data files that are indeed conf data in /mnt/podata (because we currently mix data and conf data)
for path in data-default ecoscore emb_codes forest-footprint ingredients packager-codes po taxonomies templates build-cache; do \
# NOTE: do not change those links, for they are in a volume, or handle migration in entry-point
for path in data-default external-data emb_codes ingredients packager-codes po taxonomies templates build-cache; do \
ln -sf /opt/product-opener/${path} /mnt/podata/${path}; \
done && \
# Create some necessary files to ensure permissions in volumes
Expand Down
26 changes: 17 additions & 9 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@ MOUNT_POINT ?= /mnt
DOCKER_LOCAL_DATA ?= /srv/off/docker_data
OS := $(shell uname)

# mount point for shared data (default to the one on staging)
NFS_VOLUMES_ADDRESS ?= 10.0.0.3
NFS_VOLUMES_BASE_PATH ?= /rpool/off/clones

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
UID ?= $(shell id -u)
Expand Down Expand Up @@ -252,10 +256,11 @@ test-stop:
${DOCKER_COMPOSE_TEST} stop

# usage: make test-unit test=test-name.t
# you can add args= to pass options, like args="-d" to debug
test-unit: guard-test
@echo "🥫 Running test: 'tests/unit/${test}' …"
${DOCKER_COMPOSE_TEST} up -d memcached postgres mongodb
${DOCKER_COMPOSE_TEST} run --rm backend perl tests/unit/${test}
${DOCKER_COMPOSE_TEST} run --rm backend perl ${args} tests/unit/${test}

# usage: make test-int test=test-name.t
test-int: guard-test # usage: make test-one test=test-file.t
Expand Down Expand Up @@ -344,14 +349,14 @@ rebuild_taxonomies: build_taxonomies
#------------#
create_external_volumes:
@echo "🥫 Creating external volumes (production only) …"
# zfs replications
docker volume create --driver=local -o type=none -o o=bind -o device=${MOUNT_POINT}/data html_data
docker volume create --driver=local -o type=none -o o=bind -o device=${MOUNT_POINT}/users users
docker volume create --driver=local -o type=none -o o=bind -o device=${MOUNT_POINT}/products products
docker volume create --driver=local -o type=none -o o=bind -o device=${MOUNT_POINT}/product_images product_images
docker volume create --driver=local -o type=none -o o=bind -o device=${MOUNT_POINT}/orgs orgs
# zfs clones hosted on Ovh3 as NFS
docker volume create --driver=local --opt type=nfs --opt o=addr=${NFS_VOLUMES_ADDRESS},rw,nolock --opt device=:${NFS_VOLUMES_BASE_PATH}/users ${COMPOSE_PROJECT_NAME}_users
docker volume create --driver=local --opt type=nfs --opt o=addr=${NFS_VOLUMES_ADDRESS},rw,nolock --opt device=:${NFS_VOLUMES_BASE_PATH}/products ${COMPOSE_PROJECT_NAME}_products
docker volume create --driver=local --opt type=nfs --opt o=addr=${NFS_VOLUMES_ADDRESS},rw,nolock --opt device=:${NFS_VOLUMES_BASE_PATH}/images/products ${COMPOSE_PROJECT_NAME}_product_images
docker volume create --driver=local --opt type=nfs --opt o=addr=${NFS_VOLUMES_ADDRESS},rw,nolock --opt device=:${NFS_VOLUMES_BASE_PATH}/orgs ${COMPOSE_PROJECT_NAME}_orgs
# local data
docker volume create --driver=local -o type=none -o o=bind -o device=${DOCKER_LOCAL_DATA}/podata podata
docker volume create --driver=local -o type=none -o o=bind -o device=${DOCKER_LOCAL_DATA}/data ${COMPOSE_PROJECT_NAME}_html_data
docker volume create --driver=local -o type=none -o o=bind -o device=${DOCKER_LOCAL_DATA}/podata ${COMPOSE_PROJECT_NAME}_podata

create_external_networks:
@echo "🥫 Creating external networks (production only) …"
Expand All @@ -369,14 +374,17 @@ prune_cache:
@echo "🥫 Pruning Docker builder cache …"
docker builder prune -f

clean_folders:
clean_folders: clean_logs
( rm html/images/products || true )
( rm -rf node_modules/ || true )
( rm -rf html/data/i18n/ || true )
( rm -rf html/{css,js}/dist/ || true )
( rm -rf tmp/ || true )

clean_logs:
( rm -f logs/* logs/apache2/* logs/nginx/* || true )


clean: goodbye hdown prune prune_cache clean_folders

#-----------#
Expand Down
21 changes: 3 additions & 18 deletions cgi/export_products.pl
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@

if ($action eq "display") {

my $template_data_ref = {lang => \&lang,};
my $template_data_ref = {};

# Query filters

Expand All @@ -95,25 +95,10 @@
}
}

# Number of products matching the optional query
my $count = count_products({}, $query_ref);

# Number of products matching the query with changes that have not yet been imported
$query_ref->{states_tags} = "en:to-be-exported";
my $count_to_be_exported = count_products({}, $query_ref);

$template_data_ref->{count} = $count;
$template_data_ref->{count_to_be_exported} = $count_to_be_exported;

if ($count == 0) {
$template_data_ref->{n_products_will_be_exported} = lang("no_products_to_export");
}
elsif ($count == 1) {
$template_data_ref->{n_products_will_be_exported} = lang("one_product_will_be_exported");
}
else {
$template_data_ref->{n_products_will_be_exported} = sprintf(lang("n_products_will_be_exported"), $count);
}
$template_data_ref->{count_to_be_exported} = count_products({}, $query_ref);
$template_data_ref->{count_obsolete_to_be_exported} = count_products({}, $query_ref, 1);

my $export_photos_value = "";
my $replace_selected_photos_value = "";
Expand Down
88 changes: 35 additions & 53 deletions cgi/product_jqm_multilingual.pl
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ =head1 DESCRIPTION
use ProductOpener::ForestFootprint qw/:all/;
use ProductOpener::Text qw/:all/;
use ProductOpener::API qw/:all/;
use ProductOpener::APIProductWrite qw/:all/;

use Apache2::RequestRec ();
use Apache2::Const ();
Expand Down Expand Up @@ -308,10 +309,6 @@ =head1 DESCRIPTION
}
}

# Do not allow edits / removal through API for data provided by producers (only additions for non existing fields)
# when the corresponding organization has the protect_data checkbox checked
my $protected_data = product_data_is_protected($product_ref);

foreach my $field (@app_fields, 'nutrition_data_per', 'serving_size', 'traces', 'ingredients_text', 'origin',
'packaging_text', 'lang')
{
Expand Down Expand Up @@ -344,48 +341,45 @@ =head1 DESCRIPTION
existing_value => $product_ref->{$field}
}
) if $log->is_debug();
next;
}

elsif (defined single_param($field)) {

# Do not allow edits / removal through API for data provided by producers (only additions for non existing fields)
if (($protected_data) and (defined $product_ref->{$field}) and ($product_ref->{$field} ne "")) {
$log->debug("producer data already exists for field, skip empty value",
{field => $field, code => $code, existing_value => $product_ref->{$field}})
if $log->is_debug();
if (defined single_param($field)) {

# Only moderators can update values for fields sent by the producer
if (skip_protected_field($product_ref, $field, $User{moderator})) {
next;
}
else {
if ($field eq "lang") {
my $value = remove_tags_and_quote(decode utf8 => single_param($field));

# strip variants fr-BE fr_BE
$value =~ s/^([a-z][a-z])(-|_).*$/$1/i;
$value = lc($value);
if ($field eq "lang") {
my $value = remove_tags_and_quote(decode utf8 => single_param($field));

# skip unrecognized languages (keep the existing lang & lc value)
if (defined $lang_lc{$value}) {
$product_ref->{lang} = $value;
$product_ref->{lc} = $value;
}
# strip variants fr-BE fr_BE
$value =~ s/^([a-z][a-z])(-|_).*$/$1/i;
$value = lc($value);

# skip unrecognized languages (keep the existing lang & lc value)
if (defined $lang_lc{$value}) {
$product_ref->{lang} = $value;
$product_ref->{lc} = $value;
}
elsif ($field eq "ecoscore_extended_data") {
# we expect a JSON value
if (defined single_param($field)) {
$product_ref->{$field} = decode_json(single_param($field));
}
}
else {
$product_ref->{$field} = remove_tags_and_quote(decode utf8 => single_param($field));

if ((defined $language_fields{$field}) and (defined $product_ref->{lc})) {
my $field_lc = $field . "_" . $product_ref->{lc};
$product_ref->{$field_lc} = $product_ref->{$field};
}
}
elsif ($field eq "ecoscore_extended_data") {
# we expect a JSON value
if (defined single_param($field)) {
$product_ref->{$field} = decode_json(single_param($field));
}
}
else {
$product_ref->{$field} = remove_tags_and_quote(decode utf8 => single_param($field));

compute_field_tags($product_ref, $lc, $field);
if ((defined $language_fields{$field}) and (defined $product_ref->{lc})) {
my $field_lc = $field . "_" . $product_ref->{lc};
$product_ref->{$field_lc} = $product_ref->{$field};
}

compute_field_tags($product_ref, $lc, $field);
}
}

Expand All @@ -395,33 +389,21 @@ =head1 DESCRIPTION
my $field_lc = $field . '_' . $param_lang;
if (defined single_param($field_lc)) {

# Do not allow edits / removal through API for data provided by producers (only additions for non existing fields)
if (($protected_data) and (defined $product_ref->{$field_lc}) and ($product_ref->{$field_lc} ne ""))
{
$log->debug("producer data already exists for field, skip empty value",
{field_lc => $field_lc, code => $code, existing_value => $product_ref->{$field_lc}})
if $log->is_debug();
# Only moderators can update values for fields sent by the producer
if (skip_protected_field($product_ref, $field_lc, $User{moderator})) {
next;
}
else {

$product_ref->{$field_lc} = remove_tags_and_quote(decode utf8 => single_param($field_lc));
compute_field_tags($product_ref, $lc, $field_lc);
}
$product_ref->{$field_lc} = remove_tags_and_quote(decode utf8 => single_param($field_lc));
compute_field_tags($product_ref, $lc, $field_lc);
}
}
}
}

# Nutrition data

# Do not allow nutrition edits through API for data provided by producers
if (($protected_data) and (defined $product_ref->{"nutriments"})) {
print STDERR
"product_jqm_multilingual.pm - code: $code - nutrition data provided by producer exists, skip nutrients\n";
}
else {
assign_nutriments_values_from_request_parameters($product_ref, $nutriment_table);
}
assign_nutriments_values_from_request_parameters($product_ref, $nutriment_table, $User{moderator});

analyze_and_enrich_product_data($product_ref, $response_ref);

Expand Down
Loading

0 comments on commit aefba0b

Please sign in to comment.