From 42508e1e19263079fe1516c2e388df0052eb98fd Mon Sep 17 00:00:00 2001 From: Antoine B <56827368+4nt0ineB@users.noreply.github.com> Date: Mon, 12 Aug 2024 20:06:06 +0200 Subject: [PATCH 1/8] feat: generate historical events from product revisions (#10523) ### What Add script that goes through the product revisions and generate historical events `scripts/product_revision_to_historical_events.pl` ### Issues - #10526 --------- Signed-off-by: John Gomersall Co-authored-by: benbenben2 <110821832+benbenben2@users.noreply.github.com> Co-authored-by: John Gomersall Co-authored-by: john-gom <116556069+john-gom@users.noreply.github.com> --- Makefile | 2 +- lib/ProductOpener/Redis.pm | 4 +- .../product_revision_to_historical_events.pl | 226 ++++++++++++++++++ scripts/update_all_products.pl | 2 +- 4 files changed, 231 insertions(+), 3 deletions(-) create mode 100755 scripts/product_revision_to_historical_events.pl diff --git a/Makefile b/Makefile index 4a343c000cef9..2d265d3119fba 100644 --- a/Makefile +++ b/Makefile @@ -315,7 +315,7 @@ update_tests_results: build_taxonomies_test build_lang_test bash: @echo "🥫 Open a bash shell in the backend container" - ${DOCKER_COMPOSE} run --rm -w /opt/product-opener backend bash + ${DOCKER_COMPOSE_RUN} run --rm -w /opt/product-opener backend bash bash_test: @echo "🥫 Open a bash shell in the test container" diff --git a/lib/ProductOpener/Redis.pm b/lib/ProductOpener/Redis.pm index b5dc72d4def85..5659831d831f7 100644 --- a/lib/ProductOpener/Redis.pm +++ b/lib/ProductOpener/Redis.pm @@ -103,7 +103,7 @@ a hashref of the differences between the previous and new revision of the produc =cut -sub push_to_redis_stream ($user_id, $product_ref, $action, $comment, $diffs) { +sub push_to_redis_stream ($user_id, $product_ref, $action, $comment, $diffs, $timestamp = time()) { if (!$redis_url) { # No Redis URL provided, we can't push to Redis @@ -131,7 +131,9 @@ sub push_to_redis_stream ($user_id, $product_ref, $action, $comment, $diffs) { # We let Redis generate the id '*', # fields + 'timestamp', $timestamp, 'code', Encode::encode_utf8($product_ref->{code}), + 'rev', Encode::encode_utf8($product_ref->{rev}), # product_type should be used over flavor (kept for backward compatibility) 'product_type', $options{product_type}, 'flavor', $options{current_server}, diff --git a/scripts/product_revision_to_historical_events.pl b/scripts/product_revision_to_historical_events.pl new file mode 100755 index 0000000000000..80e1152a76866 --- /dev/null +++ b/scripts/product_revision_to_historical_events.pl @@ -0,0 +1,226 @@ +#!/usr/bin/perl -w + +# This file is part of Product Opener. +# +# Product Opener +# Copyright (C) 2011-2024 Association Open Food Facts +# Contact: contact@openfoodfacts.org +# Address: 21 rue des Iles, 94100 Saint-Maur des Fossés, France +# +# Product Opener is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
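# Note: this patch also extends ProductOpener::Redis::push_to_redis_stream() (see the
# Redis.pm hunk above) with an optional trailing $timestamp argument defaulting to
# time(), and adds the product's "rev" to the pushed fields, so an event can carry the
# original change time rather than the time of the push. An illustrative call shape,
# mirroring the commented-out call further down in this script:
#
#   push_to_redis_stream(
#       $change->{userid} // 'initial_import',
#       {code => $code, rev => $rev},
#       $action,
#       $change->{comment},
#       $change->{diffs},
#       $change->{t},    # original change timestamp (the new optional argument)
#   );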
+ +use ProductOpener::PerlStandards; +use utf8; + +use ProductOpener::Config qw/%options $query_url/; +use ProductOpener::Store qw/store retrieve/; +use ProductOpener::Paths qw/%BASE_DIRS/; +use ProductOpener::Redis qw/push_to_redis_stream/; +use ProductOpener::Products qw/product_id_from_path/; +use Path::Tiny; +use JSON::MaybeXS; + +# This script recursively visits all product.sto files from the root of the products directory +# and process its changes to generate a JSONL file of historical events +my $start_from = $ARGV[0] // 0; +my $end_before = $ARGV[1] // 9999999999; +#perl scripts/product_revision_to_historical_events.pl 1704067200 + +my ($checkpoint_file, $last_processed_path, $last_processed_rev) = open_checkpoint('checkpoint.tmp'); +my $can_process = $last_processed_path ? 0 : 1; + +# JSONL +my $filename = 'historical_events.jsonl'; +open(my $file, '>>:encoding(UTF-8)', $filename) or die "Could not open file '$filename' $!"; + +my $product_count = 0; +my $event_count = 0; + +my @events = (); + +$query_url =~ s/^\s+|\s+$//g; +my $query_post_url = URI->new("$query_url/productupdates"); +my $ua = LWP::UserAgent->new(); +# Add a timeout to the HTTP query +$ua->timeout(15); + +sub process_file($path, $code) { + $product_count++; + + if ($product_count % 1000 == 0) { + print '[' . localtime() . "] $product_count products processed. Sent $event_count events \n"; + } + + my $changes = retrieve($path . "/changes.sto"); + if (!defined $changes) { + print '[' . localtime() . "] Unable to open $path/changes.sto\n"; + return; + } + + # JSONL + #my $code = product_id_from_path($path); + my $change_count = @$changes; # some $product don't have a 'rev' + my $rev = 0; # some $change don't have a 'rev' + + foreach my $change (@{$changes}) { + $rev++; + # my $product = retrieve($path . "/" . $rev . ".sto"); + + if (not $can_process and $rev == $last_processed_rev) { + $can_process = 1; + print "Resuming from '$last_processed_path' revision $last_processed_rev\n"; + next; # we don't want to process the revision again + } + + next if not $can_process; + + my $timestamp = $change->{t} // 0; + next if ($timestamp < $start_from or $timestamp >= $end_before); + + my $action = 'updated'; + if ($rev eq 1) { + $action = 'created'; + } + elsif ( $rev == $change_count + and $change->{comment} =~ /^Deleting product/) + { + $action = 'deleted'; + } + + if (exists $change->{diffs}{fields}{add} + and (grep {$_ eq 'obsolete'} @{$change->{diffs}{fields}{add}})) + { + $action = 'archived'; + } + if (exists $change->{diffs}{fields}{delete} + and (grep {$_ eq 'obsolete'} @{$change->{diffs}{fields}{delete}})) + { + $action = 'unarchived'; + } + + push( + @events, + { + timestamp => $timestamp, + code => $code, + rev => $rev + 0, + user_id => $change->{userid} // 'initial_import', + comment => $change->{comment}, + product_type => $options{product_type}, + action => $action, + diffs => $change->{diffs} + } + ); + + $event_count++; + + if ($event_count % 1000 == 0) { + send_events(); + update_checkpoint($checkpoint_file, $path, $rev); + } + } + + return 1; +} + +sub send_events() { + foreach my $event (@events) { + print $file encode_json($event) . "\n"; + } + + my $resp = $ua->post( + $query_post_url, + Content => encode_json(\@events), + 'Content-Type' => 'application/json; charset=utf-8' + ); + if (!$resp->is_success) { + print '[' + . localtime() + . "] query response not ok calling " + . $query_post_url + . " error: " + . $resp->status_line . 
"\n"; + die; + } + + # Note pushing to redis will cause product to be reloaded + # push_to_redis_stream( + # $change->{userid} // 'initial_import', + # {code=>$code, rev=>$rev}, + # $action, + # $change->{comment}, + # $change->{diffs}, + # $change->{t} + # ); + + @events = (); + + return 1; +} + +# because getting products from mongodb won't give 'deleted' ones +# found that path->visit was slow with full product volume +sub find_products($dir, $code) { + opendir DH, "$dir" or die "could not open $dir directory: $!\n"; + my @files = readdir(DH); + closedir DH; + foreach my $entry (sort @files) { + next if $entry =~ /^\.\.?$/; + my $file_path = "$dir/$entry"; + + if (-d $file_path and ($can_process or ($last_processed_path =~ m/^\Q$file_path/))) { + find_products($file_path, "$code$entry"); + next; + } + + if ($entry eq 'product.sto') { + if ($can_process or ($last_processed_path and $last_processed_path eq $dir)) { + process_file($dir, $code); + } + } + } + + return; +} + +sub open_checkpoint($filename) { + if (!-e $filename) { + `touch $filename`; + } + open(my $checkpoint_file, '+<', $filename) or die "Could not open file '$filename' $!"; + seek($checkpoint_file, 0, 0); + my $checkpoint = <$checkpoint_file>; + chomp $checkpoint if $checkpoint; + my ($last_processed_path, $rev); + if ($checkpoint) { + ($last_processed_path, $rev) = split(',', $checkpoint); + } + return ($checkpoint_file, $last_processed_path, $rev); +} + +sub update_checkpoint($checkpoint_file, $dir, $revision) { + seek($checkpoint_file, 0, 0); + print $checkpoint_file "$dir,$revision"; + truncate($checkpoint_file, tell($checkpoint_file)); + return 1; +} + +find_products($BASE_DIRS{PRODUCTS}, ''); + +if (scalar(@events)) { + send_events(); +} + +close $file; +close $checkpoint_file; diff --git a/scripts/update_all_products.pl b/scripts/update_all_products.pl index 724a1692f0d29..a9417ef80fff8 100755 --- a/scripts/update_all_products.pl +++ b/scripts/update_all_products.pl @@ -1482,7 +1482,7 @@ else { $products_pushed_to_redis++; print STDERR ". Pushed to Redis stream"; - push_to_redis_stream('update_all_products', $product_ref, "updated", $comment, {}); + push_to_redis_stream('update_all_products', $product_ref, "reprocessed", $comment, {}); } } else { From 753ab9cc73cd7004f2c0ace964b19ea46ffff5cc Mon Sep 17 00:00:00 2001 From: Arnaud Leene Date: Tue, 13 Aug 2024 11:51:19 +0200 Subject: [PATCH 2/8] chore: taxonomy updates week 32 (#10690) ### What More improvements of various kind --- taxonomies/food/categories.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/taxonomies/food/categories.txt b/taxonomies/food/categories.txt index 5d406dbe8d8b1..823b937f478c6 100644 --- a/taxonomies/food/categories.txt +++ b/taxonomies/food/categories.txt @@ -42380,6 +42380,7 @@ hr: Crna riža it: Riso venere ja: 黒米 lt: Juodieji ryžiai +nl: Zwarte rijsten, zwarte rijst wikidata:en: Q3434002 < en:Japonica rices From 2258e28900f089468b11d9769ff7110e2290f355 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Frederik=20=E2=80=9CFreso=E2=80=9D=20S=2E=20Olesen?= Date: Tue, 13 Aug 2024 11:53:54 +0200 Subject: [PATCH 3/8] taxonomy: Add Swedish steaming and rolling (#10688) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What Processing by steaming and/or rolling wasn’t recognised in ingredients lists in Swedish. They should be after this PR. 
### Related issue(s) and discussion - https://se.openfoodfacts.org/product/7340083480638/fiberhavregryn-garant#health --- taxonomies/ingredients_processing.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/taxonomies/ingredients_processing.txt b/taxonomies/ingredients_processing.txt index 7fe7489efa138..7b6d31110b957 100644 --- a/taxonomies/ingredients_processing.txt +++ b/taxonomies/ingredients_processing.txt @@ -363,6 +363,7 @@ de: gerollt, gerollte, gerollter, gerolltes es: arrollada, arrollado nl: geplet, geplette pt: enrolado, enrolada, enrolados, enroladas +sv: valsade # en:description:Produced by grating; grate: to shred (things, usually foodstuffs), by rubbing across a grater. en: grated @@ -1741,6 +1742,7 @@ wikidata:en: Q101017 # Date: Tue, 13 Aug 2024 11:57:15 +0200 Subject: [PATCH 4/8] fix: gen_users_emails.pl (#10686) The users_emails.sto file in production got deleted (and rebuilt with new users, but it was missing old users). I ran this script to repair it. We will move to Keykloak hopefully soon, so we won't have those issues once we move. Co-authored-by: Off --- scripts/gen_users_emails.pl | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/scripts/gen_users_emails.pl b/scripts/gen_users_emails.pl index 6118168263ceb..1faacc5b42815 100755 --- a/scripts/gen_users_emails.pl +++ b/scripts/gen_users_emails.pl @@ -36,24 +36,19 @@ @userids = retrieve_userids(); } +my $emails_ref = retrieve("$BASE_DIRS{USERS}/users_emails.sto"); + foreach my $userid (@userids) { my $user_ref = retrieve_user($userid); - - my $first = ''; - if (!exists $user_ref->{discussion}) { - $first = 'first'; - } - - # print $user_ref->{email} . "\tnews_$user_ref->{newsletter}$first\tdiscussion_$user_ref->{discussion}\n"; - - if ($user_ref->{newsletter}) { - print lc($user_ref->{email}) . "\n"; - } - - if ($user_ref->{twitter} ne '') { - # print "\@" . $user_ref->{twitter} . " "; + if (defined $user_ref) { + my $email = $user_ref->{email}; + if ((defined $email) and ($email =~ /\@/)) { + $emails_ref->{$email} = [$userid]; + } } } +store("$BASE_DIRS{USERS}/users_emails.sto", $emails_ref); + exit(0); From 22d8fb95ecca3d9bf12a57b39822552b607ead2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Frederik=20=E2=80=9CFreso=E2=80=9D=20S=2E=20Olesen?= Date: Tue, 13 Aug 2024 11:57:55 +0200 Subject: [PATCH 5/8] =?UTF-8?q?taxonomy:=20Add=20sv:pecann=C3=B6tter=20and?= =?UTF-8?q?=20sv:l=C3=B6nnsirap=20(#10685)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What Adds `sv:pecannötter` and `sv:lönnsirap` to the ingredient taxonomy. 
### Screenshot [![unrecognised ingredients](https://github.com/user-attachments/assets/bcc35416-8dc5-40bd-876b-79cde675dd82)](https://se.openfoodfacts.org/product/7311312007865/granola-crunchy-pekan-l%C3%B6nnsirap-risenta#health) ### Related issue(s) and discussion - https://se.openfoodfacts.org/product/7311312007865/granola-crunchy-pekan-l%C3%B6nnsirap-risenta#health --- taxonomies/food/ingredients.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/taxonomies/food/ingredients.txt b/taxonomies/food/ingredients.txt index 4261716c0e229..bac88399c95d8 100644 --- a/taxonomies/food/ingredients.txt +++ b/taxonomies/food/ingredients.txt @@ -32462,6 +32462,7 @@ nl: esdoornsiroop, ahornsiroop pl: syrop klonowy ro: sirop de arţar ru: кленовый сироп +sv: lönnsirap vegan:en: yes vegetarian:en: yes @@ -62251,7 +62252,7 @@ ro: nuci Pecan ru: пекан sk: pekanové orechy sl: ameriški orehi -sv: pekannöt, pekannötter +sv: pekannöt, pekannötter, pecannöt, pecannötter zh: 碧根果 ciqual_food_code:en: 15026 ciqual_food_name:en: Pecan nut From 026ff6a56c709b88cb633c760b6eb0fcb1bbee7a Mon Sep 17 00:00:00 2001 From: Antoine B <56827368+4nt0ineB@users.noreply.github.com> Date: Tue, 13 Aug 2024 12:00:58 +0200 Subject: [PATCH 6/8] feat: sync number of product public/pro in CRM (#10684) The daily script `(save_org_product_data_daily_off_pro.pl)` which updates orgs data now sync the number of public/prod products with CRM. --- lib/ProductOpener/CRM.pm | 28 ++++--- lib/ProductOpener/Orgs.pm | 84 ++++++++----------- .../save_org_product_data_daily_off_pro.pl | 9 +- 3 files changed, 61 insertions(+), 60 deletions(-) diff --git a/lib/ProductOpener/CRM.pm b/lib/ProductOpener/CRM.pm index 5811f895da518..4f15c0f4b848a 100644 --- a/lib/ProductOpener/CRM.pm +++ b/lib/ProductOpener/CRM.pm @@ -66,6 +66,8 @@ BEGIN { &update_last_export_date &update_company_last_logged_in_contact &update_company_last_import_type + &update_public_products + &update_pro_products &add_category_to_company &update_template_download_date &update_contact_last_login @@ -579,12 +581,20 @@ sub change_company_main_contact($org_ref, $user_id) { return $req_company; } -sub update_last_import_date($org_id, $time) { - return _update_partner_field(retrieve_org($org_id), 'x_off_last_import_date', _time_to_odoo_date_str($time)); +sub update_last_import_date($org_ref, $time) { + return _update_partner_field($org_ref, 'x_off_last_import_date', _time_to_odoo_date_str($time)); } -sub update_last_export_date($org_id, $time) { - return _update_partner_field(retrieve_org($org_id), 'x_off_last_export_date', _time_to_odoo_date_str($time)); +sub update_last_export_date($org_ref, $time) { + return _update_partner_field($org_ref, 'x_off_last_export_date', _time_to_odoo_date_str($time)); +} + +sub update_public_products($org_ref, $number_of_products) { + return _update_partner_field($org_ref, 'x_off_public_products', $number_of_products); +} + +sub update_pro_products($org_ref, $number_of_products) { + return _update_partner_field($org_ref, 'x_off_pro_products', $number_of_products); } sub update_contact_last_login ($user_ref) { @@ -634,8 +644,7 @@ Add a category to a company in Odoo =cut -sub add_category_to_company($org_id, $label) { - my $org_ref = retrieve_org($org_id); +sub add_category_to_company($org_ref, $label) { return if not defined $org_ref->{crm_org_id}; my $category_id = $crm_data->{category}{$label}; @@ -645,7 +654,7 @@ sub add_category_to_company($org_id, $label) { if $log->is_debug(); return; } - $log->debug("add_category_to_company", {org_id => 
$org_id, label => $label, category_id => $category_id}) + $log->debug("add_category_to_company", {org_id => $org_ref->{org_id}, label => $label, category_id => $category_id}) if $log->is_debug(); return make_odoo_request('res.partner', 'write', [[$org_ref->{crm_org_id}], {category_id => [[$commands{link}, $category_id]]}]); @@ -661,11 +670,10 @@ must match one of the values in CRM.pm @data_source =cut -sub update_company_last_import_type($org_id, $label) { - my $org_ref = retrieve_org($org_id); +sub update_company_last_import_type($org_ref, $label) { return if not defined $org_ref->{crm_org_id}; my $category_id = $crm_data->{category}{$label}; - add_category_to_company($org_id, $label); + add_category_to_company($org_ref, $label); return make_odoo_request('res.partner', 'write', [[$org_ref->{crm_org_id}], {x_off_last_import_type => $category_id}]); } diff --git a/lib/ProductOpener/Orgs.pm b/lib/ProductOpener/Orgs.pm index 8aba9553d4983..52f41a367b243 100644 --- a/lib/ProductOpener/Orgs.pm +++ b/lib/ProductOpener/Orgs.pm @@ -166,7 +166,7 @@ sub store_org ($org_ref) { defined $org_ref->{org_id} or die("Missing org_id"); # retrieve eventual previous values - my $previous_org_ref = retrieve("$BASE_DIRS{ORGS}/" . $org_ref->{org_id} . ".sto"); + my $previous_org_ref = retrieve("$BASE_DIRS{ORGS}/$org_ref->{org_id}.sto"); if ( (defined $previous_org_ref) && $previous_org_ref->{valid_org} ne 'accepted' @@ -389,20 +389,9 @@ Reference to an array of group ids (e.g. ["admins", "members"]) sub add_user_to_org ($org_id_or_ref, $user_id, $groups_ref) { - my $org_id; - my $org_ref; - - if (ref($org_id_or_ref) eq "") { - $org_id = $org_id_or_ref; - $org_ref = retrieve_org($org_id); - } - else { - $org_ref = $org_id_or_ref; - $org_id = $org_ref->{org_id}; - } + my $org_ref = org_id_or_ref($org_id_or_ref); - $log->debug("add_user_to_org", - {org_id => $org_id, org_ref => $org_ref, user_id => $user_id, groups_ref => $groups_ref}) + $log->debug("add_user_to_org", {org_ref => $org_ref, user_id => $user_id, groups_ref => $groups_ref}) if $log->is_debug(); foreach my $group (@{$groups_ref}) { @@ -449,20 +438,9 @@ Reference to an array of group ids (e.g. 
["admins", "members"]) sub remove_user_from_org ($org_id_or_ref, $user_id, $groups_ref) { - my $org_id; - my $org_ref; - - if (ref($org_id_or_ref) eq "") { - $org_id = $org_id_or_ref; - $org_ref = retrieve_org($org_id); - } - else { - $org_ref = $org_id_or_ref; - $org_id = $org_ref->{org_id}; - } + my $org_ref = org_id_or_ref($org_id_or_ref); - $log->debug("remove_user_from_org", - {org_id => $org_id, org_ref => $org_ref, user_id => $user_id, groups_ref => $groups_ref}) + $log->debug("remove_user_from_org", {org_ref => $org_ref, user_id => $user_id, groups_ref => $groups_ref}) if $log->is_debug(); foreach my $group (@{$groups_ref}) { @@ -481,17 +459,7 @@ sub remove_user_from_org ($org_id_or_ref, $user_id, $groups_ref) { sub is_user_in_org_group ($org_id_or_ref, $user_id, $group_id) { - my $org_id; - my $org_ref; - - if (ref($org_id_or_ref) eq "") { - $org_id = $org_id_or_ref; - $org_ref = retrieve_org($org_id); - } - else { - $org_ref = $org_id_or_ref; - $org_id = $org_ref->{org_id}; - } + my $org_ref = org_id_or_ref($org_id_or_ref); if ( (defined $user_id) and (defined $org_ref) @@ -506,7 +474,6 @@ sub is_user_in_org_group ($org_id_or_ref, $user_id, $group_id) { } sub org_name ($org_ref) { - if ((defined $org_ref->{name}) and ($org_ref->{name} ne "")) { return $org_ref->{name}; } @@ -516,23 +483,22 @@ sub org_name ($org_ref) { } sub org_url ($org_ref) { - return canonicalize_tag_link("orgs", $org_ref->{org_id}); } -sub update_import_date($org_id, $time) { - my $org_ref = retrieve_org($org_id); +sub update_import_date($org_id_or_ref, $time) { + my $org_ref = org_id_or_ref($org_id_or_ref); $org_ref->{last_import_t} = $time; store_org($org_ref); - update_last_import_date($org_id, $time); + update_last_import_date($org_ref, $time); return; } -sub update_export_date($org_id, $time) { - my $org_ref = retrieve_org($org_id); +sub update_export_date($org_id_or_ref, $time) { + my $org_ref = org_id_or_ref($org_id_or_ref); $org_ref->{last_export_t} = $time; store_org($org_ref); - update_last_export_date($org_id, $time); + update_last_export_date($org_ref, $time); return; } @@ -566,10 +532,10 @@ Update the last import type for an organization. =cut -sub update_last_import_type ($org_id, $data_source) { - my $org_ref = retrieve_org($org_id); +sub update_last_import_type ($org_id_or_ref, $data_source) { + my $org_ref = retrieve_org($org_id_or_ref); $org_ref->{last_import_type} = $data_source; - update_company_last_import_type($org_id, $data_source); + update_company_last_import_type($org_ref, $data_source); store_org($org_ref); return; } @@ -588,4 +554,24 @@ sub accept_pending_user_in_org ($org_ref, $user_id) { return; } +=head2 org_id_or_ref($org_id_or_ref) + +Systematically return the org_ref for a given org_id or org_ref. 
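For illustration, both call forms resolve to the same org hashref (the org id below is hypothetical):

    my $org_ref = org_id_or_ref("acme-foods");    # an org id string is looked up via retrieve_org()
    my $same    = org_id_or_ref($org_ref);        # an existing org hashref is returned unchanged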
+ +=cut + +sub org_id_or_ref ($org_id_or_ref) { + my $org_id; + my $org_ref; + if (ref($org_id_or_ref) eq "") { + $org_id = $org_id_or_ref; + $org_ref = retrieve_org($org_id); + } + else { + $org_ref = $org_id_or_ref; + $org_id = $org_ref->{org_id}; + } + return $org_ref; +} + 1; diff --git a/scripts/save_org_product_data_daily_off_pro.pl b/scripts/save_org_product_data_daily_off_pro.pl index 7ba603c8833fa..c0841ed940429 100755 --- a/scripts/save_org_product_data_daily_off_pro.pl +++ b/scripts/save_org_product_data_daily_off_pro.pl @@ -29,10 +29,13 @@ use ProductOpener::Paths qw/%BASE_DIRS ensure_dir_created/; use ProductOpener::Users qw/$Owner_id/; use ProductOpener::Orgs qw/retrieve_org/; +use ProductOpener::CRM qw/update_public_products update_pro_products/; use Storable qw(store); # This script is run daily to gather organisation data -# such as number of products, number of products with errors etc, +# such as number of products, number of products with errors etc. +# Some data such as number of products on the public platform and in the producer platform +# are synced with the CRM. ensure_dir_created($BASE_DIRS{ORGS}); @@ -146,6 +149,10 @@ ($org_id) $org_ref->{'data'} = $data; + # sync crm + update_public_products($org_ref, $org_ref->{data}{products}{number_of_products_on_public_platform}); + update_pro_products($org_ref), $org_ref->{data}{products}{number_of_products_on_producer_platform}; + store($org_ref, $org_file_path); return; } From 2aca63f0b407b39550989ff61a67f95b4abb163f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Frederik=20=E2=80=9CFreso=E2=80=9D=20S=2E=20Olesen?= Date: Tue, 13 Aug 2024 12:11:09 +0200 Subject: [PATCH 7/8] taxonomy: Add Swedish terms for hydrolysed soy protein and ginger extract (#10673) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What Adds Swedish terms for hydrolysed soy protein and ginger extract. I added “hydrolyserat” both to `food/ingredients.txt` and to `ingredients_processing.txt`. Not sure if I should only do the latter, or if it’s fine to have both since “hydrolysed soy protein” is already in `food/ingredients.txt`. 
### Screenshot [![unknown ingredients](https://github.com/user-attachments/assets/8ab60e9b-8627-436d-9bb9-29543c6b8cf7)](https://se.openfoodfacts.org/product/7340083469077/heta-chilin%C3%B6tter-eldorado#health) ### Related issue(s) and discussion - https://se.openfoodfacts.org/product/7340083469077/heta-chilin%C3%B6tter-eldorado#health --- taxonomies/food/ingredients.txt | 2 ++ taxonomies/ingredients_processing.txt | 1 + 2 files changed, 3 insertions(+) diff --git a/taxonomies/food/ingredients.txt b/taxonomies/food/ingredients.txt index bac88399c95d8..83ff812b9946a 100644 --- a/taxonomies/food/ingredients.txt +++ b/taxonomies/food/ingredients.txt @@ -33387,6 +33387,7 @@ hu: hidrolizált szójafehérje it: proteine di soia idrolizzate nl: gehydroliseerd soja-eiwit, gehydrolyseerd soja-eiwit pl: hydrolizat białka sojowego, hydrolizowane białko sojowe +sv: hydrolyserat sojaprotein < en:soy protein fr: protéines de soja i.p @@ -65970,6 +65971,7 @@ fr: extrait de gingembre, l'extrait de gingembre #nl:gemberextract hr: ekstrakt đumbira pl: ekstrakt z imbiru, ekstrakt imbiru +sv: ingefärsextrakt #vegan:en:yes #vegetarian:en:yes diff --git a/taxonomies/ingredients_processing.txt b/taxonomies/ingredients_processing.txt index 7b6d31110b957..a51c81085cdf4 100644 --- a/taxonomies/ingredients_processing.txt +++ b/taxonomies/ingredients_processing.txt @@ -2639,6 +2639,7 @@ fr: Hydrolisé, hydrolisée, hydrolisés, hydrolisées hr: hidrolizat, hidrolizirane, hidrolizirano, hidrolizirani nl: gehydrolyseerd, gehydrolyseerde pl: hydrolizat, hydrolizowane +sv: hydrolyserat #en:description:Transformation of unsaturated glycerides into saturated glycerides (of oils and fats) en: hydrogenated, hardened, partially hardened From b1a6c20efd3883c2251a7966059b5461321d20d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Frederik=20=E2=80=9CFreso=E2=80=9D=20S=2E=20Olesen?= Date: Tue, 13 Aug 2024 12:17:27 +0200 Subject: [PATCH 8/8] =?UTF-8?q?taxonomy:=20Add=20=E2=80=9Cgr=C3=A4dd?= =?UTF-8?q?=E2=80=9D=20alias=20for=20cream=20(#10675)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- taxonomies/food/ingredients.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/taxonomies/food/ingredients.txt b/taxonomies/food/ingredients.txt index 83ff812b9946a..65f385e8670c4 100644 --- a/taxonomies/food/ingredients.txt +++ b/taxonomies/food/ingredients.txt @@ -5345,7 +5345,7 @@ sn: Ruomba so: Labeen sq: Pana sr: pavlaka -sv: Grädde +sv: grädde, grädd sw: Samli ta: பாலாடை te: మీగడ
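For illustration, a minimal sketch of how the two CRM product-count helpers introduced in PATCH 6/8 are intended to be called, following their definitions in CRM.pm (the org id and counts below are hypothetical):

    use ProductOpener::Orgs qw/retrieve_org/;
    use ProductOpener::CRM qw/update_public_products update_pro_products/;

    my $org_ref = retrieve_org("acme-foods");    # hypothetical org id

    # Each helper writes one field on the org's res.partner record in Odoo
    update_public_products($org_ref, 120);    # x_off_public_products
    update_pro_products($org_ref, 150);       # x_off_pro_products

Passing the full $org_ref (rather than an org id) matches the broader refactor in that patch, which moves the retrieve_org() lookup into the callers via the new org_id_or_ref() helper in Orgs.pm.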