Skip to content

Commit

Permalink
test: fix for product import tests (#156)
Browse files Browse the repository at this point in the history
* feat: product_number variable is replaced with product_id
Import product code samples are added

* use project_number in search_simple_query.py

* use project_number in search_with_boost_spec.py

* use project_number in search_with_filtering.py

* use project number in search_with_ordering.py

* use project_number in search_with_pagination.py

* use project_number in search_with_query_expansion_spec.py

* use project_number in search_with_facet_spec.py

* use correct path to resources

* revert change to paths

* resolve error where bq table doesn't exist

* use google.cloud.bigquery client

* fix for failed tests

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* move samples/interactive-tutorials/product/setup to samples/interactive-tutorials/product/setup_product

* allow import_products_bq_test to run concurrently

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* add dependency for test

* remove tests for setup script

Co-authored-by: Anthonios Partheniou <[email protected]>
Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored Feb 18, 2022
1 parent 747d374 commit 5e8a533
Show file tree
Hide file tree
Showing 14 changed files with 150 additions and 177 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ The bucket name must be unique. For convenience, you can name it `<YOUR_PROJECT_
1. To create the bucket and upload the JSON file, run the following command in the Terminal:

```bash
python product/setup/create_gcs_bucket.py
python product/setup_product/create_gcs_bucket.py
```

Now you can see the bucket is created in the [Cloud Storage](https://console.cloud.google.com/storage/browser), and the files are uploaded.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright 2021 Google Inc. All Rights Reserved.
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -12,14 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pytest
import test_utils.prefixer

from setup_cleanup import delete_bucket
prefixer = test_utils.prefixer.Prefixer(
"python-retail", "samples/interactive-tutorials/product"
)


def delete_bucket_by_name(name: str):
    """Delete a Cloud Storage bucket.

    Deletes the bucket called *name*; when *name* is None, the bucket name
    is read from the BUCKET_NAME environment variable instead.
    """
    # Fall back to the environment-configured bucket only when no name is given.
    target = os.environ["BUCKET_NAME"] if name is None else name
    delete_bucket(target)
@pytest.fixture(scope="session")
def table_id_prefix() -> str:
    # Session-scoped so all tests in one run share a single unique prefix;
    # this keeps concurrently running test sessions from colliding on
    # BigQuery table names.
    return prefixer.create_prefix()
Original file line number Diff line number Diff line change
Expand Up @@ -12,90 +12,104 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# [START retail_import_products_from_big_query]
# Import products into a catalog from big query table using Retail API
#
import argparse
import os
import time

from google.cloud.retail import (
BigQuerySource,
ImportProductsRequest,
ProductInputConfig,
ProductServiceClient,
)

project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]

default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
dataset_id = "products"
table_id = "products"

def main(project_id, dataset_id, table_id):
    """Import products from a BigQuery table into the Retail default catalog.

    Args:
        project_id: Google Cloud project ID owning both the catalog and the table.
        dataset_id: BigQuery dataset containing the products table.
        table_id: BigQuery table holding the products to import.
    """
    # [START retail_import_products_from_big_query]
    # TODO: Set project_id to your Google Cloud Platform project ID.
    # project_id = "my-project"

    # TODO: Set dataset_id
    # dataset_id = "products"

    # TODO: Set table_id
    # table_id = "products"

    # Import products into a catalog from big query table using Retail API
    import time

    from google.cloud.retail import (
        BigQuerySource,
        ImportProductsRequest,
        ProductInputConfig,
        ProductServiceClient,
    )

    default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"

    # TO CHECK ERROR HANDLING USE THE TABLE WITH INVALID PRODUCTS:
    # table_id = "products_some_invalid"

    # get import products from big query request
    def get_import_products_big_query_request(reconciliation_mode):
        # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE:
        # default_catalog = "invalid_catalog_name"
        big_query_source = BigQuerySource()
        big_query_source.project_id = project_id
        big_query_source.dataset_id = dataset_id
        big_query_source.table_id = table_id
        big_query_source.data_schema = "product"

        input_config = ProductInputConfig()
        input_config.big_query_source = big_query_source

        import_request = ImportProductsRequest()
        import_request.parent = default_catalog
        import_request.reconciliation_mode = reconciliation_mode
        import_request.input_config = input_config

        print("---import products from big query table request---")
        print(import_request)

        return import_request

    # call the Retail API to import products
    def import_products_from_big_query():
        # TRY THE FULL RECONCILIATION MODE HERE:
        reconciliation_mode = ImportProductsRequest.ReconciliationMode.INCREMENTAL

        import_big_query_request = get_import_products_big_query_request(
            reconciliation_mode
        )
        big_query_operation = ProductServiceClient().import_products(
            import_big_query_request
        )

        print("---the operation was started:----")
        print(big_query_operation.operation.name)

        # Import is a long-running operation; poll until it completes.
        while not big_query_operation.done():
            print("---please wait till operation is done---")
            time.sleep(30)
        print("---import products operation is done---")

        if big_query_operation.metadata is not None:
            print("---number of successfully imported products---")
            print(big_query_operation.metadata.success_count)
            print("---number of failures during the importing---")
            print(big_query_operation.metadata.failure_count)
        else:
            print("---operation.metadata is empty---")

        if big_query_operation.result is not None:
            print("---operation result:---")
            print(big_query_operation.result())
        else:
            print("---operation.result is empty---")

    import_products_from_big_query()

    # [END retail_import_products_from_big_query]


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("dataset_id")
    parser.add_argument("table_id")
    args = parser.parse_args()
    main(project_id, args.dataset_id, args.table_id)
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,41 @@
import re
import subprocess

from setup_product.setup_cleanup import (
create_bq_dataset,
create_bq_table,
delete_bq_table,
upload_data_to_bq_table,
)


def test_import_products_bq(table_id_prefix):
    """Stage products in a fresh BigQuery table, run the import sample, check its output.

    Args:
        table_id_prefix: session-scoped fixture supplying a unique table-name
            prefix so concurrent test runs don't collide on BigQuery tables.
    """
    dataset = "products"
    valid_products_table = f"{table_id_prefix}products"
    product_schema = "../resources/product_schema.json"
    valid_products_source_file = "../resources/products.json"

    create_bq_dataset(dataset)
    create_bq_table(dataset, valid_products_table, product_schema)
    upload_data_to_bq_table(
        dataset, valid_products_table, valid_products_source_file, product_schema
    )

    try:
        output = str(
            subprocess.check_output(
                f"python import_products_big_query_table.py {dataset} {valid_products_table}",
                shell=True,
            )
        )
    finally:
        # Delete the table even when the sample under test fails, so reruns
        # don't leak BigQuery tables.
        delete_bq_table(dataset, valid_products_table)

    assert re.match(".*import products from big query table request.*", output)
    assert re.match(".*the operation was started.*", output)
    assert re.match(
        ".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
        output,
    )

    assert re.match(".*number of successfully imported products.*?316.*", output)
    assert re.match(".*number of failures during the importing.*?0.*", output)
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,11 @@


# Read the project number from the environment variable
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
bucket_name = os.environ["BUCKET_NAME"]

# You can change the branch here. The "default_branch" is set to point to the branch "0"
default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"

gcs_bucket = f"gs://{bucket_name}"
gcs_errors_bucket = f"{gcs_bucket}/error"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
import subprocess

from setup_product.setup_cleanup import create_bucket, delete_bucket, upload_blob


def test_import_products_gcs():
    """Stage products in a GCS bucket, run the import sample, check its output."""
    bucket_name = os.environ["BUCKET_NAME"]
    create_bucket(bucket_name)
    upload_blob(bucket_name, "../resources/products.json")

    try:
        output = str(
            subprocess.check_output("python import_products_gcs.py", shell=True)
        )
    finally:
        # Delete the bucket even when the sample under test fails, so reruns
        # start from a clean state.
        delete_bucket(bucket_name)

    assert re.match(".*import products from google cloud source request.*", output)
    assert re.match('.*input_uris: "gs://.*/products.json".*', output)
    assert re.match(".*the operation was started.*", output)
    assert re.match(
        ".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
        output,
    )

    assert re.match(".*number of successfully imported products.*?316.*", output)
    assert re.match(".*number of failures during the importing.*?0.*", output)
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@
from google.protobuf.field_mask_pb2 import FieldMask

# Read the project number from the environment variable
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]

default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"


# prepare product to import as inline source
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,5 @@ def test_import_products_gcs():
output,
)

assert re.match(".*number of successfully imported products.*2.*", output)
assert re.match(".*number of successfully imported products.*?2.*", output)
assert re.match(".*number of failures during the importing.*?0.*", output)
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
pytest==6.2.5
pytest-xdist==2.5.0
google-cloud-testutils==1.3.1

This file was deleted.

Loading

0 comments on commit 5e8a533

Please sign in to comment.