Skip to content

Commit

Permalink
In Historical Retrieval (SDK) use project from client context (#1138)
Browse files Browse the repository at this point in the history
* use client.project in historical retrieval

Signed-off-by: Oleksii Moskalenko <[email protected]>

* formatting

Signed-off-by: Oleksii Moskalenko <[email protected]>
  • Loading branch information
pyalex authored Nov 5, 2020
1 parent 60f24c8 commit abb49f6
Showing 1 changed file with 6 additions and 15 deletions.
21 changes: 6 additions & 15 deletions sdk/python/feast/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -906,7 +906,6 @@ def get_historical_features(
self,
feature_refs: List[str],
entity_source: Union[pd.DataFrame, FileSource, BigQuerySource],
project: Optional[str] = None,
output_location: Optional[str] = None,
) -> RetrievalJob:
"""
Expand All @@ -928,8 +927,6 @@ def get_historical_features(
The user needs to make sure that the source (or staging location, if entity_source is
a Panda DataFrame) is accessible from the Spark cluster that will be used for the
retrieval job.
project: Specifies the project that contains the feature tables
which the requested features belong to.
output_location: Specifies the path in a bucket to write the exported feature data files
Returns:
Expand All @@ -945,13 +942,12 @@ def get_historical_features(
>>> feature_refs = ["bookings:bookings_7d", "bookings:booking_14d"]
>>> entity_source = FileSource("event_timestamp", ParquetFormat(), "gs://some-bucket/customer")
>>> feature_retrieval_job = feast_client.get_historical_features(
>>> feature_refs, entity_source, project="my_project")
>>> feature_refs, entity_source)
>>> output_file_uri = feature_retrieval_job.get_output_file_uri()
"gs://some-bucket/output/
"""
project = project or FEAST_DEFAULT_OPTIONS[CONFIG_PROJECT_KEY]
feature_tables = self._get_feature_tables_from_feature_refs(
feature_refs, project
feature_refs, self.project
)

if output_location is None:
Expand Down Expand Up @@ -988,7 +984,7 @@ def get_historical_features(
GetHistoricalFeaturesRequest(
feature_refs=feature_refs,
entity_source=entity_source.to_proto(),
project=project,
project=self.project,
output_format=output_format,
output_location=output_location,
),
Expand All @@ -1011,10 +1007,7 @@ def get_historical_features(
)

def get_historical_features_df(
self,
feature_refs: List[str],
entity_source: Union[FileSource, BigQuerySource],
project: str = None,
self, feature_refs: List[str], entity_source: Union[FileSource, BigQuerySource],
):
"""
Launch a historical feature retrieval job.
Expand All @@ -1027,8 +1020,6 @@ def get_historical_features_df(
entity_source (Union[FileSource, BigQuerySource]): Source for the entity rows.
The user needs to make sure that the source is accessible from the Spark cluster
that will be used for the retrieval job.
project: Specifies the project that contains the feature tables
which the requested features belong to.
Returns:
Returns the historical feature retrieval result in the form of Spark dataframe.
Expand All @@ -1043,10 +1034,10 @@ def get_historical_features_df(
>>> feature_refs = ["bookings:bookings_7d", "bookings:booking_14d"]
>>> entity_source = FileSource("event_timestamp", ParquetFormat, "gs://some-bucket/customer")
>>> df = feast_client.get_historical_features_df(
>>> feature_refs, entity_source, project="my_project")
>>> feature_refs, entity_source)
"""
feature_tables = self._get_feature_tables_from_feature_refs(
feature_refs, project
feature_refs, self.project
)
return start_historical_feature_retrieval_spark_session(
client=self,
Expand Down

0 comments on commit abb49f6

Please sign in to comment.