From 450bbaca9840ad402fee65cd7caa0d5ec7207bfe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 18:48:08 +0000 Subject: [PATCH 01/16] chore: Bump fastapi from 0.99.1 to 0.109.1 in /sdk/python/requirements Bumps [fastapi](https://github.com/tiangolo/fastapi) from 0.99.1 to 0.109.1. - [Release notes](https://github.com/tiangolo/fastapi/releases) - [Commits](https://github.com/tiangolo/fastapi/compare/0.99.1...0.109.1) --- updated-dependencies: - dependency-name: fastapi dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 740356907d8..fe63c762f70 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -173,7 +173,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 18486d7fa9a..872e96ff5c7 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 3bda9e72f9f..56ae95209a2 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -178,7 +178,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index c180c50c81e..98620a139cc 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 6989d5b4ccf..ca9265b0a10 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -173,7 +173,7 @@ execnet==2.0.2 # via pytest-xdist executing==2.0.1 # via stack-data -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) fastavro==1.9.0 # via diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 3b6f88b4e2a..18ec8fef1e1 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -42,7 +42,7 @@ dill==0.3.7 # via feast (setup.py) exceptiongroup==1.1.3 # via anyio -fastapi==0.99.1 +fastapi==0.109.1 # via feast (setup.py) 
fastavro==1.9.0 # via From b9b8b26be229c4225be0edec2a89681d0247612a Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Wed, 7 Feb 2024 10:56:16 +0800 Subject: [PATCH 02/16] removing version constraint for fastapi, bumping starlette dependency Signed-off-by: Chester Ong --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.10-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-requirements.txt | 2 +- setup.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index fe63c762f70..f9beeb2d41a 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -836,7 +836,7 @@ sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 872e96ff5c7..2705876fe0e 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -180,7 +180,7 @@ sqlalchemy[mypy]==1.4.50 # sqlalchemy sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 56ae95209a2..dc04ddf9094 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -853,7 +853,7 @@ sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 98620a139cc..7ca1bb01861 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -185,7 +185,7 @@ sqlalchemy[mypy]==1.4.50 # sqlalchemy sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index ca9265b0a10..251fc809d7c 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -845,7 +845,7 @@ sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 # via ipython -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 18ec8fef1e1..5c1ce8f99fe 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -180,7 +180,7 @@ sqlalchemy[mypy]==1.4.50 # sqlalchemy sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy -starlette==0.27.0 +starlette==0.35.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) diff --git a/setup.py b/setup.py index 4905a7697d7..76b25f926af 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ "toml>=0.10.0,<1", "tqdm>=4,<5", "typeguard==2.13.3", - 
"fastapi>=0.68.0,<0.100", + "fastapi>=0.68.0", "uvicorn[standard]>=0.14.0,<1", "gunicorn", "dask>=2021.1.0", From 698ce468d060a5849ea8b0a98cd59ce14122a2cf Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Wed, 7 Feb 2024 11:51:10 +0800 Subject: [PATCH 03/16] rerun Signed-off-by: Chester Ong From 2bd6a9ef99a7707db24d40b7e733662ebdd0af52 Mon Sep 17 00:00:00 2001 From: Chester Date: Thu, 8 Feb 2024 13:58:13 +0800 Subject: [PATCH 04/16] bumping mypy>=1.4.1 Signed-off-by: Chester Ong --- .../requirements/py3.10-ci-requirements.txt | 22 +++++-------------- setup.py | 2 +- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index f9beeb2d41a..9435a68deb7 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -121,9 +121,7 @@ comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -226,9 +224,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -462,7 +458,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -801,9 +797,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -829,9 +823,7 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 # via sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 @@ -961,9 +953,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/setup.py b/setup.py index 76b25f926af..523ac4e417f 100644 --- a/setup.py +++ b/setup.py @@ -156,7 +156,7 @@ "minio==7.1.0", "mock==2.0.0", "moto", - "mypy>=0.981,<0.990", + "mypy>=1.4.1", "avro==1.10.0", "fsspec<2023.10.0", "urllib3>=1.25.4,<3", From bc7ce0d47bf5ca04f4c3ddc38625d47f549a34bc Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 14:04:27 +0800 Subject: [PATCH 05/16] ran `no_implicit_optional sdk/python` Signed-off-by: Chester Ong --- sdk/python/feast/feature_service.py | 2 +- sdk/python/feast/feature_view.py | 2 +- sdk/python/feast/importer.py | 3 ++- .../contrib/athena_offline_store/athena_source.py | 6 +++--- .../contrib/athena_offline_store/tests/data_source.py | 2 +- .../contrib/mssql_offline_store/tests/data_source.py | 4 ++-- .../contrib/postgres_offline_store/tests/data_source.py | 2 +- .../contrib/spark_offline_store/tests/data_source.py | 4 ++-- sdk/python/feast/infra/passthrough_provider.py | 2 +- sdk/python/feast/infra/provider.py | 2 +- sdk/python/feast/infra/utils/aws_utils.py | 2 +- sdk/python/feast/infra/utils/hbase_utils.py | 8 ++++---- sdk/python/feast/type_map.py | 2 +- sdk/python/tests/data/data_creator.py | 2 +- sdk/python/tests/foo_provider.py | 2 +- 
.../feature_repos/universal/data_source_creator.py | 2 +- .../feature_repos/universal/data_sources/bigquery.py | 2 +- .../feature_repos/universal/data_sources/file.py | 6 +++--- .../feature_repos/universal/data_sources/redshift.py | 2 +- .../feature_repos/universal/data_sources/snowflake.py | 2 +- 20 files changed, 30 insertions(+), 29 deletions(-) diff --git a/sdk/python/feast/feature_service.py b/sdk/python/feast/feature_service.py index c3037a55da2..7ec923205a3 100644 --- a/sdk/python/feast/feature_service.py +++ b/sdk/python/feast/feature_service.py @@ -56,7 +56,7 @@ def __init__( *, name: str, features: List[Union[FeatureView, OnDemandFeatureView]], - tags: Dict[str, str] = None, + tags: Optional[Dict[str, str]] = None, description: str = "", owner: str = "", logging_config: Optional[LoggingConfig] = None, diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 67f9662d317..f87ae7ab132 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -101,7 +101,7 @@ def __init__( name: str, source: DataSource, schema: Optional[List[Field]] = None, - entities: List[Entity] = None, + entities: Optional[List[Entity]] = None, ttl: Optional[timedelta] = timedelta(days=0), online: bool = True, description: str = "", diff --git a/sdk/python/feast/importer.py b/sdk/python/feast/importer.py index bbd592101a6..3733031371d 100644 --- a/sdk/python/feast/importer.py +++ b/sdk/python/feast/importer.py @@ -5,9 +5,10 @@ FeastInvalidBaseClass, FeastModuleImportError, ) +from typing import Optional -def import_class(module_name: str, class_name: str, class_type: str = None): +def import_class(module_name: str, class_name: str, class_type: Optional[str] = None): """ Dynamically loads and returns a class from a module. 
diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py index 8e9e3893f3a..0aca42cd682 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena_source.py @@ -297,9 +297,9 @@ class SavedDatasetAthenaStorage(SavedDatasetStorage): def __init__( self, table_ref: str, - query: str = None, - database: str = None, - data_source: str = None, + query: Optional[str] = None, + database: Optional[str] = None, + data_source: Optional[str] = None, ): self.athena_options = AthenaOptions( table=table_ref, query=query, database=database, data_source=data_source diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py index 384ab69e81f..f68e109d6c1 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: table_name = destination_name diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 9b751d98efe..9e0e1856da7 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -1,4 +1,4 @@ -from typing import Dict, List +from typing import Optional, Dict, List import pandas as pd import pytest @@ -66,7 +66,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: # Make sure the field mapping is correct and convert the datetime datasources. 
diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index f4479501323..224fcea30f9 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -85,7 +85,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py index 71c07b20c27..bea250d0e83 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py @@ -2,7 +2,7 @@ import shutil import tempfile import uuid -from typing import Dict, List +from typing import Optional, Dict, List import pandas as pd from pyspark import SparkConf @@ -70,7 +70,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: if timestamp_field in df: diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 28b10c12595..811abe106c3 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -180,7 +180,7 @@ def online_read( config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List: set_usage_attribute("provider", self.__class__.__name__) result = [] diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 82879b264af..2a9670cacef 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -211,7 +211,7 @@ def online_read( config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: """ Reads features values for the given entity keys. diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index ef83c6d1c62..c3604ee41f0 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -816,7 +816,7 @@ def execute_athena_query( database: str, workgroup: str, query: str, - temp_table: str = None, + temp_table: Optional[str] = None, ) -> str: """Execute athena statement synchronously. Waits for the query to finish. 
diff --git a/sdk/python/feast/infra/utils/hbase_utils.py b/sdk/python/feast/infra/utils/hbase_utils.py index d44f93f1619..483edff6f11 100644 --- a/sdk/python/feast/infra/utils/hbase_utils.py +++ b/sdk/python/feast/infra/utils/hbase_utils.py @@ -1,4 +1,4 @@ -from typing import List +from typing import Optional, List from happybase import ConnectionPool @@ -38,9 +38,9 @@ class HBaseConnector: def __init__( self, - pool: ConnectionPool = None, - host: str = None, - port: int = None, + pool: Optional[ConnectionPool] = None, + host: Optional[str] = None, + port: Optional[int] = None, connection_pool_size: int = 4, ): if pool is None: diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index e51e1e743bb..914ef130888 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -114,7 +114,7 @@ def feast_value_type_to_pandas_type(value_type: ValueType) -> Any: def python_type_to_feast_value_type( - name: str, value: Any = None, recurse: bool = True, type_name: Optional[str] = None + name: str, value: Optional[Any] = None, recurse: bool = True, type_name: Optional[str] = None ) -> ValueType: """ Finds the equivalent Feast Value Type for a Python value. Both native diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 8d5b1979fa3..1fc66aee845 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -9,7 +9,7 @@ def create_basic_driver_dataset( entity_type: FeastType = Int32, - feature_dtype: str = None, + feature_dtype: Optional[str] = None, feature_is_list: bool = False, list_has_empty_list: bool = False, ) -> pd.DataFrame: diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index d27e2645d4e..044696eda86 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -78,7 +78,7 @@ def online_read( config: RepoConfig, table: FeatureView, entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, + requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: pass diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index b36af0db472..c162d283ada 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -20,7 +20,7 @@ def create_data_source( destination_name: str, event_timestamp_column="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, timestamp_field: Optional[str] = None, ) -> DataSource: """ diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 384037eef14..215d19ba7f3 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -66,7 +66,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, **kwargs, ) -> DataSource: diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py 
b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 124dd4c88d6..3263785683e 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -41,7 +41,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -96,7 +96,7 @@ def create_data_source( destination_name: str, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) @@ -171,7 +171,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: filename = f"{destination_name}.parquet" port = self.minio.get_exposed_port("9000") diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index dfe8e3d33bf..e6f20d6125b 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index c14780da97d..1414291a18d 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -51,7 +51,7 @@ def create_data_source( suffix: Optional[str] = None, timestamp_field="ts", created_timestamp_column="created_ts", - field_mapping: Dict[str, str] = None, + field_mapping: Optional[Dict[str, str]] = None, ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) From efab8602dfb538c3617cf56bc37ae0876a310005 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 14:28:31 +0800 Subject: [PATCH 06/16] fixed missing body errors for abstract methods by raising NotImplementedError Signed-off-by: Chester Ong --- sdk/python/feast/data_source.py | 20 +++++------ .../feast/infra/contrib/stream_processor.py | 8 ++--- .../mssql_offline_store/tests/data_source.py | 4 +-- .../feast/infra/offline_stores/file_source.py | 2 +- .../infra/offline_stores/offline_store.py | 35 +++++++------------ .../feast/infra/registry/base_registry.py | 2 ++ sdk/python/tests/foo_provider.py | 6 ++++ .../universal/data_source_creator.py | 2 +- .../universal/online_store_creator.py | 4 +-- .../offline_stores/test_offline_store.py | 10 +++--- 10 files changed, 46 insertions(+), 47 deletions(-) diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index b7ce19aad9b..064291eb792 100644 --- a/sdk/python/feast/data_source.py +++ 
b/sdk/python/feast/data_source.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import abc import enum import warnings from abc import ABC, abstractmethod @@ -485,12 +485,12 @@ def to_proto(self) -> DataSourceProto: return data_source_proto def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: @@ -534,12 +534,12 @@ def __init__( self.schema = schema def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError def __eq__(self, other): if not isinstance(other, RequestSource): @@ -610,12 +610,12 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: @typechecked class KinesisSource(DataSource): def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def from_proto(data_source: DataSourceProto): @@ -639,7 +639,7 @@ def from_proto(data_source: DataSourceProto): @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: - pass + raise NotImplementedError def get_table_query_string(self) -> str: raise NotImplementedError @@ -772,12 +772,12 @@ def __hash__(self): return super().__hash__() def validate(self, config: RepoConfig): - pass + raise NotImplementedError def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - pass + raise NotImplementedError @staticmethod def from_proto(data_source: DataSourceProto): diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index 24817c82eaa..c39b51c52fc 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -54,13 +54,13 @@ def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: Ingests data from the stream source attached to the stream feature view; transforms the data and then persists it to the online store and/or offline store, depending on the 'to' parameter. """ - pass + raise NotImplementedError def _ingest_stream_data(self) -> StreamTable: """ Ingests data into a StreamTable. """ - pass + raise NotImplementedError def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: """ @@ -68,14 +68,14 @@ def _construct_transformation_plan(self, table: StreamTable) -> StreamTable: evaluation, the StreamTable will not be materialized until it is actually evaluated. For example: df.collect() in spark or tbl.execute() in Flink. """ - pass + raise NotImplementedError def _write_stream_data(self, table: StreamTable, to: PushMode) -> None: """ Launches a job to persist stream data to the online store and/or offline store, depending on the 'to' parameter, and returns a handle for the job. 
""" - pass + raise NotImplementedError def get_stream_processor_object( diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 9e0e1856da7..85e111e0a22 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -99,10 +99,10 @@ def create_data_source( ) def create_saved_dataset_destination(self) -> SavedDatasetStorage: - pass + raise NotImplementedError def get_prefixed_table_name(self, destination_name: str) -> str: return f"{self.project_name}_{destination_name}" def teardown(self): - pass + raise NotImplementedError diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index ac824b359f4..887b4100796 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -183,7 +183,7 @@ def create_filesystem_and_path( return None, path def get_table_query_string(self) -> str: - pass + raise NotImplementedError class FileOptions: diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 6141e3c435b..5287d2ad46a 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -150,9 +150,8 @@ def to_sql(self) -> str: """ Return RetrievalJob generated SQL statement if applicable. """ - pass + raise NotImplementedError - @abstractmethod def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: """ Synchronously executes the underlying query and returns the result as a pandas dataframe. @@ -162,9 +161,8 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Does not handle on demand transformations or dataset validation. For either of those, `to_df` should be used. """ - pass + raise NotImplementedError - @abstractmethod def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """ Synchronously executes the underlying query and returns the result as an arrow table. @@ -174,21 +172,18 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: Does not handle on demand transformations or dataset validation. For either of those, `to_arrow` should be used. """ - pass + raise NotImplementedError @property - @abstractmethod def full_feature_names(self) -> bool: """Returns True if full feature names should be applied to the results of the query.""" - pass + raise NotImplementedError @property - @abstractmethod def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" - pass + raise NotImplementedError - @abstractmethod def persist( self, storage: SavedDatasetStorage, @@ -204,13 +199,12 @@ def persist( allow_overwrite: If True, a pre-existing location (e.g. table or file) can be overwritten. Currently not all individual offline store implementations make use of this parameter. 
""" - pass + raise NotImplementedError @property - @abstractmethod def metadata(self) -> Optional[RetrievalMetadata]: """Returns metadata about the retrieval job.""" - pass + raise NotImplementedError def supports_remote_storage_export(self) -> bool: """Returns True if the RetrievalJob supports `to_remote_storage`.""" @@ -226,7 +220,7 @@ def to_remote_storage(self) -> List[str]: Returns: A list of parquet file paths in remote storage. """ - raise NotImplementedError() + raise NotImplementedError class OfflineStore(ABC): @@ -239,7 +233,6 @@ class OfflineStore(ABC): """ @staticmethod - @abstractmethod def pull_latest_from_table_or_query( config: RepoConfig, data_source: DataSource, @@ -270,10 +263,9 @@ def pull_latest_from_table_or_query( Returns: A RetrievalJob that can be executed to get the entity rows. """ - pass + raise NotImplementedError @staticmethod - @abstractmethod def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], @@ -302,10 +294,9 @@ def get_historical_features( Returns: A RetrievalJob that can be executed to get the features. """ - pass + raise NotImplementedError @staticmethod - @abstractmethod def pull_all_from_table_or_query( config: RepoConfig, data_source: DataSource, @@ -334,7 +325,7 @@ def pull_all_from_table_or_query( Returns: A RetrievalJob that can be executed to get the entity rows. """ - pass + raise NotImplementedError @staticmethod def write_logged_features( @@ -358,7 +349,7 @@ def write_logged_features( logging_config: A LoggingConfig object that determines where the logs will be written. registry: The registry for the current feature store. """ - raise NotImplementedError() + raise NotImplementedError @staticmethod def offline_write_batch( @@ -377,4 +368,4 @@ def offline_write_batch( progress: Function to be called once a portion of the data has been written, used to show progress. 
""" - raise NotImplementedError() + raise NotImplementedError diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 14b098bb123..8fde08e1848 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -501,7 +501,9 @@ def list_validation_references( Returns: List of request feature views """ + raise NotImplementedError + @abstractmethod def list_project_metadata( self, project: str, allow_cache: bool = False ) -> List[ProjectMetadata]: diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index 044696eda86..60a15b56f30 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -1,3 +1,4 @@ +from abc import abstractmethod from datetime import datetime from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -61,6 +62,7 @@ def materialize_single_feature_view( ) -> None: pass + @abstractmethod def get_historical_features( self, config: RepoConfig, @@ -73,6 +75,7 @@ def get_historical_features( ) -> RetrievalJob: pass + @abstractmethod def online_read( self, config: RepoConfig, @@ -82,9 +85,11 @@ def online_read( ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: pass + @abstractmethod def retrieve_saved_dataset(self, config: RepoConfig, dataset: SavedDataset): pass + @abstractmethod def write_feature_service_logs( self, feature_service: FeatureService, @@ -94,6 +99,7 @@ def write_feature_service_logs( ): pass + @abstractmethod def retrieve_feature_service_logs( self, feature_service: FeatureService, diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index c162d283ada..d64463606ff 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -53,7 +53,7 @@ def create_saved_dataset_destination(self) -> SavedDatasetStorage: ... def create_logged_features_destination(self) -> LoggingDestination: - pass + raise NotImplementedError @abstractmethod def teardown(self): diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py index c3872ea697f..10a81437395 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py @@ -8,7 +8,7 @@ def __init__(self, project_name: str, **kwargs): self.project_name = project_name def create_online_store(self) -> FeastConfigBaseModel: - ... + raise NotImplementedError def teardown(self): - ... + raise NotImplementedError diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index ef0cce04707..079d23cf1a3 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -46,7 +46,7 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Does not handle on demand transformations or dataset validation. For either of those, `to_df` should be used. 
""" - pass + raise NotImplementedError def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """ @@ -55,17 +55,17 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: Does not handle on demand transformations or dataset validation. For either of those, `to_arrow` should be used. """ - pass + raise NotImplementedError @property def full_feature_names(self) -> bool: """Returns True if full feature names should be applied to the results of the query.""" - pass + raise NotImplementedError @property def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" - pass + raise NotImplementedError def persist( self, @@ -87,7 +87,7 @@ def persist( @property def metadata(self) -> Optional[RetrievalMetadata]: """Returns metadata about the retrieval job.""" - pass + raise NotImplementedError # Since RetreivalJob are not really tested for subclasses we add some tests here. From d9ddf1351bbf08909138f036247443766949d1fe Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 15:33:21 +0800 Subject: [PATCH 07/16] added more type hints Signed-off-by: Chester Ong --- sdk/python/feast/infra/contrib/spark_kafka_processor.py | 9 +++++++-- sdk/python/feast/infra/offline_stores/redshift.py | 6 +++--- .../feast/infra/offline_stores/snowflake_source.py | 4 ++-- sdk/python/feast/infra/registry/snowflake.py | 2 +- .../feast/infra/utils/snowflake/snowflake_utils.py | 4 +++- 5 files changed, 16 insertions(+), 9 deletions(-) diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index ea55d89988a..775352cbae5 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -5,6 +5,7 @@ from pyspark.sql import DataFrame, SparkSession from pyspark.sql.avro.functions import from_avro from pyspark.sql.functions import col, from_json +from pyspark.sql.streaming import StreamingQuery from feast.data_format import AvroFormat, JsonFormat from feast.data_source import KafkaSource, PushMode @@ -63,7 +64,11 @@ def __init__( self.join_keys = [fs.get_entity(entity).join_key for entity in sfv.entities] super().__init__(fs=fs, sfv=sfv, data_source=sfv.stream_source) - def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> None: + # Type hinting for data_source type. + # data_source type has been checked to be an instance of KafkaSource. + self.data_source: KafkaSource = self.data_source # type: ignore + + def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> StreamingQuery: ingested_stream_df = self._ingest_stream_data() transformed_df = self._construct_transformation_plan(ingested_stream_df) online_store_query = self._write_stream_data(transformed_df, to) @@ -122,7 +127,7 @@ def _ingest_stream_data(self) -> StreamTable: def _construct_transformation_plan(self, df: StreamTable) -> StreamTable: return self.sfv.udf.__call__(df) if self.sfv.udf else df - def _write_stream_data(self, df: StreamTable, to: PushMode): + def _write_stream_data(self, df: StreamTable, to: PushMode) -> StreamingQuery: # Validation occurs at the fs.write_to_online_store() phase against the stream feature view schema. 
def batch_write(row: DataFrame, batch_id: int): rows: pd.DataFrame = row.toPandas() diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 837cf49655d..6034bf5ac7b 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -51,13 +51,13 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): type: Literal["redshift"] = "redshift" """ Offline store type selector""" - cluster_id: Optional[StrictStr] + cluster_id: Optional[StrictStr] = None """ Redshift cluster identifier, for provisioned clusters """ - user: Optional[StrictStr] + user: Optional[StrictStr] = None """ Redshift user name, only required for provisioned clusters """ - workgroup: Optional[StrictStr] + workgroup: Optional[StrictStr] = None """ Redshift workgroup identifier, for serverless """ region: StrictStr diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 0cbf82dd1c1..b4500b9807e 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,5 +1,5 @@ import warnings -from typing import Callable, Dict, Iterable, Optional, Tuple +from typing import Callable, Dict, Iterable, Optional, Tuple, Any from typeguard import typechecked @@ -223,7 +223,7 @@ def get_table_column_names_and_types( query = f"SELECT * FROM {self.get_table_query_string()} LIMIT 5" cursor = execute_snowflake_statement(conn, query) - metadata = [ + metadata: list[dict[str, Any]] = [ { "column_name": column.name, "type_code": column.type_code, diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index 56c7bc1f659..c1ebf13d6b8 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -418,7 +418,7 @@ def _delete_object( """ cursor = execute_snowflake_statement(conn, query) - if cursor.rowcount < 1 and not_found_exception: + if cursor.rowcount < 1 and not_found_exception: # type: ignore raise not_found_exception(name, project) self._set_last_updated_metadata(datetime.utcnow(), project) diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index a4cda89a6f6..36588efd0fa 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -22,6 +22,8 @@ import feast from feast.errors import SnowflakeIncompleteConfig, SnowflakeQueryUnknownError from feast.feature_view import FeatureView +from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig +from feast.infra.online_stores.snowflake import SnowflakeOnlineStoreConfig from feast.repo_config import RepoConfig try: @@ -43,7 +45,7 @@ class GetSnowflakeConnection: - def __init__(self, config: str, autocommit=True): + def __init__(self, config: SnowflakeOfflineStoreConfig | SnowflakeOnlineStoreConfig, autocommit=True): self.config = config self.autocommit = autocommit From 23dbcf754dce6cee4292b9f3abd702f87445d610 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 22:55:37 +0800 Subject: [PATCH 08/16] bump all python requirements.lock file Signed-off-by: Chester Ong --- .../requirements/py3.10-requirements.txt | 8 ++----- .../requirements/py3.8-ci-requirements.txt | 22 +++++-------------- .../requirements/py3.8-requirements.txt | 8 ++----- 
.../requirements/py3.9-ci-requirements.txt | 22 +++++-------------- .../requirements/py3.9-requirements.txt | 8 ++----- 5 files changed, 18 insertions(+), 50 deletions(-) diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 2705876fe0e..5d5d451e148 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -175,9 +175,7 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy starlette==0.35.1 @@ -210,9 +208,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index dc04ddf9094..808a58e11be 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -127,9 +127,7 @@ comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -231,9 +229,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -478,7 +474,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -824,9 +820,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -846,9 +840,7 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 # via sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 @@ -981,9 +973,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 7ca1bb01861..163fa4c9a87 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -180,9 +180,7 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy starlette==0.35.1 @@ -216,9 +214,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 251fc809d7c..f9d7ac3fb9f 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -121,9 +121,7 @@ comm==0.2.0 # ipykernel # ipywidgets coverage[toml]==7.3.2 - # via - # coverage - # 
pytest-cov + # via pytest-cov cryptography==41.0.6 # via # azure-identity @@ -226,9 +224,7 @@ google-auth==2.23.4 google-auth-httplib2==0.1.1 # via google-api-python-client google-cloud-bigquery[pandas]==3.12.0 - # via - # feast (setup.py) - # google-cloud-bigquery + # via feast (setup.py) google-cloud-bigquery-storage==2.22.0 # via feast (setup.py) google-cloud-bigtable==2.21.0 @@ -469,7 +465,7 @@ msgpack==1.0.7 # via cachecontrol multiprocess==0.70.15 # via bytewax -mypy==0.982 +mypy==1.8.0 # via # feast (setup.py) # sqlalchemy @@ -810,9 +806,7 @@ sniffio==1.3.0 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.5.0 - # via - # feast (setup.py) - # snowflake-connector-python + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 @@ -838,9 +832,7 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 # via sphinx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy stack-data==0.6.3 @@ -973,9 +965,7 @@ urllib3==1.26.18 # rockset # snowflake-connector-python uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 5c1ce8f99fe..4d9b8f107de 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -175,9 +175,7 @@ sniffio==1.3.0 # anyio # httpx sqlalchemy[mypy]==1.4.50 - # via - # feast (setup.py) - # sqlalchemy + # via feast (setup.py) sqlalchemy2-stubs==0.0.2a37 # via sqlalchemy starlette==0.35.1 @@ -211,9 +209,7 @@ typing-extensions==4.8.0 urllib3==2.1.0 # via requests uvicorn[standard]==0.24.0.post1 - # via - # feast (setup.py) - # uvicorn + # via feast (setup.py) uvloop==0.19.0 # via uvicorn volatile==2.1.0 From e0e033b908d94c9f6941bc6b935caac18db7988a Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 23:28:12 +0800 Subject: [PATCH 09/16] ignoring tests sdk/python/tests/ type linting Signed-off-by: Chester Ong --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 4b85c0e4483..6736e64078f 100644 --- a/Makefile +++ b/Makefile @@ -310,7 +310,7 @@ format-python: cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests lint-python: - cd ${ROOT_DIR}/sdk/python; python -m mypy + cd ${ROOT_DIR}/sdk/python; python -m mypy --exclude=/tests/ --follow-imports=skip feast cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/ cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests From fc4a9638c75d70844c7178f29e159c59a51da495 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 23:34:08 +0800 Subject: [PATCH 10/16] lint fixes Signed-off-by: Chester Ong --- sdk/python/feast/infra/contrib/stream_processor.py | 4 ++-- sdk/python/feast/infra/online_stores/dynamodb.py | 4 ++-- sdk/python/feast/infra/registry/registry_store.py | 4 ++-- sdk/python/feast/type_map.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index c39b51c52fc..f246d3b30df 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -1,6 +1,6 @@ from abc 
import ABC from types import MethodType -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Optional, TypeAlias from pyspark.sql import DataFrame @@ -17,7 +17,7 @@ } # TODO: support more types other than just Spark. -StreamTable = DataFrame +StreamTable: TypeAlias = DataFrame class ProcessorConfig(FeastConfigBaseModel): diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 525978e736b..a1eef16f40d 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -288,12 +288,12 @@ def _get_dynamodb_resource(self, region: str, endpoint_url: Optional[str] = None ) return self._dynamodb_resource - def _sort_dynamodb_response(self, responses: list, order: list): + def _sort_dynamodb_response(self, responses: list, order: list) -> Any: """DynamoDB Batch Get Item doesn't return items in a particular order.""" # Assign an index to order order_with_index = {value: idx for idx, value in enumerate(order)} # Sort table responses by index - table_responses_ordered = [ + table_responses_ordered: Any = [ (order_with_index[tbl_res["entity_id"]], tbl_res) for tbl_res in responses ] table_responses_ordered = sorted( diff --git a/sdk/python/feast/infra/registry/registry_store.py b/sdk/python/feast/infra/registry/registry_store.py index c42a55cd9d2..5151fd74b27 100644 --- a/sdk/python/feast/infra/registry/registry_store.py +++ b/sdk/python/feast/infra/registry/registry_store.py @@ -17,7 +17,7 @@ def get_registry_proto(self) -> RegistryProto: Returns: Returns either the registry proto stored at the registry path, or an empty registry proto. """ - pass + raise NotImplementedError @abstractmethod def update_registry_proto(self, registry_proto: RegistryProto): @@ -40,7 +40,7 @@ def teardown(self): class NoopRegistryStore(RegistryStore): def get_registry_proto(self) -> RegistryProto: - pass + return RegistryProto() def update_registry_proto(self, registry_proto: RegistryProto): pass diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index 914ef130888..a1aee06000c 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -51,7 +51,7 @@ import pyarrow # null timestamps get converted to -9223372036854775808 -NULL_TIMESTAMP_INT_VALUE = np.datetime64("NaT").astype(int) +NULL_TIMESTAMP_INT_VALUE: int = np.datetime64("NaT").astype(int) def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any: @@ -321,7 +321,7 @@ def _python_datetime_to_int_timestamp( elif isinstance(value, Timestamp): int_timestamps.append(int(value.ToSeconds())) elif isinstance(value, np.datetime64): - int_timestamps.append(value.astype("datetime64[s]").astype(np.int_)) + int_timestamps.append(value.astype("datetime64[s]").astype(np.int_)) # type: ignore[attr-defined] elif isinstance(value, type(np.nan)): int_timestamps.append(NULL_TIMESTAMP_INT_VALUE) else: From 7f1cb7a8383ebf7d0ee39f5c7d1f9ea51a19be02 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 23:34:47 +0800 Subject: [PATCH 11/16] isort fixes Signed-off-by: Chester Ong --- sdk/python/feast/importer.py | 2 +- .../contrib/mssql_offline_store/tests/data_source.py | 2 +- .../contrib/spark_offline_store/tests/data_source.py | 2 +- sdk/python/feast/infra/offline_stores/snowflake_source.py | 2 +- sdk/python/feast/infra/utils/hbase_utils.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/importer.py b/sdk/python/feast/importer.py index 
3733031371d..d1d7d629010 100644 --- a/sdk/python/feast/importer.py +++ b/sdk/python/feast/importer.py @@ -1,11 +1,11 @@ import importlib +from typing import Optional from feast.errors import ( FeastClassImportError, FeastInvalidBaseClass, FeastModuleImportError, ) -from typing import Optional def import_class(module_name: str, class_name: str, class_type: Optional[str] = None): diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 85e111e0a22..2604cf7c18b 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -1,4 +1,4 @@ -from typing import Optional, Dict, List +from typing import Dict, List, Optional import pandas as pd import pytest diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py index bea250d0e83..7b4fda3b5f5 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py @@ -2,7 +2,7 @@ import shutil import tempfile import uuid -from typing import Optional, Dict, List +from typing import Dict, List, Optional import pandas as pd from pyspark import SparkConf diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index b4500b9807e..c30f0e973cf 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,5 +1,5 @@ import warnings -from typing import Callable, Dict, Iterable, Optional, Tuple, Any +from typing import Any, Callable, Dict, Iterable, Optional, Tuple from typeguard import typechecked diff --git a/sdk/python/feast/infra/utils/hbase_utils.py b/sdk/python/feast/infra/utils/hbase_utils.py index 483edff6f11..72afda2ef3d 100644 --- a/sdk/python/feast/infra/utils/hbase_utils.py +++ b/sdk/python/feast/infra/utils/hbase_utils.py @@ -1,4 +1,4 @@ -from typing import Optional, List +from typing import List, Optional from happybase import ConnectionPool From 3ceaa0620902054ec74b9b64c6df5f68ce42c47e Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 23:36:39 +0800 Subject: [PATCH 12/16] black and flake8 fixes Signed-off-by: Chester Ong --- sdk/python/feast/data_source.py | 1 - sdk/python/feast/infra/contrib/spark_kafka_processor.py | 4 +++- sdk/python/feast/infra/offline_stores/offline_store.py | 2 +- sdk/python/feast/infra/utils/snowflake/snowflake_utils.py | 6 +++++- sdk/python/feast/type_map.py | 5 ++++- 5 files changed, 13 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 064291eb792..3421fd5d309 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import abc import enum import warnings from abc import ABC, abstractmethod diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py index 775352cbae5..bac1c28b064 100644 --- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py +++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py @@ -68,7 +68,9 @@ def __init__( # data_source type has been checked to be an instance of KafkaSource. self.data_source: KafkaSource = self.data_source # type: ignore - def ingest_stream_feature_view(self, to: PushMode = PushMode.ONLINE) -> StreamingQuery: + def ingest_stream_feature_view( + self, to: PushMode = PushMode.ONLINE + ) -> StreamingQuery: ingested_stream_df = self._ingest_stream_data() transformed_df = self._construct_transformation_plan(ingested_stream_df) online_store_query = self._write_stream_data(transformed_df, to) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 5287d2ad46a..30135feccb3 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import warnings -from abc import ABC, abstractmethod +from abc import ABC from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index 36588efd0fa..193bd0d494d 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -45,7 +45,11 @@ class GetSnowflakeConnection: - def __init__(self, config: SnowflakeOfflineStoreConfig | SnowflakeOnlineStoreConfig, autocommit=True): + def __init__( + self, + config: SnowflakeOfflineStoreConfig | SnowflakeOnlineStoreConfig, + autocommit=True, + ): self.config = config self.autocommit = autocommit diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index a1aee06000c..ad3e273d37b 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -114,7 +114,10 @@ def feast_value_type_to_pandas_type(value_type: ValueType) -> Any: def python_type_to_feast_value_type( - name: str, value: Optional[Any] = None, recurse: bool = True, type_name: Optional[str] = None + name: str, + value: Optional[Any] = None, + recurse: bool = True, + type_name: Optional[str] = None, ) -> ValueType: """ Finds the equivalent Feast Value Type for a Python value. 
Both native From 73193afe57bec4e5b04a562eb1fed67d96ee67f5 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Thu, 8 Feb 2024 23:59:18 +0800 Subject: [PATCH 13/16] fix circular import Signed-off-by: Chester Ong --- sdk/python/feast/infra/utils/snowflake/snowflake_utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index 193bd0d494d..8eb5177ac23 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -22,8 +22,6 @@ import feast from feast.errors import SnowflakeIncompleteConfig, SnowflakeQueryUnknownError from feast.feature_view import FeatureView -from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig -from feast.infra.online_stores.snowflake import SnowflakeOnlineStoreConfig from feast.repo_config import RepoConfig try: @@ -47,7 +45,7 @@ class GetSnowflakeConnection: def __init__( self, - config: SnowflakeOfflineStoreConfig | SnowflakeOnlineStoreConfig, + config: str, autocommit=True, ): self.config = config From 94ced3472cca2dbb2ff93208a628b3335b3453a1 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Fri, 9 Feb 2024 00:32:41 +0800 Subject: [PATCH 14/16] lint fix Signed-off-by: Chester Ong --- sdk/python/feast/infra/contrib/stream_processor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/contrib/stream_processor.py b/sdk/python/feast/infra/contrib/stream_processor.py index f246d3b30df..df4e144f8c6 100644 --- a/sdk/python/feast/infra/contrib/stream_processor.py +++ b/sdk/python/feast/infra/contrib/stream_processor.py @@ -1,8 +1,9 @@ from abc import ABC from types import MethodType -from typing import TYPE_CHECKING, Optional, TypeAlias +from typing import TYPE_CHECKING, Optional from pyspark.sql import DataFrame +from typing_extensions import TypeAlias from feast.data_source import DataSource, PushMode from feast.importer import import_class From 59ba407a2ad6d512edfb517e8d4d528cb1824904 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Fri, 9 Feb 2024 00:39:21 +0800 Subject: [PATCH 15/16] use typing types to support python3.8 Signed-off-by: Chester Ong --- sdk/python/feast/infra/offline_stores/snowflake_source.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index c30f0e973cf..e29197c68d4 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,5 +1,5 @@ import warnings -from typing import Any, Callable, Dict, Iterable, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple from typeguard import typechecked @@ -223,7 +223,7 @@ def get_table_column_names_and_types( query = f"SELECT * FROM {self.get_table_query_string()} LIMIT 5" cursor = execute_snowflake_statement(conn, query) - metadata: list[dict[str, Any]] = [ + metadata: List[Dict[str, Any]] = [ { "column_name": column.name, "type_code": column.type_code, From 72ce6c4f0d5510e2f15f4dc9e6b475b921c43046 Mon Sep 17 00:00:00 2001 From: Chester Ong Date: Fri, 9 Feb 2024 09:31:54 +0800 Subject: [PATCH 16/16] tests fix: make foo_provider concrete again | MockRetrievalJob to be concrete Signed-off-by: Chester Ong --- sdk/python/tests/foo_provider.py | 12 +++--------- 
.../unit/infra/offline_stores/test_offline_store.py | 13 ++++++++----- 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index 60a15b56f30..ba256a3813c 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -1,4 +1,3 @@ -from abc import abstractmethod from datetime import datetime from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -62,7 +61,6 @@ def materialize_single_feature_view( ) -> None: pass - @abstractmethod def get_historical_features( self, config: RepoConfig, @@ -73,9 +71,8 @@ def get_historical_features( project: str, full_feature_names: bool = False, ) -> RetrievalJob: - pass + return RetrievalJob() - @abstractmethod def online_read( self, config: RepoConfig, @@ -83,13 +80,11 @@ def online_read( entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - pass + return [] - @abstractmethod def retrieve_saved_dataset(self, config: RepoConfig, dataset: SavedDataset): pass - @abstractmethod def write_feature_service_logs( self, feature_service: FeatureService, @@ -99,7 +94,6 @@ def write_feature_service_logs( ): pass - @abstractmethod def retrieve_feature_service_logs( self, feature_service: FeatureService, @@ -108,4 +102,4 @@ def retrieve_feature_service_logs( config: RepoConfig, registry: BaseRegistry, ) -> RetrievalJob: - pass + return RetrievalJob() diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index 079d23cf1a3..220bdba0dae 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -39,6 +39,9 @@ class MockRetrievalJob(RetrievalJob): + def to_sql(self) -> str: + return "" + def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: """ Synchronously executes the underlying query and returns the result as a pandas dataframe. @@ -46,7 +49,7 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: Does not handle on demand transformations or dataset validation. For either of those, `to_df` should be used. """ - raise NotImplementedError + return pd.DataFrame() def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """ @@ -55,17 +58,17 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: Does not handle on demand transformations or dataset validation. For either of those, `to_arrow` should be used. """ - raise NotImplementedError + return pyarrow.Table() @property def full_feature_names(self) -> bool: """Returns True if full feature names should be applied to the results of the query.""" - raise NotImplementedError + return False @property def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" - raise NotImplementedError + return [] def persist( self, @@ -208,7 +211,7 @@ def retrieval_job(request, environment): def test_to_sql(): - assert MockRetrievalJob().to_sql() is None + assert MockRetrievalJob().to_sql() == "" @pytest.mark.parametrize("timeout", (None, 30))
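
For illustration, the Python 3.8 compatibility pattern these patches settle on (typing_extensions.TypeAlias plus typing.List/typing.Dict instead of built-in generics) can be exercised in isolation with the short sketch below. This is not part of the patch series; the DataFrame stand-in replaces pyspark.sql.DataFrame only so the snippet runs without Spark installed.

from dataclasses import dataclass
from typing import Any, Dict, List

from typing_extensions import TypeAlias  # added to `typing` itself only in Python 3.10

# Stand-in for pyspark.sql.DataFrame so this sketch runs without Spark.
@dataclass
class DataFrame:
    rows: List[Dict[str, Any]]

# Annotating the alias with TypeAlias (as in stream_processor.py above) lets
# mypy treat it as a type alias while staying importable on Python 3.8/3.9.
StreamTable: TypeAlias = DataFrame

def first_row(table: StreamTable) -> Dict[str, Any]:
    # typing.List / typing.Dict are used instead of list[...] / dict[...],
    # which are only subscriptable at runtime on Python 3.9+.
    return table.rows[0] if table.rows else {}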