From 38f804c27ee63538c231c36d9e3fa45bf8926fe1 Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Tue, 14 May 2024 20:00:28 +0100 Subject: [PATCH 1/6] Updates for package update Signed-off-by: GBBBAS --- environment.yml | 77 ++++++++++--------- setup.py | 50 ++++++------ tests/api/v1/test_api_circular_average.py | 4 +- .../test_api_circular_standard_deviation.py | 4 +- tests/api/v1/test_api_interpolate.py | 4 +- tests/api/v1/test_api_latest.py | 6 +- tests/api/v1/test_api_metadata.py | 6 +- tests/api/v1/test_api_plot.py | 4 +- tests/api/v1/test_api_raw.py | 4 +- tests/api/v1/test_api_resample.py | 4 +- tests/api/v1/test_api_sql.py | 2 +- tests/api/v1/test_api_summary.py | 4 +- .../api/v1/test_api_time_weighted_average.py | 4 +- .../deploy/test_databricks_deploy.py | 3 - .../utilities/aws/test_s3_copy_utility.py | 4 +- 15 files changed, 89 insertions(+), 91 deletions(-) diff --git a/environment.yml b/environment.yml index cec84b295..bf10fd0ac 100644 --- a/environment.yml +++ b/environment.yml @@ -19,32 +19,31 @@ channels: - defaults dependencies: - python>=3.9,<3.12 - - jinja2==3.1.3 + - jinja2>=3.1.3 - pytest==7.4.0 - pytest-mock==3.11.1 - pytest-cov==4.1.0 - pylint==2.17.4 - - pip==23.1.2 + - pip>=23.1.2 - turbodbc==4.11.0 - numpy>=1.23.4 - - pandas>=1.5.2,<3.0.0 - oauthlib>=3.2.2 - cryptography>=38.0.3 - - azure-identity==1.12.0 - - azure-storage-file-datalake==12.12.0 - - azure-keyvault-secrets==4.7.0 - - azure-mgmt-storage==21.0.0 - - boto3==1.28.2 - - pyodbc==4.0.39 - - fastapi==0.110.0 - - httpx==0.24.1 + - azure-identity>=1.12.0 + - azure-storage-file-datalake>=12.12.0 + - azure-keyvault-secrets>=4.7.0 + - azure-mgmt-storage>=21.0.0 + - boto3>=1.28.2 + - pyodbc>=4.0.39 + - fastapi>=0.110.0 + - httpx>=0.24.1 - pyspark>=3.3.0,<3.6.0 - delta-spark>=2.2.0,<3.2.0 - grpcio>=1.48.1 - grpcio-status>=1.48.1 - googleapis-common-protos>=1.56.4 - - openjdk==11.0.15 - - openai==1.13.3 + - openjdk>=11.0.15, <12.0.0 + - openai>=1.13.3 - mkdocs-material==9.5.20 - 
mkdocs-material-extensions==1.3.1 - mkdocstrings==0.22.0 @@ -52,33 +51,35 @@ dependencies: - mkdocs-macros-plugin==1.0.1 - pygments==2.16.1 - pymdown-extensions==10.8.1 - - databricks-sql-connector==3.1.0 - - semver==3.0.0 - - xlrd==2.0.1 - - pygithub==1.59.0 - - pydantic==2.6.0 - - pyjwt==2.8.0 - - web3==6.16.0 + - databricks-sql-connector>=3.1.0 + - semver>=3.0.0 + - xlrd>=2.0.1 + - pygithub>=1.59.0 + - pydantic>=2.6.0 + - pyjwt>=2.8.0 + - web3>=6.16.0 - twine==4.0.2 - - delta-sharing-python==1.0.0 - - polars==0.18.8 - - moto[s3]==4.1.14 + - delta-sharing-python>=1.0.0 + - polars>=0.18.8 + - moto[s3]>=4.1.14 - xarray>=2023.1.0,<2023.8.0 - - ecmwf-api-client==1.6.3 - - netCDF4==1.6.4 - - black==24.1.0 + - ecmwf-api-client>=1.6.3 + - netCDF4>=1.6.4 + - black>=24.1.0 - joblib==1.3.2 - - great-expectations==0.18.8 + - great-expectations>=0.18.8 - pip: - - databricks-sdk==0.20.0 - - dependency-injector==4.41.0 - - azure-functions==1.15.0 - - azure-mgmt-eventgrid==10.2.0 - - hvac==1.1.1 - - langchain==0.1.17 + - databricks-sdk>=0.20.0 + - dependency-injector>=4.41.0 + - azure-functions>=1.15.0 + - azure-mgmt-eventgrid>=10.2.0 + - hvac>=1.1.1 + - langchain>=0.1.17 - build==0.10.0 - - deltalake==0.10.1 - - trio==0.22.1 - - sqlparams==5.1.0 - - entsoe-py==0.5.10 - - eth-typing==4.2.1 \ No newline at end of file + - deltalake>=0.10.1 + - trio>=0.22.1 + - sqlparams>=5.1.0 + - entsoe-py>=0.5.10 + - pandas + - eth-typing>=4.2.1 + - pandas>=1.5.2,<2.2.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 9abecb1b7..8b325c626 100644 --- a/setup.py +++ b/setup.py @@ -29,19 +29,19 @@ long_description = (here / "PYPI-README.md").read_text() INSTALL_REQUIRES = [ - "databricks-sql-connector==3.1.0", - "azure-identity==1.12.0", - "pandas>=1.5.2,<3.0.0", - "jinja2==3.1.2", + "databricks-sql-connector>=3.1.0", + "azure-identity>=1.12.0", + "pandas>=1.5.2,<2.2.0", + "jinja2>=3.1.2", "importlib_metadata>=1.0.0", - "semver==3.0.0", - "xlrd==2.0.1", + "semver>=3.0.0", + 
"xlrd>=2.0.1", "grpcio>=1.48.1", "grpcio-status>=1.48.1", "googleapis-common-protos>=1.56.4", - "langchain==0.1.17", - "openai==1.13.3", - "pydantic==2.6.0", + "langchain>=0.1.17", + "openai>=1.13.3", + "pydantic>=2.6.0", ] PYSPARK_PACKAGES = [ @@ -50,23 +50,23 @@ ] PIPELINE_PACKAGES = [ - "dependency-injector==4.41.0", - "databricks-sdk==0.20.0", - "azure-storage-file-datalake==12.12.0", - "azure-mgmt-storage==21.0.0", - "azure-mgmt-eventgrid==10.2.0", - "boto3==1.28.2", - "hvac==1.1.1", - "azure-keyvault-secrets==4.7.0", - "web3==6.16.0", - "polars[deltalake]==0.18.8", - "delta-sharing==1.0.0", + "dependency-injector>=4.41.0", + "databricks-sdk>=0.20.0", + "azure-storage-file-datalake>=12.12.0", + "azure-mgmt-storage>=21.0.0", + "azure-mgmt-eventgrid>=10.2.0", + "boto3>=1.28.2", + "hvac>=1.1.1", + "azure-keyvault-secrets>=4.7.0", + "web3>=6.16.0", + "polars[deltalake]>=0.18.8", + "delta-sharing>=1.0.0", "xarray>=2023.1.0,<2023.8.0", - "ecmwf-api-client==1.6.3", - "netCDF4==1.6.4", - "joblib==1.3.2", - "sqlparams==5.1.0", - "entsoe-py==0.5.10", + "ecmwf-api-client>=1.6.3", + "netCDF4>=1.6.4", + "joblib>=1.3.2", + "sqlparams>=5.1.0", + "entsoe-py>=0.5.10", ] EXTRAS_DEPENDENCIES: dict[str, list[str]] = { diff --git a/tests/api/v1/test_api_circular_average.py b/tests/api/v1/test_api_circular_average.py index 0d4fccf63..a70869c8b 100644 --- a/tests/api/v1/test_api_circular_average.py +++ b/tests/api/v1/test_api_circular_average.py @@ -73,7 +73,7 @@ async def test_api_circular_average_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -138,7 +138,7 @@ async def test_api_circular_average_post_validation_error(mocker: MockerFixture) assert response.status_code == 422 
assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_circular_standard_deviation.py b/tests/api/v1/test_api_circular_standard_deviation.py index a2ebac4ff..6239620cf 100644 --- a/tests/api/v1/test_api_circular_standard_deviation.py +++ b/tests/api/v1/test_api_circular_standard_deviation.py @@ -77,7 +77,7 @@ async def test_api_circular_standard_deviation_get_validation_error( assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -144,7 +144,7 @@ async def test_api_circular_standard_deviation_post_validation_error( assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_interpolate.py b/tests/api/v1/test_api_interpolate.py index d08191f1b..850a7b80f 100644 --- a/tests/api/v1/test_api_interpolate.py +++ b/tests/api/v1/test_api_interpolate.py @@ -73,7 +73,7 @@ async def test_api_interpolate_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -138,7 +138,7 @@ async def 
test_api_interpolate_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_latest.py b/tests/api/v1/test_api_latest.py index f5b30ec23..e591676cc 100644 --- a/tests/api/v1/test_api_latest.py +++ b/tests/api/v1/test_api_latest.py @@ -157,7 +157,7 @@ async def test_api_latest_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null}]}' ) @@ -251,7 +251,7 @@ async def test_api_latest_post_no_tags_provided_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["body"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["body"],"msg":"Field required","input":null}]}' ) @@ -283,7 +283,7 @@ async def test_api_latest_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_metadata.py b/tests/api/v1/test_api_metadata.py index 653ff7223..577d65095 100644 --- a/tests/api/v1/test_api_metadata.py +++ b/tests/api/v1/test_api_metadata.py @@ -88,7 +88,7 @@ async def 
test_api_metadata_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null}]}' ) @@ -143,7 +143,7 @@ async def test_api_metadata_post_no_tags_provided_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["body"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["body"],"msg":"Field required","input":null}]}' ) @@ -162,7 +162,7 @@ async def test_api_metadata_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","business_unit"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_plot.py b/tests/api/v1/test_api_plot.py index 717316227..ea2f9b7a6 100644 --- a/tests/api/v1/test_api_plot.py +++ b/tests/api/v1/test_api_plot.py @@ -89,7 +89,7 @@ async def test_api_plot_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -179,7 +179,7 @@ async def test_api_plot_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field 
required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_raw.py b/tests/api/v1/test_api_raw.py index 674e93c20..51edebaec 100644 --- a/tests/api/v1/test_api_raw.py +++ b/tests/api/v1/test_api_raw.py @@ -84,7 +84,7 @@ async def test_api_raw_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -162,7 +162,7 @@ async def test_api_raw_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_resample.py b/tests/api/v1/test_api_resample.py index aa428c73e..f5fafbc39 100644 --- a/tests/api/v1/test_api_resample.py +++ b/tests/api/v1/test_api_resample.py @@ -71,7 +71,7 @@ async def test_api_resample_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -134,7 +134,7 @@ async def test_api_resample_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field 
required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_sql.py b/tests/api/v1/test_api_sql.py index 2b52d9f7b..685136773 100644 --- a/tests/api/v1/test_api_sql.py +++ b/tests/api/v1/test_api_sql.py @@ -85,7 +85,7 @@ async def test_api_raw_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["body","sql_statement"],"msg":"Field required","input":{},"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["body","sql_statement"],"msg":"Field required","input":{}}]}' ) diff --git a/tests/api/v1/test_api_summary.py b/tests/api/v1/test_api_summary.py index 5b44b31bf..a731b31bf 100644 --- a/tests/api/v1/test_api_summary.py +++ b/tests/api/v1/test_api_summary.py @@ -78,7 +78,7 @@ async def test_api_summary_get_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -132,7 +132,7 @@ async def test_api_summary_post_validation_error(mocker: MockerFixture): assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/api/v1/test_api_time_weighted_average.py b/tests/api/v1/test_api_time_weighted_average.py index d9b1f40ab..01a9a6e8c 100644 --- a/tests/api/v1/test_api_time_weighted_average.py +++ 
b/tests/api/v1/test_api_time_weighted_average.py @@ -75,7 +75,7 @@ async def test_api_time_weighted_average_get_validation_error(mocker: MockerFixt assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) @@ -142,7 +142,7 @@ async def test_api_time_weighted_average_post_validation_error(mocker: MockerFix assert response.status_code == 422 assert ( actual - == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null,"url":"https://errors.pydantic.dev/2.6/v/missing"}]}' + == '{"detail":[{"type":"missing","loc":["query","start_date"],"msg":"Field required","input":null}]}' ) diff --git a/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py b/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py index c5a7a4af2..d20dc35f8 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py @@ -23,11 +23,8 @@ CreateJob, JobCluster, ClusterSpec, - JobCompute, - ComputeSpec, Task, NotebookTask, - ComputeSpecKind, AutoScale, RuntimeEngine, DataSecurityMode, diff --git a/tests/sdk/python/rtdip_sdk/pipelines/utilities/aws/test_s3_copy_utility.py b/tests/sdk/python/rtdip_sdk/pipelines/utilities/aws/test_s3_copy_utility.py index cf293f20d..c733542eb 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/utilities/aws/test_s3_copy_utility.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/utilities/aws/test_s3_copy_utility.py @@ -19,7 +19,7 @@ import sys from datetime import datetime import boto3 -from moto import mock_s3 +from moto import mock_aws sys.path.insert(0, ".") @@ -31,7 +31,7 @@ from src.sdk.python.rtdip_sdk.data_models.storage_objects import storage_objects_utils 
-@mock_s3 +@mock_aws def test_s3_copy_utility(): length: int = 1024 random.seed(datetime.now().timestamp()) From 504576f9f865fd671e21944de126fa6e69f1b8c0 Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 15 May 2024 15:19:42 +0100 Subject: [PATCH 2/6] Updates for Release mkdocs information Signed-off-by: GBBBAS --- docs/macros.py | 9 +++++++-- environment.yml | 3 +-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/macros.py b/docs/macros.py index df9971b1a..3c0f63739 100644 --- a/docs/macros.py +++ b/docs/macros.py @@ -11,14 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import os from github import Github def define_env(env): @env.macro def github_releases(owner, repo): - github_client = Github() + # due to rate limits, only get this data on release + release_env = os.environ.get("GITHUB_JOB", "dev") + if release_env != "job_deploy_mkdocs_github_pages": + return "----\r\n" + + github_client = Github(retry=0, timeout=5) repo = github_client.get_repo("{}/{}".format(owner, repo)) output = "----\r\n" for release in repo.get_releases(): diff --git a/environment.yml b/environment.yml index 0d9e155ec..e75d3cffa 100644 --- a/environment.yml +++ b/environment.yml @@ -80,6 +80,5 @@ dependencies: - trio>=0.22.1 - sqlparams>=5.1.0 - entsoe-py>=0.5.10 - - pandas - - eth-typing>=4.2.1 + - eth-typing>=4.2.3 - pandas>=1.5.2,<2.2.0 From 9b5b4a174d0022fb5d7fb85f1c30d06fec27c18b Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 15 May 2024 15:41:29 +0100 Subject: [PATCH 3/6] Updates for tests Signed-off-by: GBBBAS --- environment.yml | 2 +- setup.py | 2 +- tests/docs/test_macros.py | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/environment.yml b/environment.yml index e75d3cffa..f32cd41de 100644 --- a/environment.yml +++ b/environment.yml @@ -57,7 +57,6 @@ dependencies: - pygithub>=1.59.0 - 
pydantic>=2.6.0 - pyjwt>=2.8.0 - - web3>=6.16.0 - twine==4.0.2 - delta-sharing-python>=1.0.0 - polars>=0.18.8 @@ -80,5 +79,6 @@ dependencies: - trio>=0.22.1 - sqlparams>=5.1.0 - entsoe-py>=0.5.10 + - web3>=6.18.0 - eth-typing>=4.2.3 - pandas>=1.5.2,<2.2.0 diff --git a/setup.py b/setup.py index 8b325c626..537b5f671 100644 --- a/setup.py +++ b/setup.py @@ -58,7 +58,7 @@ "boto3>=1.28.2", "hvac>=1.1.1", "azure-keyvault-secrets>=4.7.0", - "web3>=6.16.0", + "web3>=6.18.0", "polars[deltalake]>=0.18.8", "delta-sharing>=1.0.0", "xarray>=2023.1.0,<2023.8.0", diff --git a/tests/docs/test_macros.py b/tests/docs/test_macros.py index 5f07c8686..d3fd7a51c 100644 --- a/tests/docs/test_macros.py +++ b/tests/docs/test_macros.py @@ -15,6 +15,7 @@ import sys sys.path.insert(0, ".") +import os from datetime import datetime from pytest_mock import MockerFixture from docs.macros import define_env @@ -52,6 +53,7 @@ def render_markdown(self, text, context=None): def test_github_releases(mocker: MockerFixture): + os.environ["GITHUB_JOB"] = "job_deploy_mkdocs_github_pages" mocker.patch("docs.macros.Github", return_value=MockGithub()) config = load_config() env = MacrosPlugin() From 9f851820e680ba0c6974ab16cc60f2519f68466f Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Mon, 20 May 2024 08:54:46 +0100 Subject: [PATCH 4/6] Updates for Langchain Signed-off-by: GBBBAS --- environment.yml | 61 +++++++++++++++++++++++++------------------------ setup.py | 45 ++++++++++++++++++------------------ 2 files changed, 54 insertions(+), 52 deletions(-) diff --git a/environment.yml b/environment.yml index f32cd41de..4e8e9b3c8 100644 --- a/environment.yml +++ b/environment.yml @@ -26,24 +26,24 @@ dependencies: - pylint==2.17.4 - pip>=23.1.2 - turbodbc==4.11.0 - - numpy>=1.23.4 - - oauthlib>=3.2.2 + - numpy>=1.23.4,<2.0.0 + - oauthlib>=3.2.2,<4.0.0 - cryptography>=38.0.3 - - azure-identity>=1.12.0 - - azure-storage-file-datalake>=12.12.0 - - azure-keyvault-secrets>=4.7.0 + - azure-identity>=1.12.0,<2.0.0 + - 
azure-storage-file-datalake>=12.12.0,<13.0.0 + - azure-keyvault-secrets>=4.7.0,<5.0.0 - azure-mgmt-storage>=21.0.0 - - boto3>=1.28.2 - - pyodbc>=4.0.39 - - fastapi>=0.110.0 - - httpx>=0.24.1 + - boto3>=1.28.2,<2.0.0 + - pyodbc>=4.0.39,<5.0.0 + - fastapi>=0.110.0,<1.0.0 + - httpx>=0.24.1,<1.0.0 - pyspark>=3.3.0,<3.6.0 - delta-spark>=2.2.0,<3.3.0 - grpcio>=1.48.1 - grpcio-status>=1.48.1 - googleapis-common-protos>=1.56.4 - - openjdk>=11.0.15, <12.0.0 - - openai>=1.13.3 + - openjdk>=11.0.15,<12.0.0 + - openai>=1.13.3,<2.0.0 - mkdocs-material==9.5.20 - mkdocs-material-extensions==1.3.1 - mkdocstrings==0.22.0 @@ -51,34 +51,35 @@ dependencies: - mkdocs-macros-plugin==1.0.1 - pygments==2.16.1 - pymdown-extensions==10.8.1 - - databricks-sql-connector>=3.1.0 - - semver>=3.0.0 + - databricks-sql-connector>=3.1.0,<4.0.0 + - semver>=3.0.0,<4.0.0 - xlrd>=2.0.1 - pygithub>=1.59.0 - - pydantic>=2.6.0 - - pyjwt>=2.8.0 + - pydantic>=2.6.0,<3.0.0 + - pyjwt>=2.8.0,<3.0.0 - twine==4.0.2 - - delta-sharing-python>=1.0.0 - - polars>=0.18.8 + - delta-sharing-python>=1.0.0,<2.0.0 + - polars>=0.18.8,<1.0.0 - moto[s3]>=4.1.14 - xarray>=2023.1.0,<2023.8.0 - - ecmwf-api-client>=1.6.3 - - netCDF4>=1.6.4 + - ecmwf-api-client>=1.6.3,<2.0.0 + - netCDF4>=1.6.4,<2.0.0 - black>=24.1.0 - - joblib==1.3.2 - - great-expectations>=0.18.8 + - joblib==1.3.2,<2.0.0 + - great-expectations>=0.18.8,<1.0.0 - pip: - - databricks-sdk>=0.20.0 - - dependency-injector>=4.41.0 - - azure-functions>=1.15.0 + - databricks-sdk>=0.20.0,<1.0.0 + - dependency-injector>=4.41.0,<5.0.0 + - azure-functions>=1.15.0,<2.0.0 - azure-mgmt-eventgrid>=10.2.0 - hvac>=1.1.1 - - langchain>=0.1.17 + - langchain>=0.2.0,<0.3.0 + - langchain-community>=0.2.0,<0.3.0 - build==0.10.0 - - deltalake>=0.10.1 + - deltalake>=0.10.1,<1.0.0 - trio>=0.22.1 - - sqlparams>=5.1.0 - - entsoe-py>=0.5.10 - - web3>=6.18.0 - - eth-typing>=4.2.3 + - sqlparams>=5.1.0,<6.0.0 + - entsoe-py>=0.5.10,<1.0.0 + - web3>=6.18.0,<7.0.0 + - eth-typing>=4.2.3,<5.0.0 - 
pandas>=1.5.2,<2.2.0 diff --git a/setup.py b/setup.py index 537b5f671..50f59eda8 100644 --- a/setup.py +++ b/setup.py @@ -29,19 +29,20 @@ long_description = (here / "PYPI-README.md").read_text() INSTALL_REQUIRES = [ - "databricks-sql-connector>=3.1.0", - "azure-identity>=1.12.0", + "databricks-sql-connector>=3.1.0,<4.0.0", + "azure-identity>=1.12.0,<2.0.0", "pandas>=1.5.2,<2.2.0", - "jinja2>=3.1.2", - "importlib_metadata>=1.0.0", - "semver>=3.0.0", - "xlrd>=2.0.1", + "jinja2>=3.1.2,<4.0.0", + "importlib_metadata>=1.0.0,<2.0.0", + "semver>=3.0.0,<4.0.0", + "xlrd>=2.0.1,<3.0.0", "grpcio>=1.48.1", "grpcio-status>=1.48.1", "googleapis-common-protos>=1.56.4", - "langchain>=0.1.17", - "openai>=1.13.3", - "pydantic>=2.6.0", + "langchain>=0.2.0,<0.3.0", + "langchain-community>=0.2.0,<0.3.0", + "openai>=1.13.3,<2.0.0", + "pydantic>=2.6.0,<3.0.0", ] PYSPARK_PACKAGES = [ @@ -50,23 +51,23 @@ ] PIPELINE_PACKAGES = [ - "dependency-injector>=4.41.0", - "databricks-sdk>=0.20.0", - "azure-storage-file-datalake>=12.12.0", + "dependency-injector>=4.41.0,<5.0.0", + "databricks-sdk>=0.20.0,<1.0.0", + "azure-storage-file-datalake>=12.12.0,<13.0.0", "azure-mgmt-storage>=21.0.0", "azure-mgmt-eventgrid>=10.2.0", - "boto3>=1.28.2", + "boto3>=1.28.2,<2.0.0", "hvac>=1.1.1", - "azure-keyvault-secrets>=4.7.0", - "web3>=6.18.0", - "polars[deltalake]>=0.18.8", - "delta-sharing>=1.0.0", + "azure-keyvault-secrets>=4.7.0,<5.0.0", + "web3>=6.18.0,<7.0.0", + "polars[deltalake]>=0.18.8,<1.0.0", + "delta-sharing>=1.0.0,<2.0.0", "xarray>=2023.1.0,<2023.8.0", - "ecmwf-api-client>=1.6.3", - "netCDF4>=1.6.4", - "joblib>=1.3.2", - "sqlparams>=5.1.0", - "entsoe-py>=0.5.10", + "ecmwf-api-client>=1.6.3,<2.0.0", + "netCDF4>=1.6.4,<2.0.0", + "joblib>=1.3.2,<2.0.0", + "sqlparams>=5.1.0,<6.0.0", + "entsoe-py>=0.5.10,<1.0.0", ] EXTRAS_DEPENDENCIES: dict[str, list[str]] = { From 5a796a00414594d9d29a20803fcdbce4c4eef640 Mon Sep 17 00:00:00 2001 From: cching95 <73163191+cching95@users.noreply.github.com> Date: Mon, 20 
May 2024 16:12:18 +0100 Subject: [PATCH 5/6] AIO Transformer and Unit Test (#745) * add aio transformer Signed-off-by: Chloe Ching * aio unit test Signed-off-by: Chloe Ching * unit test Signed-off-by: Chloe Ching * add mend Signed-off-by: Chloe Ching * unit tests for aio transformer Signed-off-by: Chloe Ching * add aio transformer to init Signed-off-by: Chloe Ching * add line under spark.py Signed-off-by: Chloe Ching * update eventime in unit tests Signed-off-by: Chloe Ching --------- Signed-off-by: Chloe Ching --- .whitesource | 13 +- .../pipelines/_pipeline_utils/spark.py | 10 ++ .../pipelines/transformers/__init__.py | 1 + .../transformers/spark/aio_json_to_pcdm.py | 114 ++++++++++++++++++ .../spark/test_aio_opcua_json_to_pcdm.py | 79 ++++++++++++ 5 files changed, 205 insertions(+), 12 deletions(-) create mode 100644 src/sdk/python/rtdip_sdk/pipelines/transformers/spark/aio_json_to_pcdm.py create mode 100644 tests/sdk/python/rtdip_sdk/pipelines/transformers/spark/test_aio_opcua_json_to_pcdm.py diff --git a/.whitesource b/.whitesource index 9c7ae90b4..a76605899 100644 --- a/.whitesource +++ b/.whitesource @@ -1,14 +1,3 @@ { - "scanSettings": { - "baseBranches": [] - }, - "checkRunSettings": { - "vulnerableCheckRunConclusionLevel": "failure", - "displayMode": "diff", - "useMendCheckNames": true - }, - "issueSettings": { - "minSeverityLevel": "LOW", - "issueType": "DEPENDENCY" - } + "settingsInheritedFrom": "sede-x/whitesource-config@main" } \ No newline at end of file diff --git a/src/sdk/python/rtdip_sdk/pipelines/_pipeline_utils/spark.py b/src/sdk/python/rtdip_sdk/pipelines/_pipeline_utils/spark.py index 5bd278e4b..1107deff4 100644 --- a/src/sdk/python/rtdip_sdk/pipelines/_pipeline_utils/spark.py +++ b/src/sdk/python/rtdip_sdk/pipelines/_pipeline_utils/spark.py @@ -610,3 +610,13 @@ def get_dbutils( StructField("sourceName", StringType(), True), ] ) + +AIO_SCHEMA = MapType( + StringType(), + StructType( + [ + StructField("SourceTimestamp", TimestampType(), True), 
+ StructField("Value", StringType(), True), + ] + ), +) diff --git a/src/sdk/python/rtdip_sdk/pipelines/transformers/__init__.py b/src/sdk/python/rtdip_sdk/pipelines/transformers/__init__.py index ecb4062f6..542eeedea 100644 --- a/src/sdk/python/rtdip_sdk/pipelines/transformers/__init__.py +++ b/src/sdk/python/rtdip_sdk/pipelines/transformers/__init__.py @@ -18,6 +18,7 @@ from .spark.fledge_opcua_json_to_pcdm import * from .spark.ssip_pi_binary_file_to_pcdm import * from .spark.ssip_pi_binary_json_to_pcdm import * +from .spark.aio_json_to_pcdm import * from .spark.iso import * from .spark.edgex_opcua_json_to_pcdm import * from .spark.ecmwf.nc_extractbase_to_weather_data_model import * diff --git a/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/aio_json_to_pcdm.py b/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/aio_json_to_pcdm.py new file mode 100644 index 000000000..3ad52c3a7 --- /dev/null +++ b/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/aio_json_to_pcdm.py @@ -0,0 +1,114 @@ +# Copyright 2022 RTDIP +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pyspark.sql import DataFrame +from pyspark.sql.functions import from_json, col, explode, when, lit, expr + +from ..interfaces import TransformerInterface +from ..._pipeline_utils.models import Libraries, SystemType +from ..._pipeline_utils.spark import AIO_SCHEMA + + +class AIOJsonToPCDMTransformer(TransformerInterface): + """ + Converts a Spark Dataframe column containing a json string created by AIO to the Process Control Data Model. + + Example + -------- + ```python + from rtdip_sdk.pipelines.transformers import AIOJsonToPCDMTransformer + + aio_json_to_pcdm_transformer = AIOJsonToPCDMTransformer( + data=df, + source_column_name="body", + status_null_value="Good", + change_type_value="insert" + ) + + result = aio_json_to_pcdm_transformer.transform() + ``` + + Parameters: + data (DataFrame): Dataframe containing the column with Json AIO data + source_column_name (str): Spark Dataframe column containing the OPC Publisher Json OPC UA data + status_null_value (str): If populated, will replace 'Good' in the Status column with the specified value. + change_type_value (optional str): If populated, will replace 'insert' in the ChangeType column with the specified value. 
+ """ + + data: DataFrame + source_column_name: str + status_null_value: str + change_type_value: str + + def __init__( + self, + data: DataFrame, + source_column_name: str, + status_null_value: str = "Good", + change_type_value: str = "insert", + ) -> None: # NOSONAR + self.data = data + self.source_column_name = source_column_name + self.status_null_value = status_null_value + self.change_type_value = change_type_value + + @staticmethod + def system_type(): + """ + Attributes: + SystemType (Environment): Requires PYSPARK + """ + return SystemType.PYSPARK + + @staticmethod + def libraries(): + libraries = Libraries() + return libraries + + @staticmethod + def settings() -> dict: + return {} + + def pre_transform_validation(self): + return True + + def post_transform_validation(self): + return True + + def transform(self) -> DataFrame: + """ + Returns: + DataFrame: A dataframe with the specified column converted to PCDM + """ + df = ( + self.data.select( + from_json(col(self.source_column_name), "Payload STRING").alias("body") + ) + .select(from_json(expr("body.Payload"), AIO_SCHEMA).alias("body")) + .select(explode("body")) + .select(col("key").alias("TagName"), "value.*") + .select(col("SourceTimestamp").alias("EventTime"), "TagName", "Value") + .withColumn("Status", lit(self.status_null_value)) + .withColumn( + "ValueType", + when(col("Value").cast("float").isNotNull(), "float").otherwise( + "string" + ), + ) + .withColumn("ChangeType", lit(self.change_type_value)) + ) + + return df.select( + "EventTime", "TagName", "Status", "Value", "ValueType", "ChangeType" + ) diff --git a/tests/sdk/python/rtdip_sdk/pipelines/transformers/spark/test_aio_opcua_json_to_pcdm.py b/tests/sdk/python/rtdip_sdk/pipelines/transformers/spark/test_aio_opcua_json_to_pcdm.py new file mode 100644 index 000000000..2345742b6 --- /dev/null +++ b/tests/sdk/python/rtdip_sdk/pipelines/transformers/spark/test_aio_opcua_json_to_pcdm.py @@ -0,0 +1,79 @@ +# Copyright 2022 RTDIP +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +sys.path.insert(0, ".") +from src.sdk.python.rtdip_sdk.pipelines.transformers.spark.aio_json_to_pcdm import ( + AIOJsonToPCDMTransformer, +) +from src.sdk.python.rtdip_sdk.pipelines._pipeline_utils.models import ( + Libraries, + SystemType, +) + +from pyspark.sql import SparkSession, DataFrame +from pyspark.sql.types import StructType, StructField, StringType, TimestampType +from dateutil import parser + + +def test_aio_json_to_pcdm(spark_session: SparkSession): + aio_json_data = '{"SequenceNumber":12345,"Timestamp":"2024-05-13T13:05:10.975317Z","DataSetWriterName":"test","MessageType":"test","Payload":{"test_tag1":{"SourceTimestamp":"2024-05-13T13:05:19.7278555Z","Value":67},"test_tag2":{"SourceTimestamp":"2024-05-13T13:05:19.7288616Z","Value":165.5}}}' + aio_df: DataFrame = spark_session.createDataFrame([aio_json_data], "string").toDF( + "body" + ) + + expected_schema = StructType( + [ + StructField("EventTime", TimestampType(), True), + StructField("TagName", StringType(), False), + StructField("Status", StringType(), False), + StructField("Value", StringType(), True), + StructField("ValueType", StringType(), False), + StructField("ChangeType", StringType(), False), + ] + ) + + expected_data = [ + { + "TagName": "test_tag1", + "Value": "67", + "EventTime": parser.parse("2024-05-13T13:05:19.7278555Z"), + "Status": "Good", + "ValueType": "float", + "ChangeType": "insert", + }, + { + "TagName": "test_tag2", + "Value": 
"165.5", + "EventTime": parser.parse("2024-05-13T13:05:19.7288616Z"), + "Status": "Good", + "ValueType": "float", + "ChangeType": "insert", + }, + ] + + expected_df: DataFrame = spark_session.createDataFrame( + schema=expected_schema, data=expected_data + ) + + eventhub_json_to_aio_transformer = AIOJsonToPCDMTransformer( + data=aio_df, source_column_name="body" + ) + actual_df = eventhub_json_to_aio_transformer.transform() + + assert eventhub_json_to_aio_transformer.system_type() == SystemType.PYSPARK + assert isinstance(eventhub_json_to_aio_transformer.libraries(), Libraries) + assert expected_schema == actual_df.schema + assert expected_df.collect() == actual_df.collect() From 08bdb98f800d3eaaab60b33c732a03ae1c71c14b Mon Sep 17 00:00:00 2001 From: GBBBAS <42962356+GBBBAS@users.noreply.github.com> Date: Tue, 21 May 2024 14:11:14 +0100 Subject: [PATCH 6/6] Update deprecated datetime.utcnow() (#748) * Update datetime utcnow Signed-off-by: GBBBAS * Update whitesource file Signed-off-by: GBBBAS --------- Signed-off-by: GBBBAS --- .whitesource | 13 ++++- .../spark/iso/pjm_historical_load_iso.py | 10 ++-- .../spark/iso/pjm_historical_pricing_iso.py | 10 ++-- .../raw_forecast_to_weather_data_model.py | 4 +- tests/api/v1/test_api_circular_average.py | 44 +++++++++++++---- .../test_api_circular_standard_deviation.py | 44 +++++++++++++---- tests/api/v1/test_api_interpolate.py | 44 +++++++++++++---- .../api/v1/test_api_interpolation_at_time.py | 36 ++++++++++---- tests/api/v1/test_api_latest.py | 48 ++++++++++--------- tests/api/v1/test_api_metadata.py | 9 ++-- tests/api/v1/test_api_plot.py | 20 ++++---- tests/api/v1/test_api_raw.py | 20 ++++---- tests/api/v1/test_api_resample.py | 44 +++++++++++++---- tests/api/v1/test_api_sql.py | 11 +++-- tests/api/v1/test_api_summary.py | 6 ++- .../api/v1/test_api_time_weighted_average.py | 44 +++++++++++++---- .../spark/iso/test_miso_daily_load_iso.py | 4 +- .../iso/test_miso_historical_load_iso.py | 4 +- 
.../spark/iso/test_pjm_historical_load_iso.py | 4 +- .../iso/test_pjm_historical_pricing_iso.py | 4 +- 20 files changed, 302 insertions(+), 121 deletions(-) diff --git a/.whitesource b/.whitesource index a76605899..9c7ae90b4 100644 --- a/.whitesource +++ b/.whitesource @@ -1,3 +1,14 @@ { - "settingsInheritedFrom": "sede-x/whitesource-config@main" + "scanSettings": { + "baseBranches": [] + }, + "checkRunSettings": { + "vulnerableCheckRunConclusionLevel": "failure", + "displayMode": "diff", + "useMendCheckNames": true + }, + "issueSettings": { + "minSeverityLevel": "LOW", + "issueType": "DEPENDENCY" + } } \ No newline at end of file diff --git a/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_load_iso.py b/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_load_iso.py index 7905c670f..e4ce5ea5c 100644 --- a/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_load_iso.py +++ b/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_load_iso.py @@ -14,7 +14,7 @@ import logging import time -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from io import BytesIO import pandas as pd @@ -172,13 +172,17 @@ def _validate_options(self) -> bool: f"Unable to parse End date. Please specify in {self.user_datetime_format} format." 
) - if start_date > datetime.utcnow() - timedelta(days=1): + if start_date > datetime.now(timezone.utc).replace(tzinfo=None) - timedelta( + days=1 + ): raise ValueError("Start date can't be in future.") if start_date > end_date: raise ValueError("Start date can't be ahead of End date.") - if end_date > datetime.utcnow() - timedelta(days=1): + if end_date > datetime.now(timezone.utc).replace(tzinfo=None) - timedelta( + days=1 + ): raise ValueError("End date can't be in future.") if self.sleep_duration < 0: diff --git a/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_pricing_iso.py b/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_pricing_iso.py index d3120e41b..8df0c52f7 100644 --- a/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_pricing_iso.py +++ b/src/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/pjm_historical_pricing_iso.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime +from datetime import datetime, timezone import logging import time import pandas as pd @@ -142,13 +142,17 @@ def _validate_options(self) -> bool: f"Unable to parse End date. Please specify in {self.user_datetime_format} format." 
) - if start_date > datetime.utcnow() - timedelta(days=1): + if start_date > datetime.now(timezone.utc).replace(tzinfo=None) - timedelta( + days=1 + ): raise ValueError("Start date can't be in future.") if start_date > end_date: raise ValueError("Start date can't be ahead of End date.") - if end_date > datetime.utcnow() - timedelta(days=1): + if end_date > datetime.now(timezone.utc).replace(tzinfo=None) - timedelta( + days=1 + ): raise ValueError("End date can't be in future.") return True diff --git a/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/the_weather_company/raw_forecast_to_weather_data_model.py b/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/the_weather_company/raw_forecast_to_weather_data_model.py index 557f3e5ec..108d2fa0e 100644 --- a/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/the_weather_company/raw_forecast_to_weather_data_model.py +++ b/src/sdk/python/rtdip_sdk/pipelines/transformers/spark/the_weather_company/raw_forecast_to_weather_data_model.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime +from datetime import datetime, timezone from pyspark.sql import DataFrame, SparkSession from pyspark.sql.functions import when, substring, lit, col, concat @@ -91,7 +91,7 @@ def transform(self) -> DataFrame: self.pre_transform_validation() - processed_date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + processed_date = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S") df = ( self.data.withColumn("WeatherDay", substring("FcstValidLocal", 0, 10)) diff --git a/tests/api/v1/test_api_circular_average.py b/tests/api/v1/test_api_circular_average.py index a70869c8b..bc7e01307 100644 --- a/tests/api/v1/test_api_circular_average.py +++ b/tests/api/v1/test_api_circular_average.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( CIRCULAR_AVERAGE_MOCKED_PARAMETER_DICT, CIRCULAR_AVERAGE_MOCKED_PARAMETER_ERROR_DICT, @@ -36,7 +36,11 @@ async def test_api_circular_average_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.5]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.5], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -49,7 +53,8 @@ async def test_api_circular_average_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -58,7 +63,11 @@ async def test_api_circular_average_get_success(mocker: MockerFixture): async def test_api_circular_average_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - 
{"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -79,7 +88,11 @@ async def test_api_circular_average_get_validation_error(mocker: MockerFixture): async def test_api_circular_average_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -99,7 +112,11 @@ async def test_api_circular_average_get_error(mocker: MockerFixture): async def test_api_circular_average_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.5]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.5], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -113,7 +130,8 @@ async def test_api_circular_average_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -122,7 +140,11 @@ async def test_api_circular_average_post_success(mocker: MockerFixture): async def test_api_circular_average_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -144,7 
+166,11 @@ async def test_api_circular_average_post_validation_error(mocker: MockerFixture) async def test_api_circular_average_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/api/v1/test_api_circular_standard_deviation.py b/tests/api/v1/test_api_circular_standard_deviation.py index 6239620cf..76cf36f54 100644 --- a/tests/api/v1/test_api_circular_standard_deviation.py +++ b/tests/api/v1/test_api_circular_standard_deviation.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( CIRCULAR_AVERAGE_MOCKED_PARAMETER_DICT, CIRCULAR_AVERAGE_MOCKED_PARAMETER_ERROR_DICT, @@ -38,7 +38,11 @@ async def test_api_circular_standard_deviation_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.5]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.5], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -51,7 +55,8 @@ async def test_api_circular_standard_deviation_get_success(mocker: MockerFixture actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -62,7 +67,11 @@ async def test_api_circular_standard_deviation_get_validation_error( mocker: MockerFixture, ): test_data = pd.DataFrame( - {"EventTime": 
[datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -83,7 +92,11 @@ async def test_api_circular_standard_deviation_get_validation_error( async def test_api_circular_standard_deviation_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -103,7 +116,11 @@ async def test_api_circular_standard_deviation_get_error(mocker: MockerFixture): async def test_api_circular_standard_deviation_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.5]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.5], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -117,7 +134,8 @@ async def test_api_circular_standard_deviation_post_success(mocker: MockerFixtur actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -128,7 +146,11 @@ async def test_api_circular_standard_deviation_post_validation_error( mocker: MockerFixture, ): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -150,7 +172,11 @@ async def 
test_api_circular_standard_deviation_post_validation_error( async def test_api_circular_standard_deviation_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/api/v1/test_api_interpolate.py b/tests/api/v1/test_api_interpolate.py index 850a7b80f..ddde12b35 100644 --- a/tests/api/v1/test_api_interpolate.py +++ b/tests/api/v1/test_api_interpolate.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( INTERPOLATE_MOCKED_PARAMETER_DICT, INTERPOLATE_MOCKED_PARAMETER_ERROR_DICT, @@ -36,7 +36,11 @@ async def test_api_interpolate_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -49,7 +53,8 @@ async def test_api_interpolate_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -58,7 +63,11 @@ async def test_api_interpolate_get_success(mocker: MockerFixture): async def test_api_interpolate_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": 
[datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -79,7 +88,11 @@ async def test_api_interpolate_get_validation_error(mocker: MockerFixture): async def test_api_interpolate_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -99,7 +112,11 @@ async def test_api_interpolate_get_error(mocker: MockerFixture): async def test_api_interpolate_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -113,7 +130,8 @@ async def test_api_interpolate_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -122,7 +140,11 @@ async def test_api_interpolate_post_success(mocker: MockerFixture): async def test_api_interpolate_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -144,7 +166,11 @@ async def test_api_interpolate_post_validation_error(mocker: MockerFixture): async def 
test_api_interpolate_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/api/v1/test_api_interpolation_at_time.py b/tests/api/v1/test_api_interpolation_at_time.py index bfb7646f7..40f99f72d 100644 --- a/tests/api/v1/test_api_interpolation_at_time.py +++ b/tests/api/v1/test_api_interpolation_at_time.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( INTERPOLATION_AT_TIME_MOCKED_PARAMETER_DICT, INTERPOLATION_AT_TIME_POST_MOCKED_PARAMETER_DICT, @@ -35,7 +35,11 @@ async def test_api_interpolation_at_time_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -48,7 +52,8 @@ async def test_api_interpolation_at_time_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -58,7 +63,7 @@ async def test_api_interpolation_at_time_get_success(mocker: MockerFixture): # TODO: Readd this test when this github issue is resolved https://github.com/tiangolo/fastapi/issues/9920 # async def test_api_interpolation_at_time_get_validation_error(mocker: MockerFixture): # test_data 
= pd.DataFrame( -# {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} +# {"EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Value": [1.01]} # ) # mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -79,7 +84,11 @@ async def test_api_interpolation_at_time_get_success(mocker: MockerFixture): async def test_api_interpolation_at_time_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -99,7 +108,11 @@ async def test_api_interpolation_at_time_get_error(mocker: MockerFixture): async def test_api_interpolation_at_time_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -113,7 +126,8 @@ async def test_api_interpolation_at_time_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -123,7 +137,7 @@ async def test_api_interpolation_at_time_post_success(mocker: MockerFixture): # TODO: Readd this test when this github issue is resolved https://github.com/tiangolo/fastapi/issues/9920 # async def test_api_interpolation_at_time_post_validation_error(mocker: MockerFixture): # test_data = pd.DataFrame( -# {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} +# 
{"EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Value": [1.01]} # ) # mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -145,7 +159,11 @@ async def test_api_interpolation_at_time_post_success(mocker: MockerFixture): async def test_api_interpolation_at_time_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/api/v1/test_api_latest.py b/tests/api/v1/test_api_latest.py index e591676cc..0a9bb6958 100644 --- a/tests/api/v1/test_api_latest.py +++ b/tests/api/v1/test_api_latest.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( METADATA_MOCKED_PARAMETER_DICT, METADATA_MOCKED_PARAMETER_ERROR_DICT, @@ -38,11 +38,11 @@ async def test_api_latest_get_tags_provided_success(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -57,7 +57,8 @@ async def test_api_latest_get_tags_provided_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -70,7 +71,7 @@ async def 
test_api_latest_get_no_good_values_tags_provided_success( test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], @@ -89,7 +90,8 @@ async def test_api_latest_get_no_good_values_tags_provided_success( actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -100,11 +102,11 @@ async def test_api_latest_get_no_tags_provided_success(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -123,7 +125,8 @@ async def test_api_latest_get_no_tags_provided_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -134,11 +137,11 @@ async def test_api_latest_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } 
@@ -165,11 +168,11 @@ async def test_api_latest_get_error(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -193,11 +196,11 @@ async def test_api_latest_post_tags_provided_success(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -215,7 +218,8 @@ async def test_api_latest_post_tags_provided_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -226,11 +230,11 @@ async def test_api_latest_post_no_tags_provided_error(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -259,11 +263,11 @@ async def test_api_latest_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": 
["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } @@ -291,11 +295,11 @@ async def test_api_raw_post_error(mocker: MockerFixture): test_data = pd.DataFrame( { "TagName": ["TestTag"], - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "Status": ["Good"], "Value": ["1.01"], "ValueType": ["string"], - "GoodEventTime": [datetime.utcnow()], + "GoodEventTime": [datetime.now(timezone.utc)], "GoodValue": ["1.01"], "GoodValueType": ["string"], } diff --git a/tests/api/v1/test_api_metadata.py b/tests/api/v1/test_api_metadata.py index 577d65095..ca6d7dd35 100644 --- a/tests/api/v1/test_api_metadata.py +++ b/tests/api/v1/test_api_metadata.py @@ -46,7 +46,8 @@ async def test_api_metadata_get_tags_provided_success(mocker: MockerFixture): actual = response.text expected = TEST_DATA.to_json(orient="table", index=False) expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -67,7 +68,8 @@ async def test_api_metadata_get_no_tags_provided_success(mocker: MockerFixture): actual = response.text expected = TEST_DATA.to_json(orient="table", index=False) expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -120,7 +122,8 @@ async def test_api_metadata_post_tags_provided_success(mocker: MockerFixture): actual = response.text expected = TEST_DATA.to_json(orient="table", index=False) expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + 
',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 diff --git a/tests/api/v1/test_api_plot.py b/tests/api/v1/test_api_plot.py index ea2f9b7a6..f2058f20d 100644 --- a/tests/api/v1/test_api_plot.py +++ b/tests/api/v1/test_api_plot.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( PLOT_MOCKED_PARAMETER_DICT, PLOT_MOCKED_PARAMETER_ERROR_DICT, @@ -37,7 +37,7 @@ async def test_api_plot_get_success(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], @@ -56,7 +56,8 @@ async def test_api_plot_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -66,7 +67,7 @@ async def test_api_plot_get_success(mocker: MockerFixture): async def test_api_plot_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], @@ -96,7 +97,7 @@ async def test_api_plot_get_validation_error(mocker: MockerFixture): async def test_api_pot_get_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], @@ -123,7 +124,7 @@ async def test_api_pot_get_error(mocker: MockerFixture): async def test_api_plot_post_success(mocker: MockerFixture): test_data = pd.DataFrame( { 
- "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], @@ -145,7 +146,8 @@ async def test_api_plot_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -155,7 +157,7 @@ async def test_api_plot_post_success(mocker: MockerFixture): async def test_api_plot_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], @@ -186,7 +188,7 @@ async def test_api_plot_post_validation_error(mocker: MockerFixture): async def test_api_plot_post_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Average": [1.01], "Min": [1.01], diff --git a/tests/api/v1/test_api_raw.py b/tests/api/v1/test_api_raw.py index 51edebaec..c0e910801 100644 --- a/tests/api/v1/test_api_raw.py +++ b/tests/api/v1/test_api_raw.py @@ -16,7 +16,7 @@ from pytest_mock import MockerFixture import pandas as pd import numpy as np -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( RAW_MOCKED_PARAMETER_DICT, RAW_MOCKED_PARAMETER_ERROR_DICT, @@ -42,7 +42,7 @@ async def test_api_raw_get_success(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -57,7 +57,8 @@ async def test_api_raw_get_success(mocker: MockerFixture): actual = response.text expected 
= test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -67,7 +68,7 @@ async def test_api_raw_get_success(mocker: MockerFixture): async def test_api_raw_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -91,7 +92,7 @@ async def test_api_raw_get_validation_error(mocker: MockerFixture): async def test_api_raw_get_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -114,7 +115,7 @@ async def test_api_raw_get_error(mocker: MockerFixture): async def test_api_raw_post_success(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -132,7 +133,8 @@ async def test_api_raw_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -142,7 +144,7 @@ async def test_api_raw_post_success(mocker: MockerFixture): async def test_api_raw_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -169,7 
+171,7 @@ async def test_api_raw_post_validation_error(mocker: MockerFixture): async def test_api_raw_post_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], diff --git a/tests/api/v1/test_api_resample.py b/tests/api/v1/test_api_resample.py index f5fafbc39..fb37a287e 100644 --- a/tests/api/v1/test_api_resample.py +++ b/tests/api/v1/test_api_resample.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( RESAMPLE_MOCKED_PARAMETER_DICT, RESAMPLE_MOCKED_PARAMETER_ERROR_DICT, @@ -36,7 +36,11 @@ async def test_api_resample_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -47,7 +51,8 @@ async def test_api_resample_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -56,7 +61,11 @@ async def test_api_resample_get_success(mocker: MockerFixture): async def test_api_resample_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -77,7 +86,11 @@ async def 
test_api_resample_get_validation_error(mocker: MockerFixture): async def test_api_resample_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -95,7 +108,11 @@ async def test_api_resample_get_error(mocker: MockerFixture): async def test_api_resample_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -109,7 +126,8 @@ async def test_api_resample_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -118,7 +136,11 @@ async def test_api_resample_post_success(mocker: MockerFixture): async def test_api_resample_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -140,7 +162,11 @@ async def test_api_resample_post_validation_error(mocker: MockerFixture): async def test_api_resample_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": 
["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/api/v1/test_api_sql.py b/tests/api/v1/test_api_sql.py index 685136773..4e0a1aaa9 100644 --- a/tests/api/v1/test_api_sql.py +++ b/tests/api/v1/test_api_sql.py @@ -16,7 +16,7 @@ from pytest_mock import MockerFixture import pandas as pd import numpy as np -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( SQL_POST_MOCKED_PARAMETER_DICT, SQL_POST_BODY_MOCKED_PARAMETER_DICT, @@ -37,7 +37,7 @@ async def test_api_raw_post_success(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -55,7 +55,8 @@ async def test_api_raw_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":100,"offset":100,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":100,"offset":100,"next":null}}' ) assert response.status_code == 200 @@ -65,7 +66,7 @@ async def test_api_raw_post_success(mocker: MockerFixture): async def test_api_raw_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], @@ -92,7 +93,7 @@ async def test_api_raw_post_validation_error(mocker: MockerFixture): async def test_api_raw_post_error(mocker: MockerFixture): test_data = pd.DataFrame( { - "EventTime": [datetime.utcnow()], + "EventTime": [datetime.now(timezone.utc)], "TagName": ["TestTag"], "Status": ["Good"], "Value": [1.01], diff --git a/tests/api/v1/test_api_summary.py b/tests/api/v1/test_api_summary.py index a731b31bf..813d29c55 
100644 --- a/tests/api/v1/test_api_summary.py +++ b/tests/api/v1/test_api_summary.py @@ -57,7 +57,8 @@ async def test_api_summary_get_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -110,7 +111,8 @@ async def test_api_summary_post_success(mocker: MockerFixture): actual = response.text expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 diff --git a/tests/api/v1/test_api_time_weighted_average.py b/tests/api/v1/test_api_time_weighted_average.py index 01a9a6e8c..64280f9da 100644 --- a/tests/api/v1/test_api_time_weighted_average.py +++ b/tests/api/v1/test_api_time_weighted_average.py @@ -15,7 +15,7 @@ import pytest from pytest_mock import MockerFixture import pandas as pd -from datetime import datetime +from datetime import datetime, timezone from tests.api.v1.api_test_objects import ( TIME_WEIGHTED_AVERAGE_MOCKED_PARAMETER_DICT, TIME_WEIGHTED_AVERAGE_MOCKED_PARAMETER_ERROR_DICT, @@ -36,7 +36,11 @@ async def test_api_time_weighted_average_get_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) test_data = test_data.set_index("EventTime") mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -51,7 +55,8 @@ async def test_api_time_weighted_average_get_success(mocker: MockerFixture): test_data = 
test_data.reset_index() expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + ',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -60,7 +65,11 @@ async def test_api_time_weighted_average_get_success(mocker: MockerFixture): async def test_api_time_weighted_average_get_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -81,7 +90,11 @@ async def test_api_time_weighted_average_get_validation_error(mocker: MockerFixt async def test_api_time_weighted_average_get_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") @@ -101,7 +114,11 @@ async def test_api_time_weighted_average_get_error(mocker: MockerFixture): async def test_api_time_weighted_average_post_success(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) test_data = test_data.set_index("EventTime") mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -117,7 +134,8 @@ async def test_api_time_weighted_average_post_success(mocker: MockerFixture): test_data = test_data.reset_index() expected = test_data.to_json(orient="table", index=False, date_unit="ns") expected = ( - expected.rstrip("}") + 
',"pagination":{"limit":null,"offset":null,"next":null}}' + expected.replace(',"tz":"UTC"', "").rstrip("}") + + ',"pagination":{"limit":null,"offset":null,"next":null}}' ) assert response.status_code == 200 @@ -126,7 +144,11 @@ async def test_api_time_weighted_average_post_success(mocker: MockerFixture): async def test_api_time_weighted_average_post_validation_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup(mocker, MOCK_METHOD, test_data) @@ -148,7 +170,11 @@ async def test_api_time_weighted_average_post_validation_error(mocker: MockerFix async def test_api_time_weighted_average_post_error(mocker: MockerFixture): test_data = pd.DataFrame( - {"EventTime": [datetime.utcnow()], "TagName": ["TestTag"], "Value": [1.01]} + { + "EventTime": [datetime.now(timezone.utc)], + "TagName": ["TestTag"], + "Value": [1.01], + } ) mocker = mocker_setup( mocker, MOCK_METHOD, test_data, Exception("Error Connecting to Database") diff --git a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_daily_load_iso.py b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_daily_load_iso.py index bce27e670..372be5827 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_daily_load_iso.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_daily_load_iso.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import sys from io import StringIO @@ -147,7 +147,7 @@ def test_miso_daily_load_iso_invalid_date_format(spark_session: SparkSession): def test_miso_daily_load_iso_invalid_date(spark_session: SparkSession): - future_date = (datetime.utcnow() + timedelta(days=10)).strftime("%Y%m%d") + future_date = (datetime.now(timezone.utc) + timedelta(days=10)).strftime("%Y%m%d") with pytest.raises(ValueError) as exc_info: iso_source = MISODailyLoadISOSource( diff --git a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_historical_load_iso.py b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_historical_load_iso.py index 649180595..a4b9e6b84 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_historical_load_iso.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_miso_historical_load_iso.py @@ -13,7 +13,7 @@ # limitations under the License. from io import StringIO -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd import pytest @@ -156,7 +156,7 @@ def test_miso_historical_load_iso_invalid_dates(spark_session: SparkSession): assert str(exc_info.value) == "Start date can't be ahead of End date." 
- future_date = (datetime.utcnow() + timedelta(days=10)).strftime("%Y%m%d") + future_date = (datetime.now(timezone.utc) + timedelta(days=10)).strftime("%Y%m%d") with pytest.raises(ValueError) as exc_info: iso_source = MISOHistoricalLoadISOSource( diff --git a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_load_iso.py b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_load_iso.py index f78b1df23..296a8c735 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_load_iso.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_load_iso.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import sys from io import StringIO @@ -141,7 +141,7 @@ def test_miso_historical_load_iso_invalid_dates(spark_session: SparkSession): assert str(exc_info.value) == "Start date can't be ahead of End date." 
- future_date = (datetime.utcnow() + timedelta(days=10)).strftime("%Y-%m-%d") + future_date = (datetime.now(timezone.utc) + timedelta(days=10)).strftime("%Y-%m-%d") with pytest.raises(ValueError) as exc_info: iso_source = PJMHistoricalLoadISOSource( diff --git a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_pricing_iso.py b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_pricing_iso.py index b278257f9..372e1ed50 100644 --- a/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_pricing_iso.py +++ b/tests/sdk/python/rtdip_sdk/pipelines/sources/spark/iso/test_pjm_historical_pricing_iso.py @@ -14,7 +14,7 @@ import io import json import sys -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from io import StringIO import numpy as np @@ -368,7 +368,7 @@ def test_miso_historical_pricing_iso_invalid_dates(spark_session: SparkSession): assert str(exc_info.value) == "Start date can't be ahead of End date." - future_date = (datetime.utcnow() + timedelta(days=10)).strftime("%Y-%m-%d") + future_date = (datetime.now(timezone.utc) + timedelta(days=10)).strftime("%Y-%m-%d") with pytest.raises(ValueError) as exc_info: iso_source = PJMHistoricalPricingISOSource(