1 change: 0 additions & 1 deletion setup.py
```diff
@@ -20,7 +20,6 @@
 
 # Always prefer setuptools over distutils
 from setuptools import setup, find_packages, sic
-from setuptools.extern import packaging
 import pathlib
 import os
 
```
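The removed line imported `packaging` from setuptools' vendored `extern` namespace; since nothing replaces it in this diff, the import appears to have been unused. For reference, a minimal sketch (not from this PR) of the supported way to get the same functionality if a setup script still needed it, via the standalone `packaging` distribution rather than the vendored copy:

```python
# Hedged sketch only: the standalone 'packaging' library is the stable
# import path; setuptools.extern is an internal, vendored namespace.
from packaging.version import Version

# The kind of version comparison setup scripts sometimes perform.
assert Version("0.12.1") > Version("0.9.0")
```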
30 changes: 29 additions & 1 deletion tests/conftest.py
```diff
@@ -15,6 +15,7 @@
 import os
 import shutil
 
+from src.sdk.python.rtdip_sdk.connectors.grpc.spark_connector import SparkConnection
 from src.sdk.python.rtdip_sdk.pipelines.destinations import * # NOSONAR
 from src.sdk.python.rtdip_sdk.pipelines.sources import * # NOSONAR
 from src.sdk.python.rtdip_sdk.pipelines.utilities.spark.session import (
@@ -29,6 +30,8 @@
     "spark.master": "local[*]",
 }
 
+datetime_format = "%Y-%m-%dT%H:%M:%S.%f000Z"
+
 
 @pytest.fixture(scope="session")
 def spark_session():
@@ -45,7 +48,32 @@ def spark_session():
         shutil.rmtree(path)
 
 
-datetime_format = "%Y-%m-%dT%H:%M:%S.%f000Z"
+@pytest.fixture(scope="session")
+def spark_connection(spark_session: SparkSession):
+    table_name = "test_table"
+    data = [
+        {
+            "EventTime": datetime(2022, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
+            "TagName": "TestTag",
+            "Status": "Good",
+            "Value": 1.5,
+        },
+        {
+            "EventTime": datetime(2022, 1, 1, 12, 0, 0, tzinfo=timezone.utc),
+            "TagName": "TestTag",
+            "Status": "Good",
+            "Value": 2.0,
+        },
+        {
+            "EventTime": datetime(2022, 1, 2, 0, 0, 0, tzinfo=timezone.utc),
+            "TagName": "TestTag",
+            "Status": "Good",
+            "Value": 1.0,
+        },
+    ]
+    df = spark_session.createDataFrame(data)
+    df.write.format("delta").mode("overwrite").saveAsTable(table_name)
+    return SparkConnection(spark=spark_session)
 
 
 def expected_result(data, limit="null", offset="null", next="null"):
```
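The new session-scoped `spark_connection` fixture seeds a three-row Delta table named `test_table` and wraps the shared Spark session in a `SparkConnection`, so the query tests added in this PR run against real data instead of mocks. A minimal usage sketch, mirroring the tests below (the `SQLQueryBuilder` import path is an assumption, not shown in this diff):

```python
# Hedged usage sketch: pytest injects the session-scoped fixture by name.
from src.sdk.python.rtdip_sdk.queries.sql.sql_query import SQLQueryBuilder  # import path assumed


def test_seeded_table(spark_connection):
    # Query the Delta table seeded by the fixture through the connection.
    df = SQLQueryBuilder().get(spark_connection, "SELECT * FROM test_table")
    assert df.count() == 3  # the three rows seeded in conftest.py
```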
19 changes: 19 additions & 0 deletions tests/sdk/python/rtdip_sdk/queries/sql/test_sql_query.py
```diff
@@ -64,3 +64,22 @@ def test_sql_query_fail(mocker: MockerFixture):
 
     with pytest.raises(Exception):
         SQLQueryBuilder().get(mocked_connection, MOCKED_SQL_QUERY)
+
+
+@pytest.mark.parametrize(
+    "parameters, expected",
+    [
+        (
+            {
+                "sql_statement": "SELECT EventTime, TagName, Status, Value FROM test_table",
+            },
+            {"count": 3},
+        ),
+        # Add more test cases as needed
+    ],
+)
+def test_raw_query(spark_connection, parameters, expected):
+    df = SQLQueryBuilder().get(spark_connection, parameters["sql_statement"])
+    assert df.columns == ["EventTime", "TagName", "Status", "Value"]
+    df.show()
+    assert df.count() == expected["count"]
```
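Each `(parameters, expected)` tuple in the parametrize list becomes its own test invocation, and the `# Add more test cases as needed` comment invites extension. A hedged sketch of one such additional case (not part of the PR), filtering the seeded rows; the expected count follows the conftest seed data, where two of the three rows have `Value` above 1.0:

```python
import pytest

from src.sdk.python.rtdip_sdk.queries.sql.sql_query import SQLQueryBuilder  # import path assumed


@pytest.mark.parametrize(
    "parameters, expected",
    [
        (
            {"sql_statement": "SELECT * FROM test_table WHERE Value > 1.0"},
            {"count": 2},  # seed rows with Value 1.5 and 2.0
        ),
    ],
)
def test_sql_query_filtered(spark_connection, parameters, expected):
    df = SQLQueryBuilder().get(spark_connection, parameters["sql_statement"])
    assert df.count() == expected["count"]
```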
33 changes: 33 additions & 0 deletions tests/sdk/python/rtdip_sdk/queries/time_series/test_raw.py
```diff
@@ -15,6 +15,7 @@
 import sys
 
 sys.path.insert(0, ".")
+import pytest
 from pytest_mock import MockerFixture
 from src.sdk.python.rtdip_sdk.queries.time_series.raw import get as raw_get
 from tests.sdk.python.rtdip_sdk.queries.time_series._test_base import (
@@ -62,3 +63,35 @@ def test_raw_offset_limit(mocker: MockerFixture):
 
 def test_raw_fails(mocker: MockerFixture):
     _test_base_fails(mocker, MOCKED_PARAMETER_DICT, raw_get)
+
+
+@pytest.mark.parametrize(
+    "parameters, expected",
+    [
+        (
+            {
+                "source": "test_table",
+                "start_date": "2022-01-01",
+                "end_date": "2022-01-01",
+                "tag_names": ["TestTag"],
+                "include_bad_data": True,
+            },
+            {"count": 2},
+        ),
+        (
+            {
+                "source": "test_table",
+                "start_date": "2022-01-01T00:00:00",
+                "end_date": "2022-01-01T23:59:59",
+                "tag_names": ["TestTag"],
+                "include_bad_data": True,
+            },
+            {"count": 2},
+        ),
+        # Add more test cases as needed
+    ],
+)
+def test_raw_query(spark_connection, parameters, expected):
+    df = raw_get(spark_connection, parameters)
+    assert df.columns == ["EventTime", "TagName", "Status", "Value"]
+    assert df.count() == expected["count"]
```
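Both cases expect a count of 2 because the seeded rows at 2022-01-01 00:00 and 12:00 UTC fall inside the window while the 2022-01-02 row does not; the first case also implies that a date-only `end_date` is expanded to the end of that day. A hedged sketch of a third case (not in the PR) that widens the window to pick up the third seeded row, assuming that end-of-day expansion holds:

```python
import pytest

from src.sdk.python.rtdip_sdk.queries.time_series.raw import get as raw_get


@pytest.mark.parametrize(
    "parameters, expected",
    [
        (
            {
                "source": "test_table",
                "start_date": "2022-01-01",
                "end_date": "2022-01-02",  # now covers the 2022-01-02 00:00 row
                "tag_names": ["TestTag"],
                "include_bad_data": True,
            },
            {"count": 3},  # all three rows seeded in conftest.py
        ),
    ],
)
def test_raw_query_full_range(spark_connection, parameters, expected):
    df = raw_get(spark_connection, parameters)
    assert df.count() == expected["count"]
```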