From 8d7683c1c63eb9e3b12e496e504f80df23650b49 Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 31 Jul 2024 13:52:02 +0100 Subject: [PATCH 1/4] Initial Commit Signed-off-by: GBBBAS --- docs/assets/extra.css | 16 +-- docs/getting-started/installation.md | 17 ++- docs/sdk/authentication/azure.md | 11 +- docs/sdk/authentication/databricks.md | 6 +- .../query/functions/metadata.md | 2 +- .../functions/time_series/circular-average.md | 2 +- .../circular-standard-deviation.md | 2 +- .../functions/time_series/interpolate.md | 2 +- .../time_series/interpolation-at-time.md | 2 +- .../query/functions/time_series/latest.md | 2 +- .../query/functions/time_series/plot.md | 2 +- .../query/functions/time_series/raw.md | 2 +- .../query/functions/time_series/resample.md | 2 +- .../query/functions/time_series/summary.md | 2 +- .../time_series/time-weighted-average.md | 2 +- .../query/functions/weather/latest.md | 46 +------- .../query/functions/weather/raw.md | 58 +--------- docs/sdk/examples/query/Circular-Average.md | 2 +- .../query/Circular-Standard-Deviation.md | 2 +- docs/sdk/examples/query/Interpolate.md | 2 +- .../examples/query/Interpolation-at-Time.md | 2 +- docs/sdk/examples/query/Metadata.md | 2 +- docs/sdk/examples/query/Plot.md | 2 +- docs/sdk/examples/query/Raw.md | 2 +- docs/sdk/examples/query/Resample.md | 2 +- docs/sdk/examples/query/Summary.md | 2 +- .../examples/query/Time-Weighted-Average.md | 2 +- docs/sdk/queries/connectors.md | 20 +++- docs/sdk/queries/functions.md | 73 ++++++------- docs/university/essentials/rtdip/overview.md | 9 ++ .../essentials/sdk/authentication/azure.md | 13 +++ .../sdk/authentication/databricks.md | 13 +++ .../essentials/sdk/authentication/exercise.md | 26 +++++ .../connectors/databricks-sql-connector.md | 14 +++ .../essentials/sdk/connectors/exercise.md | 8 ++ .../sdk/connectors/odbc-connectors.md | 14 +++ .../sdk/connectors/spark-connector.md | 14 +++ .../sdk/getting-started/exercise.md | 24 ++++ .../sdk/getting-started/installation.md | 14 +++ .../sdk/getting-started/introduction.md | 15 +++ .../sdk/getting-started/prerequisites.md | 14 +++ .../essentials/sdk/queries/exercise.md | 9 ++ docs/university/essentials/sdk/queries/sql.md | 17 +++ .../essentials/sdk/queries/timeseries.md | 103 ++++++++++++++++++ .../essentials/sdk/queries/weather.md | 32 ++++++ mkdocs.yml | 31 +++++- .../time_series/time_series_query_builder.py | 46 ++++---- 47 files changed, 501 insertions(+), 204 deletions(-) create mode 100644 docs/university/essentials/rtdip/overview.md create mode 100644 docs/university/essentials/sdk/authentication/azure.md create mode 100644 docs/university/essentials/sdk/authentication/databricks.md create mode 100644 docs/university/essentials/sdk/authentication/exercise.md create mode 100644 docs/university/essentials/sdk/connectors/databricks-sql-connector.md create mode 100644 docs/university/essentials/sdk/connectors/exercise.md create mode 100644 docs/university/essentials/sdk/connectors/odbc-connectors.md create mode 100644 docs/university/essentials/sdk/connectors/spark-connector.md create mode 100644 docs/university/essentials/sdk/getting-started/exercise.md create mode 100644 docs/university/essentials/sdk/getting-started/installation.md create mode 100644 docs/university/essentials/sdk/getting-started/introduction.md create mode 100644 docs/university/essentials/sdk/getting-started/prerequisites.md create mode 100644 docs/university/essentials/sdk/queries/exercise.md create mode 100644 docs/university/essentials/sdk/queries/sql.md create mode
100644 docs/university/essentials/sdk/queries/timeseries.md create mode 100644 docs/university/essentials/sdk/queries/weather.md diff --git a/docs/assets/extra.css b/docs/assets/extra.css index 8d1a93671..7c20a6596 100644 --- a/docs/assets/extra.css +++ b/docs/assets/extra.css @@ -15,13 +15,13 @@ */ :root { - --md-primary-fg-color: #4e08c7 !important; - --md-primary-mg-color: #d445a3 !important; - --md-accent-fg-color: #bb1fa4 !important; - --md-primary-bg-color: white !important; - --md-primary-text-slate: white !important; - --md-primary-bg-slate: #2f303e !important; - } + --md-primary-fg-color: #4e08c7 !important; + --md-primary-mg-color: #d445a3 !important; + --md-accent-fg-color: #bb1fa4 !important; + --md-primary-bg-color: white !important; + --md-primary-text-slate: white !important; + --md-primary-bg-slate: #2f303e !important; +} /* header font colour */ .md-header { @@ -41,7 +41,7 @@ } .md-nav__item .md-nav__link--active { - color:#d445a3; + color: #d445a3; } .image-center { diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index 20fb19556..6cf65bae9 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -12,6 +12,8 @@ This article provides a guide on how to install the RTDIP SDK. Get started by en ## Prerequisites + + ### Python There are a few things to note before using the RTDIP SDK. The following prerequisites will need to be installed on your local machine. @@ -55,6 +57,8 @@ Installing the RTDIP can be done using a package installer, such as [Pip](https: micromamba self-update + + ### ODBC To use pyodbc or turbodbc python libraries, ensure it is installed as per the below and the ODBC driver is installed as per these [instructions](https://docs.microsoft.com/en-us/azure/databricks/integrations/bi/jdbc-odbc-bi#download-the-odbc-driver). @@ -87,7 +91,7 @@ To use RTDIP Pipelines components in your own environment that leverages [pyspar - defaults dependencies: - python==3.11 - - pip==23.0.1 + - pip - openjdk==11.0.15 - pip: - rtdip-sdk @@ -108,6 +112,8 @@ To use RTDIP Pipelines components in your own environment that leverages [pyspar ## Installing the RTDIP SDK + + RTDIP SDK is a PyPi package that can be found [here](https://pypi.org/project/rtdip-sdk/). On this page you can find the **project description**, **release history**, **statistics**, **project links** and **maintainers**. Features of the SDK can be installed using different extras statements when installing the **rtdip-sdk** package: @@ -128,7 +134,7 @@ Features of the SDK can be installed using different extras statements when inst pip install "rtdip-sdk[pipelines,pyspark]" !!! note "Java" - Ensure that Java is installed prior to installing the rtdip-sdk with the **[pipelines,pyspark]**. See [here](#java) for more information. + Ensure that Java is installed prior to installing the rtdip-sdk with the **[pipelines,pyspark]**. See [here](https://www.rtdip.io/getting-started/installation/#java) for more information. The following provides examples of how to install the RTDIP SDK package with Pip, Conda or Micromamba. Please note the section above to update any extra packages to be installed as part of the RTDIP SDK. 
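As a quick sanity check after any of the installation routes above, the installed distribution can be verified from Python. This is a minimal sketch using only the standard library; the distribution name `rtdip-sdk` matches the PyPI package referenced above:

```python
# Minimal post-install smoke test: confirm the rtdip-sdk distribution
# resolved in the active environment and print the installed version.
from importlib.metadata import version

print(version("rtdip-sdk"))
```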
@@ -153,7 +159,7 @@ The following provides examples of how to install the RTDIP SDK package with Pip - defaults dependencies: - python==3.11 - - pip==23.0.1 + - pip - pip: - rtdip-sdk ``` @@ -177,7 +183,7 @@ The following provides examples of how to install the RTDIP SDK package with Pip - defaults dependencies: - python==3.11 - - pip==23.0.1 + - pip - pip: - rtdip-sdk ``` @@ -190,6 +196,9 @@ The following provides examples of how to install the RTDIP SDK package with Pip micromamba update -f environment.yml + + + ## Next steps Once the installation is complete you can learn how to use the SDK [here.](../sdk/overview.md) diff --git a/docs/sdk/authentication/azure.md b/docs/sdk/authentication/azure.md index 8497e8ace..231587a1b 100644 --- a/docs/sdk/authentication/azure.md +++ b/docs/sdk/authentication/azure.md @@ -14,10 +14,12 @@ The RTDIP SDK includes several Azure AD authentication methods to cater to the p ## Authentication -The following section describes authentication using [Azure Active Directory.](../code-reference/authentication/azure.md). + + +The following section describes authentication using [Azure Active Directory.](https://www.rtdip.io/sdk/code-reference/authentication/azure/). !!! note "Note" - If you are using the SDK directly in Databricks please note that DefaultAuth will not work.
+ If you are using the SDK directly in Databricks please note that DefaultAuth will not work. 1\. Import **rtdip-sdk** authentication methods with the following: @@ -43,5 +45,8 @@ Once authenticated, it is possible to retrieve tokens for specific Azure Resourc === "Databricks" access_token = DefaultAzureCredential.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token + + !!! note "Note" - RTDIP are continuously adding more to this list so check back regularly!
\ No newline at end of file + RTDIP are continuously adding more to this list so check back regularly! + diff --git a/docs/sdk/authentication/databricks.md b/docs/sdk/authentication/databricks.md index 5f589b2c7..2c786d808 100644 --- a/docs/sdk/authentication/databricks.md +++ b/docs/sdk/authentication/databricks.md @@ -1,5 +1,7 @@ # Databricks + + Databricks supports authentication using Personal Access Tokens (PAT) and information about this authentication method is available [here.](https://docs.databricks.com/dev-tools/api/latest/authentication.html) ## Authentication @@ -22,4 +24,6 @@ access_token = "dbapi......." connection = DatabricksSQLConnection(server_hostname, http_path, access_token) ``` -Replace **server_hostname**, **http_path** with your own information and specify your Databricks PAT token for the **access_token**. \ No newline at end of file +Replace **server_hostname**, **http_path** with your own information and specify your Databricks PAT token for the **access_token**. + + \ No newline at end of file diff --git a/docs/sdk/code-reference/query/functions/metadata.md b/docs/sdk/code-reference/query/functions/metadata.md index 2d82dd74a..74e0454b3 100644 --- a/docs/sdk/code-reference/query/functions/metadata.md +++ b/docs/sdk/code-reference/query/functions/metadata.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Metadata/metadata.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Metadata/metadata.py" ``` This example is using [```DefaultAuth()```](../../authentication/azure.md) and [```DatabricksSQLConnection()```](../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/circular-average.md b/docs/sdk/code-reference/query/functions/time_series/circular-average.md index af453cc29..64ad9f050 100644 --- a/docs/sdk/code-reference/query/functions/time_series/circular-average.md +++ b/docs/sdk/code-reference/query/functions/time_series/circular-average.md @@ -3,7 +3,7 @@ ## Example ```python - --8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Circular-Average/circular_average.py" + --8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Average/circular_average.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). 
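Every one of these relocated samples shares the same authentication and connection preamble; for reviewers who do not want to follow the snippet URLs, it looks like the following sketch, reproduced from the inline examples elsewhere in this patch (the braced hostname and HTTP path are placeholders for your own Databricks SQL Warehouse details):

```python
from rtdip_sdk.authentication.azure import DefaultAuth
from rtdip_sdk.connectors import DatabricksSQLConnection

# Authenticate via Azure AD and request a token scoped to the Databricks
# resource ID used throughout these docs.
auth = DefaultAuth().authenticate()
token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token

# Placeholders: replace with your workspace hostname and warehouse HTTP path.
connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token)
```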
diff --git a/docs/sdk/code-reference/query/functions/time_series/circular-standard-deviation.md b/docs/sdk/code-reference/query/functions/time_series/circular-standard-deviation.md index eaefc5754..f5de6a2a6 100644 --- a/docs/sdk/code-reference/query/functions/time_series/circular-standard-deviation.md +++ b/docs/sdk/code-reference/query/functions/time_series/circular-standard-deviation.md @@ -3,7 +3,7 @@ ## Example ```python - --8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Circular-Standard-Deviation/circular_standard_deviation.py" + --8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Standard-Deviation/circular_standard_deviation.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/interpolate.md b/docs/sdk/code-reference/query/functions/time_series/interpolate.md index 7a2289c61..ac5d70ebf 100644 --- a/docs/sdk/code-reference/query/functions/time_series/interpolate.md +++ b/docs/sdk/code-reference/query/functions/time_series/interpolate.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Interpolate/interpolate.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolate/interpolate.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/interpolation-at-time.md b/docs/sdk/code-reference/query/functions/time_series/interpolation-at-time.md index a92a5f90f..19b4c2b1c 100644 --- a/docs/sdk/code-reference/query/functions/time_series/interpolation-at-time.md +++ b/docs/sdk/code-reference/query/functions/time_series/interpolation-at-time.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Interpolation-at-Time/interpolation_at_time.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolation-at-Time/interpolation_at_time.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). 
The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/latest.md b/docs/sdk/code-reference/query/functions/time_series/latest.md index 1690c30b7..09c2ab7a5 100644 --- a/docs/sdk/code-reference/query/functions/time_series/latest.md +++ b/docs/sdk/code-reference/query/functions/time_series/latest.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Latest/latest.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Latest/latest.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/plot.md b/docs/sdk/code-reference/query/functions/time_series/plot.md index 9d4dd190f..80271b651 100644 --- a/docs/sdk/code-reference/query/functions/time_series/plot.md +++ b/docs/sdk/code-reference/query/functions/time_series/plot.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Plot/plot.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Plot/plot.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/raw.md b/docs/sdk/code-reference/query/functions/time_series/raw.md index 4d260a90a..2108b398d 100644 --- a/docs/sdk/code-reference/query/functions/time_series/raw.md +++ b/docs/sdk/code-reference/query/functions/time_series/raw.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Raw/raw.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Raw/raw.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). 
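The relocated `raw.py` sample exercises the same call documented in the `TimeSeriesQueryBuilder.raw()` docstring updated later in this patch; a condensed sketch, assuming a `connection` built as in the preamble above (braced values are placeholders):

```python
from rtdip_sdk.queries.time_series.time_series_query_builder import (
    TimeSeriesQueryBuilder,
)

# Raw extract between two dates for a set of tags. "{tablename_or_path}"
# and the tag names are placeholders to replace with your own values.
data = (
    TimeSeriesQueryBuilder()
    .connect(connection)
    .source("{tablename_or_path}")
    .raw(
        tagname_filter=["{tag_name_1}", "{tag_name_2}"],
        start_date="2023-01-01",
        end_date="2023-01-31",
    )
)

print(data)
```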
diff --git a/docs/sdk/code-reference/query/functions/time_series/resample.md b/docs/sdk/code-reference/query/functions/time_series/resample.md index ed247c6a4..3d0fec3e9 100644 --- a/docs/sdk/code-reference/query/functions/time_series/resample.md +++ b/docs/sdk/code-reference/query/functions/time_series/resample.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Resample/resample.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Resample/resample.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/summary.md b/docs/sdk/code-reference/query/functions/time_series/summary.md index 2929faaf1..74ad09d07 100644 --- a/docs/sdk/code-reference/query/functions/time_series/summary.md +++ b/docs/sdk/code-reference/query/functions/time_series/summary.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Summary/summary.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Summary/summary.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). diff --git a/docs/sdk/code-reference/query/functions/time_series/time-weighted-average.md b/docs/sdk/code-reference/query/functions/time_series/time-weighted-average.md index 0fca0f5dc..0788d260c 100644 --- a/docs/sdk/code-reference/query/functions/time_series/time-weighted-average.md +++ b/docs/sdk/code-reference/query/functions/time_series/time-weighted-average.md @@ -3,7 +3,7 @@ ## Example ```python ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Time-Weighted-Average/time_weighted_average.py" +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Time-Weighted-Average/time_weighted_average.py" ``` This example is using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). 
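For the time weighted average sample relocated above, the builder call follows the same shape; in this sketch the `time_interval_rate`, `time_interval_unit`, `window_length` and `step` keyword names are assumptions inferred from the parameter list in `functions.md`, since the docstring hunk later in this patch elides them:

```python
# Sketch of a time weighted average query over 15 minute intervals.
# Keyword names below are assumed from the documented parameters.
data = (
    TimeSeriesQueryBuilder()
    .connect(connection)
    .source("{tablename_or_path}")
    .time_weighted_average(
        tagname_filter=["{tag_name_1}", "{tag_name_2}"],
        start_date="2023-01-01",
        end_date="2023-01-31",
        time_interval_rate="15",      # assumed keyword: Time Interval Rate
        time_interval_unit="minute",  # assumed keyword: Time Interval Unit
        window_length=1,              # assumed keyword: Window Length
        step="metadata",              # assumed keyword: Step
    )
)

print(data)
```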
diff --git a/docs/sdk/code-reference/query/functions/weather/latest.md b/docs/sdk/code-reference/query/functions/weather/latest.md index 01532d537..ba1bda91b 100644 --- a/docs/sdk/code-reference/query/functions/weather/latest.md +++ b/docs/sdk/code-reference/query/functions/weather/latest.md @@ -4,55 +4,13 @@ ## Example get_point ```python -from rtdip_sdk.authentication.azure import DefaultAuth -from rtdip_sdk.queries.weather.latest import get_point -from rtdip_sdk.connectors import DatabricksSQLConnection - -auth = DefaultAuth().authenticate() -token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token -connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token) - -params = { - "forecast": "mock_forecast", - "forecast_type": "mock_weather", - "region": "mock_region", - "data_security_level": "mock_security", - "data_type": "mock_data_type", - "lat": 1.1, - "lon": 1.1, -} - -x = get_point(connection, params) - -print(x) +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Latest-Point/latest_point.py" ``` ## Example get_grid ```python -from rtdip_sdk.authentication.azure import DefaultAuth -from rtdip_sdk.queries.weather.latest import get_point -from rtdip_sdk.connectors import DatabricksSQLConnection - -auth = DefaultAuth().authenticate() -token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token -connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token) - -params = { - "forecast": "mock_forecast", - "forecast_type": "mock_weather", - "region": "mock_region", - "data_security_level": "mock_security", - "data_type": "mock_data_type", - "min_lat": 36, - "max_lat": 38, - "min_lon": -109.1, - "max_lon": -107.1, -} - -x = get_grid(connection, params) - -print(x) +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Latest-Grid/latest_grid.py" ``` These examples are using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). 
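Since this hunk swaps the inline weather samples for remote includes, one detail of the removed lines is worth flagging: the `get_grid` example imported `get_point` but then called `get_grid`, which would raise a `NameError`. A corrected sketch of that removed sample (all `mock_*` values and the braced connection details are placeholders):

```python
from rtdip_sdk.authentication.azure import DefaultAuth
from rtdip_sdk.queries.weather.latest import get_grid  # was get_point in the removed lines
from rtdip_sdk.connectors import DatabricksSQLConnection

auth = DefaultAuth().authenticate()
token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token
connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token)

# Placeholder parameters describing the forecast source and a bounding box.
params = {
    "forecast": "mock_forecast",
    "forecast_type": "mock_weather",
    "region": "mock_region",
    "data_security_level": "mock_security",
    "data_type": "mock_data_type",
    "min_lat": 36,
    "max_lat": 38,
    "min_lon": -109.1,
    "max_lon": -107.1,
}

x = get_grid(connection, params)
print(x)
```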
diff --git a/docs/sdk/code-reference/query/functions/weather/raw.md b/docs/sdk/code-reference/query/functions/weather/raw.md index bdbaa0f73..b90c1e3db 100644 --- a/docs/sdk/code-reference/query/functions/weather/raw.md +++ b/docs/sdk/code-reference/query/functions/weather/raw.md @@ -4,67 +4,13 @@ ## Example get_point ```python -from rtdip_sdk.authentication.azure import DefaultAuth -from rtdip_sdk.queries.weather.raw import get_point -from rtdip_sdk.connectors import DatabricksSQLConnection - -auth = DefaultAuth().authenticate() -token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token -connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token) - -params = { - "forecast": "mock_forecast", - "forecast_type": "mock_weather", - "region": "mock_region", - "data_security_level": "mock_security", - "data_type": "mock_data_type", - "lat": 1.1, - "lon": 1.1, - "start_date": "2020-01-01", - "end_date": "2020-01-02", - "forecast_run_start_date": "2020-01-01", - "forecast_run_end_date": "2020-01-02", - "timestamp_column": "EventTime", - "forecast_run_timestamp_column": "EnqueuedTime", -} - -x = get_point(connection, params) - -print(x) +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Raw-Point/raw_point.py" ``` ## Example get_grid ```python -from rtdip_sdk.authentication.azure import DefaultAuth -from rtdip_sdk.queries.weather.raw import get_grid -from rtdip_sdk.connectors import DatabricksSQLConnection - -auth = DefaultAuth().authenticate() -token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token -connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token) - -params = { - "forecast": "mock_forecast", - "forecast_type": "mock_weather", - "region": "mock_region", - "data_security_level": "mock_security", - "data_type": "mock_data_type", - "min_lat": 36, - "max_lat": 38, - "min_lon": -109.1, - "max_lon": -107.1, - "start_date": "2020-01-01", - "end_date": "2020-01-02", - "forecast_run_start_date": "2020-01-01", - "forecast_run_end_date": "2020-01-02", - "timestamp_column": "EventTime", - "forecast_run_timestamp_column": "EnqueuedTime", -} - -x = get_grid(connection, params) - -print(x) +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Raw-Grid/raw_grid.py" ``` These examples are using [```DefaultAuth()```](../../../authentication/azure.md) and [```DatabricksSQLConnection()```](../../connectors/db-sql-connector.md) to authenticate and connect. You can find other ways to authenticate [here](../../../authentication/azure.md). The alternative built in connection methods are either by [```PYODBCSQLConnection()```](../../connectors/pyodbc-sql-connector.md), [```TURBODBCSQLConnection()```](../../connectors/turbodbc-sql-connector.md) or [```SparkConnection()```](../../connectors/spark-connector.md). 
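The raw weather queries follow the same pattern with an added event-time window and forecast-run window; the removed `get_point` sample is reproduced here for offline reference (placeholders throughout):

```python
from rtdip_sdk.authentication.azure import DefaultAuth
from rtdip_sdk.queries.weather.raw import get_point
from rtdip_sdk.connectors import DatabricksSQLConnection

auth = DefaultAuth().authenticate()
token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token
connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token)

# Raw forecasts add an event-time window and a forecast-run window on top
# of the location parameters used by the latest queries.
params = {
    "forecast": "mock_forecast",
    "forecast_type": "mock_weather",
    "region": "mock_region",
    "data_security_level": "mock_security",
    "data_type": "mock_data_type",
    "lat": 1.1,
    "lon": 1.1,
    "start_date": "2020-01-01",
    "end_date": "2020-01-02",
    "forecast_run_start_date": "2020-01-01",
    "forecast_run_end_date": "2020-01-02",
    "timestamp_column": "EventTime",
    "forecast_run_timestamp_column": "EnqueuedTime",
}

x = get_point(connection, params)
print(x)
```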
diff --git a/docs/sdk/examples/query/Circular-Average.md b/docs/sdk/examples/query/Circular-Average.md index ed68e54d3..716e73696 100644 --- a/docs/sdk/examples/query/Circular-Average.md +++ b/docs/sdk/examples/query/Circular-Average.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Circular-Average/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Average/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Circular-Standard-Deviation.md b/docs/sdk/examples/query/Circular-Standard-Deviation.md index c54d2b4c0..efc7effb2 100644 --- a/docs/sdk/examples/query/Circular-Standard-Deviation.md +++ b/docs/sdk/examples/query/Circular-Standard-Deviation.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Circular-Standard-Deviation/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Standard-Deviation/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Interpolate.md b/docs/sdk/examples/query/Interpolate.md index 55be9e265..e236d85bd 100644 --- a/docs/sdk/examples/query/Interpolate.md +++ b/docs/sdk/examples/query/Interpolate.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Interpolate/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolate/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Interpolation-at-Time.md b/docs/sdk/examples/query/Interpolation-at-Time.md index 0a37eabe4..78d29a302 100644 --- a/docs/sdk/examples/query/Interpolation-at-Time.md +++ b/docs/sdk/examples/query/Interpolation-at-Time.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Interpolation-at-Time/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolation-at-Time/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Metadata.md b/docs/sdk/examples/query/Metadata.md index 0cdc7f836..467d9a78c 100644 --- a/docs/sdk/examples/query/Metadata.md +++ b/docs/sdk/examples/query/Metadata.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Metadata/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Metadata/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Plot.md b/docs/sdk/examples/query/Plot.md index a87aaa508..5779bf46e 100644 --- a/docs/sdk/examples/query/Plot.md +++ b/docs/sdk/examples/query/Plot.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Plot/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Plot/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Raw.md b/docs/sdk/examples/query/Raw.md index 522408f98..2d7a4c27e 100644 --- a/docs/sdk/examples/query/Raw.md +++ b/docs/sdk/examples/query/Raw.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Raw/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Raw/README.md" \ No newline at end of file diff --git 
a/docs/sdk/examples/query/Resample.md b/docs/sdk/examples/query/Resample.md index d90ae710f..04fcf55b1 100644 --- a/docs/sdk/examples/query/Resample.md +++ b/docs/sdk/examples/query/Resample.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Resample/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Resample/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Summary.md b/docs/sdk/examples/query/Summary.md index 8af63d2c0..00e65fc34 100644 --- a/docs/sdk/examples/query/Summary.md +++ b/docs/sdk/examples/query/Summary.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Summary/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Summary/README.md" \ No newline at end of file diff --git a/docs/sdk/examples/query/Time-Weighted-Average.md b/docs/sdk/examples/query/Time-Weighted-Average.md index ed1d44f7c..225c959fa 100644 --- a/docs/sdk/examples/query/Time-Weighted-Average.md +++ b/docs/sdk/examples/query/Time-Weighted-Average.md @@ -1 +1 @@ ---8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/Time-Weighted-Average/README.md" \ No newline at end of file +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Time-Weighted-Average/README.md" \ No newline at end of file diff --git a/docs/sdk/queries/connectors.md b/docs/sdk/queries/connectors.md index 7e34bed52..be8c99ba5 100644 --- a/docs/sdk/queries/connectors.md +++ b/docs/sdk/queries/connectors.md @@ -6,9 +6,11 @@ RTDIP SDK provides functionality to connect to and query its data using connecto ### Databricks SQL Connector + + Enables connectivity to Databricks using the [Databricks SQL Connector](https://pypi.org/project/databricks-sql-connector/) which does not require any ODBC installation. -For more information refer to this [documentation](https://docs.databricks.com/dev-tools/python-sql-connector.html) and for the specific implementation within the RTDIP SDK, refer to this [link](../code-reference/query/connectors//db-sql-connector.md) +For more information refer to this [documentation](https://docs.databricks.com/dev-tools/python-sql-connector.html) and for the specific implementation within the RTDIP SDK, refer to this [link](https://www.rtdip.io/sdk/code-reference/query/connectors/db-sql-connector/). ```python from rtdip_sdk.connectors import DatabricksSQLConnection server_hostname = "server_hostname" http_path = "http_path" access_token = "token" connection = DatabricksSQLConnection(server_hostname, http_path, access_token) ``` Replace **server_hostname**, **http_path** and **access_token** with your own information. + + + + ### PYODBC SQL Connector [PYODBC](https://pypi.org/project/pyodbc/) is a popular python package for querying data using ODBC. Refer to their [documentation](https://github.com/mkleehammer/pyodbc/wiki) for more information about pyodbc, how to install it and how you can leverage it in your code. !!! Warning The RTDIP SDK does not specify `pyodbc` as one of its package dependencies. It will need to be installed into your environment separately. 
-View information about how pyodbc is implemented in the RTDIP SDK [here.](../code-reference/query/connectors/pyodbc-sql-connector.md) +View information about how pyodbc is implemented in the RTDIP SDK [here.](https://www.rtdip.io/sdk/code-reference/query/connectors/pyodbc-sql-connector/) ```python from rtdip_sdk.connectors import PYODBCSQLConnection @@ -46,7 +52,7 @@ Replace **server_hostname**, **http_path** and **access_token** with your own in ### TURBODBC SQL Connector -Turbodbc is a powerful python ODBC package that has advanced options for querying performance. Find out more about installing it on your operation system and what Turbodbc can do [here](https://turbodbc.readthedocs.io/en/latest/) and refer to this [documentation](../code-reference/query/connectors/turbodbc-sql-connector.md) for more information about how it is implemented in the RTDIP SDK. +Turbodbc is a powerful python ODBC package that has advanced options for querying performance. Find out more about installing it on your operating system and what Turbodbc can do [here](https://turbodbc.readthedocs.io/en/latest/) and refer to this [documentation](https://www.rtdip.io/sdk/code-reference/query/connectors/turbodbc-sql-connector/) for more information about how it is implemented in the RTDIP SDK. !!! Warning The RTDIP SDK does not specify `turbodbc` as one of its package dependencies. It will need to be installed into your environment separately. @@ -63,10 +69,14 @@ connection = TURBODBCSQLConnection(server_hostname, http_path, access_token) ``` Replace **server_hostname**, **http_path** and **access_token** with your own information. + + ## Spark ### Spark Connector + + The Spark Connector enables querying of data using a Spark Session. This is useful for querying local instances of Spark or Delta. However, the most useful application of this connector is to leverage [Spark Connect](https://spark.apache.org/docs/latest/spark-connect-overview.html) to enable connecting to a remote Spark Cluster to provide the compute for the query being run from a local machine. ```python from rtdip_sdk.connectors import SparkConnection spark_server = "spark_server" access_token = "token" spark_remote = "sc://{}:443;token={}".format(spark_server, access_token) connection = SparkConnection(spark_remote=spark_remote) ``` -Replace the **access_token** with your own information. +Replace the **access_token** with your own authentication token. + + ## LLMs diff --git a/docs/sdk/queries/functions.md b/docs/sdk/queries/functions.md index c01072c2d..c2af80791 100644 --- a/docs/sdk/queries/functions.md +++ b/docs/sdk/queries/functions.md @@ -5,87 +5,82 @@ The RTDIP SDK enables users to perform complex queries, including aggregation on ## Time Series Events ### Raw - -[Raw](../code-reference/query/functions/time_series/raw.md) facilitates performing raw extracts of time series data, typically filtered by a Tag Name or Device Name and an event time. - + +[Raw](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/raw/) facilitates performing raw extracts of time series data, typically filtered by a Tag Name or Device Name and an event time. + ### Latest - -[Latest](../code-reference/query/functions/time_series/latest.md) queries provides the latest event values. The RTDIP SDK requires the following parameters to retrieve the latest event values: + +[Latest](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/latest/) queries provide the latest event values. 
The RTDIP SDK requires the following parameters to retrieve the latest event values: - TagNames - A list of tag names - + ### Resample + +[Resample](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/resample/) enables changing the frequency of time series observations. This is achieved by providing the following parameters: -[Resample](../code-reference/query/functions/time_series/resample.md) enables changing the frequency of time series observations. This is achieved by providing the following parameters: - -- Sample Rate - (deprecated) -- Sample Unit - (deprecated) - Time Interval Rate - The time interval rate - Time Interval Unit - The time interval unit (second, minute, day, hour) - Aggregation Method - Aggregations including first, last, avg, min, max - -!!! note "Note" - Sample Rate and Sample Unit parameters are deprecated and will be removed in v1.0.0. Please use Time Interval Rate and Time Interval Unit instead.
+ ### Plot + +[Plot](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/plot/) enables changing the frequency of time series observations and performing Average, Min, Max, First, Last and StdDev aggregations. This is achieved by providing the following parameters: -[Plot](../code-reference/query/functions/time_series/plot.md) enables changing the frequency of time series observations and performing Average, Min, Max, First, Last and StdDev aggregations. This is achieved by providing the following parameters: - -- Sample Rate - (deprecated) -- Sample Unit - (deprecated) - Time Interval Rate - The time interval rate - Time Interval Unit - The time interval unit (second, minute, day, hour) - -!!! note "Note" - Sample Rate and Sample Unit parameters are deprecated and will be removed in v1.0.0. Please use Time Interval Rate and Time Interval Unit instead.
+ ### Interpolate - -[Interpolate](../code-reference/query/functions/time_series/interpolate.md) - takes [resampling](#resample) one step further to estimate the values of unknown data points that fall between existing, known data points. In addition to the resampling parameters, interpolation also requires: + +[Interpolate](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/interpolate/) - takes [resampling](#resample) one step further to estimate the values of unknown data points that fall between existing, known data points. In addition to the resampling parameters, interpolation also requires: - Interpolation Method - Forward Fill, Backward Fill or Linear - + ### Interpolation at Time - -[Interpolation at Time](../code-reference/query/functions/time_series/interpolation-at-time.md) - works out the linear interpolation at a specific time based on the points before and after. This is achieved by providing the following parameter: + +[Interpolation at Time](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/interpolation-at-time/) - works out the linear interpolation at a specific time based on the points before and after. This is achieved by providing the following parameter: - Timestamps - A list of timestamp or timestamps - + ### Time Weighted Averages + +[Time Weighted Averages](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/time-weighted-average/) provide an unbiased average when working with irregularly sampled data. The RTDIP SDK requires the following parameters to perform time weighted average queries: -[Time Weighted Averages](../code-reference/query/functions/time_series/time-weighted-average.md) provide an unbiased average when working with irregularly sampled data. The RTDIP SDK requires the following parameters to perform time weighted average queries: -- Window Size Mins - (deprecated) - Time Interval Rate - The time interval rate - Time Interval Unit - The time interval unit (second, minute, day, hour) - Window Length - Adds a longer window time for the start or end of specified date to cater for edge cases - Step - Data points with step "enabled" or "disabled". The options for step are "true", "false" or "metadata" as string types. For "metadata", the query requires that the TagName has a step column configured correctly in the meta data table - -!!! note "Note" - Window Size Mins is deprecated and will be removed in v1.0.0. Please use Time Interval Rate and Time Interval Unit instead.
+ ### Circular Averages - -[Circular Averages](../code-reference/query/functions/time_series/circular-average.md) computes the circular average for samples in a range. The RTDIP SDK requires the following parameters to perform circular average queries: + +[Circular Averages](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/circular-average/) computes the circular average for samples in a range. The RTDIP SDK requires the following parameters to perform circular average queries: - Time Interval Rate - The time interval rate - Time Interval Unit - The time interval unit (second, minute, day, hour) - Lower Bound - The lower boundary for the sample range - Upper Bound - The upper boundary for the sample range - + ### Circular Standard Deviations - -[Circular Standard Deviations](..//code-reference/query/functions/time_series/circular-standard-deviation.md) computes the circular standard deviations for samples assumed to be in the range. The RTDIP SDK requires the following parameters to perform circular average queries: + +[Circular Standard Deviations](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/circular-standard-deviation/) computes the circular standard deviations for samples assumed to be in the range. The RTDIP SDK requires the following parameters to perform circular standard deviation queries: - Time Interval Rate - The time interval rate - Time Interval Unit - The time interval unit (second, minute, day, hour) - Lower Bound - The lower boundary for the sample range - Upper Bound - The upper boundary for the sample range + +### Summary + +[Summary](https://www.rtdip.io/sdk/code-reference/query/functions/time_series/summary/) computes a summary of statistics (Avg, Min, Max, Count, StDev, Sum, Variance). + ## Time Series Metadata ### Metadata -[Metadata](../code-reference/query/functions/metadata.md) queries provide contextual information for time series measurements and include information such as names, descriptions and units of measure. - + +[Metadata](https://www.rtdip.io/sdk/code-reference/query/functions/metadata/) queries provide contextual information for time series measurements and include information such as names, descriptions and units of measure. + !!! note "Note" RTDIP are continuously adding more to this list so check back regularly.
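To tie the parameter lists above back to runnable code, here is a sketch of a resample via the `TimeSeriesQueryBuilder` updated later in this patch. The `time_interval_rate`, `time_interval_unit` and `agg_method` keyword names are assumptions inferred from the documented parameters; `connection` is any of the connectors described in `connectors.md`:

```python
from rtdip_sdk.queries.time_series.time_series_query_builder import (
    TimeSeriesQueryBuilder,
)

# Resample two tags to 15 minute averages. Keyword names for the interval
# and aggregation are assumed from the parameter list in functions.md.
data = (
    TimeSeriesQueryBuilder()
    .connect(connection)
    .source("{tablename_or_path}")
    .resample(
        tagname_filter=["{tag_name_1}", "{tag_name_2}"],
        start_date="2023-01-01",
        end_date="2023-01-31",
        time_interval_rate="15",      # Time Interval Rate
        time_interval_unit="minute",  # Time Interval Unit (second, minute, day, hour)
        agg_method="avg",             # Aggregation Method: first, last, avg, min, max
    )
)

print(data)
```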
diff --git a/docs/university/essentials/rtdip/overview.md b/docs/university/essentials/rtdip/overview.md new file mode 100644 index 000000000..a5209da16 --- /dev/null +++ b/docs/university/essentials/rtdip/overview.md @@ -0,0 +1,9 @@ +# Introduction to RTDIP + +Welcome to the RTDIP Essentials training course. This section introduces you to the Real Time Data Ingestion Platform, a scalable solution for ingesting and processing data from a variety of sources. + +## Course Progress + +- [X] Overview +- [ ] SDK +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/authentication/azure.md b/docs/university/essentials/sdk/authentication/azure.md new file mode 100644 index 000000000..332305b8f --- /dev/null +++ b/docs/university/essentials/sdk/authentication/azure.md @@ -0,0 +1,13 @@ +--8<-- "sdk/authentication/azure.md:azuread" + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [ ] Authentication + + [X] Azure Active Directory + + [ ] Databricks + + [ ] Exercise + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/authentication/databricks.md b/docs/university/essentials/sdk/authentication/databricks.md new file mode 100644 index 000000000..8ec8c7a15 --- /dev/null +++ b/docs/university/essentials/sdk/authentication/databricks.md @@ -0,0 +1,13 @@ +--8<-- "sdk/authentication/databricks.md:databrickspat" + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [ ] Authentication + + [X] Azure Active Directory + + [X] Databricks + + [ ] Exercise + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/authentication/exercise.md b/docs/university/essentials/sdk/authentication/exercise.md new file mode 100644 index 000000000..13f0f4f7e --- /dev/null +++ b/docs/university/essentials/sdk/authentication/exercise.md @@ -0,0 +1,26 @@ + + + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [ ] Connectors + * [ ] Queries +- [ ] APIs + diff --git a/docs/university/essentials/sdk/connectors/databricks-sql-connector.md b/docs/university/essentials/sdk/connectors/databricks-sql-connector.md new file mode 100644 index 000000000..76e2b71ce --- /dev/null +++ b/docs/university/essentials/sdk/connectors/databricks-sql-connector.md @@ -0,0 +1,14 @@ +--8<-- "sdk/queries/connectors.md:databrickssql" + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [ ] Connectors + + [X] Databricks SQL + + [ ] ODBC + + [ ] Spark + + [ ] Exercise + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/connectors/exercise.md b/docs/university/essentials/sdk/connectors/exercise.md new file mode 100644 index 000000000..22899dd48 --- /dev/null +++ b/docs/university/essentials/sdk/connectors/exercise.md @@ -0,0 +1,8 @@ +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [X] Connectors + * [ ] Queries +- [ ] APIs diff --git a/docs/university/essentials/sdk/connectors/odbc-connectors.md b/docs/university/essentials/sdk/connectors/odbc-connectors.md new file mode 100644 index 000000000..126089c90 --- /dev/null +++ b/docs/university/essentials/sdk/connectors/odbc-connectors.md @@ -0,0 +1,14 @@ +--8<-- "sdk/queries/connectors.md:odbcconnectors" + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [ ] Connectors + + 
[X] Databricks SQL + + [X] ODBC + + [ ] Spark + + [ ] Exercise + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/connectors/spark-connector.md b/docs/university/essentials/sdk/connectors/spark-connector.md new file mode 100644 index 000000000..82a33e011 --- /dev/null +++ b/docs/university/essentials/sdk/connectors/spark-connector.md @@ -0,0 +1,14 @@ +--8<-- "sdk/queries/connectors.md:sparkconnector" + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [ ] Connectors + + [X] Databricks SQL + + [X] ODBC + + [X] Spark + + [ ] Exercise + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/getting-started/exercise.md b/docs/university/essentials/sdk/getting-started/exercise.md new file mode 100644 index 000000000..ea1f02196 --- /dev/null +++ b/docs/university/essentials/sdk/getting-started/exercise.md @@ -0,0 +1,24 @@ + + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [ ] Authentication + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/getting-started/installation.md b/docs/university/essentials/sdk/getting-started/installation.md new file mode 100644 index 000000000..7fc9c1e19 --- /dev/null +++ b/docs/university/essentials/sdk/getting-started/installation.md @@ -0,0 +1,14 @@ +--8<-- "getting-started/installation.md:installation" + +## Course Progress +- [X] Overview +- [ ] SDK + * [ ] Getting Started + + [X] Introduction + + [X] Prerequisites + + [X] Installation + + [ ] Exercise + * [ ] Authentication + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/getting-started/introduction.md b/docs/university/essentials/sdk/getting-started/introduction.md new file mode 100644 index 000000000..e8a2c4538 --- /dev/null +++ b/docs/university/essentials/sdk/getting-started/introduction.md @@ -0,0 +1,15 @@ +# What is the RTDIP SDK? 
+ + +## Course Progress +- [X] Overview +- [ ] SDK + * [ ] Getting Started + + [X] Introduction + + [ ] Prerequisites + + [ ] Installation + + [ ] Exercise + * [ ] Authentication + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/getting-started/prerequisites.md b/docs/university/essentials/sdk/getting-started/prerequisites.md new file mode 100644 index 000000000..6257b1f1d --- /dev/null +++ b/docs/university/essentials/sdk/getting-started/prerequisites.md @@ -0,0 +1,14 @@ +--8<-- "getting-started/installation.md:prerequisites" + +## Course Progress +- [X] Overview +- [ ] SDK + * [ ] Getting Started + + [X] Introduction + + [X] Prerequisites + + [ ] Installation + + [ ] Exercise + * [ ] Authentication + * [ ] Connectors + * [ ] Queries +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/queries/exercise.md b/docs/university/essentials/sdk/queries/exercise.md new file mode 100644 index 000000000..4f8324fbf --- /dev/null +++ b/docs/university/essentials/sdk/queries/exercise.md @@ -0,0 +1,9 @@ +## Course Progress +- [X] Overview +- [X] SDK + * [X] Getting Started + * [X] Authentication + * [X] Connectors + * [X] Queries +- [ ] APIs + diff --git a/docs/university/essentials/sdk/queries/sql.md b/docs/university/essentials/sdk/queries/sql.md new file mode 100644 index 000000000..275a42e29 --- /dev/null +++ b/docs/university/essentials/sdk/queries/sql.md @@ -0,0 +1,17 @@ +## Execute +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/SQLQueryBuilder/get.py" +``` + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [X] Connectors + * [ ] Queries + + [X] Time Series + + [X] SQL + + [ ] Weather + + [ ] Exercise +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/queries/timeseries.md b/docs/university/essentials/sdk/queries/timeseries.md new file mode 100644 index 000000000..5e14c5e28 --- /dev/null +++ b/docs/university/essentials/sdk/queries/timeseries.md @@ -0,0 +1,103 @@ + +## Raw + +--8<-- "sdk/queries/functions.md:raw" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Raw/raw.py" +``` + +## Resample + +--8<-- "sdk/queries/functions.md:resample" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Resample/resample.py" +``` + +## Interpolate + +--8<-- "sdk/queries/functions.md:interpolate" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolate/interpolate.py" +``` + +## Interpolate At Time + +--8<-- "sdk/queries/functions.md:interpolateattime" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Interpolation-at-Time/interpolation_at_time.py" +``` + +## Time Weighted Average + +--8<-- "sdk/queries/functions.md:timeweightedaverage" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Time-Weighted-Average/time_weighted_average.py" +``` + + +## Circular Averages + +--8<-- "sdk/queries/functions.md:circularaverages" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Average/circular_average.py" +``` + +## Circular Standard Deviation + +--8<-- "sdk/queries/functions.md:circularstandarddeviation" + +```python +--8<-- 
"https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Circular-Standard-Deviation/circular_standard_deviation.py" +``` + +## Latest + +--8<-- "sdk/queries/functions.md:latest" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Latest/latest.py" +``` + +## Plot + +--8<-- "sdk/queries/functions.md:plot" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Plot/plot.py" +``` + +## Summary + +--8<-- "sdk/queries/functions.md:summary" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Summary/summary.py" +``` + + +## Metadata + +--8<-- "sdk/queries/functions.md:metadata" + +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/TimeSeriesQueryBuilder/Metadata/metadata.py" +``` + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [X] Connectors + * [ ] Queries + + [X] Time Series + + [ ] SQL + + [ ] Weather + + [ ] Exercise +- [ ] APIs \ No newline at end of file diff --git a/docs/university/essentials/sdk/queries/weather.md b/docs/university/essentials/sdk/queries/weather.md new file mode 100644 index 000000000..c2bcf8604 --- /dev/null +++ b/docs/university/essentials/sdk/queries/weather.md @@ -0,0 +1,32 @@ +## Raw Point +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Raw-Point/raw_point.py" +``` + +## Raw Grid +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Raw-Grid/raw_grid.py" +``` + +## Latest Point +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Latest-Point/latest_point.py" +``` + +## Latest Grid +```python +--8<-- "https://raw.githubusercontent.com/rtdip/samples/main/queries/WeatherQueryBuilder/Latest-Grid/latest_grid.py" +``` + +## Course Progress +- [X] Overview +- [ ] SDK + * [X] Getting Started + * [X] Authentication + * [X] Connectors + * [ ] Queries + + [X] Time Series + + [X] SQL + + [X] Weather + + [ ] Exercise +- [ ] APIs \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index cee646a19..9da19477d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -116,7 +116,13 @@ markdown_extensions: emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_generator: !!python/name:material.extensions.emoji.to_svg # Page tree - pymdownx.snippets: + base_path: + - docs + - src/sdk + check_paths: true url_download: true + - pymdownx.tasklist: + custom_checkbox: true nav: - Home: index.md @@ -321,4 +327,27 @@ nav: - Releases: - core: releases/core.md - Blog: - - blog/index.md \ No newline at end of file + - blog/index.md + - University: + - RTDIP Essentials: + - Overview: university/essentials/rtdip/overview.md + - SDK: + - Getting Started: + - Introduction: university/essentials/sdk/getting-started/introduction.md + - Prerequisites: university/essentials/sdk/getting-started/prerequisites.md + - Installation: university/essentials/sdk/getting-started/installation.md + - Exercise: university/essentials/sdk/getting-started/exercise.md + - Authentication: + - Azure Active Directory: university/essentials/sdk/authentication/azure.md + - Databricks: university/essentials/sdk/authentication/databricks.md + - Exercise: university/essentials/sdk/authentication/exercise.md + - Connectors: + - Databricks SQL: university/essentials/sdk/connectors/databricks-sql-connector.md 
+ - ODBC: university/essentials/sdk/connectors/odbc-connectors.md + - Spark: university/essentials/sdk/connectors/spark-connector.md + - Exercise: university/essentials/sdk/connectors/exercise.md + - Queries: + - Time Series: university/essentials/sdk/queries/timeseries.md + - SQL: university/essentials/sdk/queries/sql.md + - Weather: university/essentials/sdk/queries/weather.md + - Exercise: university/essentials/sdk/queries/exercise.md diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py b/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py index cef0534c9..7e9fb3944 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/time_series_query_builder.py @@ -96,7 +96,7 @@ def source( TimeSeriesQueryBuilder() .connect(connection) .source( - source="{table_path}" + source="{tablename_or_path}" ) ) @@ -142,7 +142,7 @@ def raw( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .raw( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -150,7 +150,7 @@ def raw( ) ) - display(data) + print(data) ``` @@ -213,7 +213,7 @@ def resample( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .resample( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -224,7 +224,7 @@ def resample( ) ) - display(data) + print(data) ``` @@ -296,7 +296,7 @@ def plot( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .plot( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -306,7 +306,7 @@ def plot( ) ) - display(data) + print(data) ``` @@ -378,7 +378,7 @@ def interpolate( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .interpolate( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -390,7 +390,7 @@ def interpolate( ) ) - display(data) + print(data) ``` @@ -461,14 +461,14 @@ def interpolation_at_time( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .interpolation_at_time( tagname_filter=["{tag_name_1}", "{tag_name_2}"], timestamp_filter=["2023-01-01T09:30:00", "2023-01-02T12:00:00"], ) ) - display(data) + print(data) ``` @@ -538,7 +538,7 @@ def time_weighted_average( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .time_weighted_average( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -549,7 +549,7 @@ def time_weighted_average( ) ) - display(data) + print(data) ``` @@ -623,13 +623,13 @@ def metadata( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .metadata( tagname_filter=["{tag_name_1}", "{tag_name_2}"], ) ) - display(data) + print(data) ``` @@ -675,13 +675,13 @@ def latest( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .latest( tagname_filter=["{tag_name_1}", "{tag_name_2}"], ) ) - display(data) + print(data) ``` @@ -737,7 +737,7 @@ def circular_average( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .circular_average( tagname_filter=["{tag_name_1}", "{tag_name_2}"], 
start_date="2023-01-01", @@ -749,7 +749,7 @@ def circular_average( ) ) - display(data) + print(data) ``` @@ -824,7 +824,7 @@ def circular_standard_deviation( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .circular_standard_deviation( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -836,7 +836,7 @@ def circular_standard_deviation( ) ) - display(data) + print(data) ``` @@ -908,7 +908,7 @@ def summary( data = ( TimeSeriesQueryBuilder() .connect(connection) - .source("{table_path}") + .source("{tablename_or_path}") .summary( tagname_filter=["{tag_name_1}", "{tag_name_2}"], start_date="2023-01-01", @@ -916,7 +916,7 @@ def summary( ) ) - display(data) + print(data) ``` From bc7bd90b1bfb6500726b4c2be577edbdda961b59 Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 31 Jul 2024 14:55:55 +0100 Subject: [PATCH 2/4] Interpolate At Time Update Signed-off-by: GBBBAS --- .../rtdip_sdk/queries/time_series/_time_series_query_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py b/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py index 29817372c..ac7f9b7da 100644 --- a/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py +++ b/src/sdk/python/rtdip_sdk/queries/time_series/_time_series_query_builder.py @@ -446,7 +446,7 @@ def _interpolation_at_time(parameters_dict: dict) -> str: "explode(array('{{ tag_names | join('\\', \\'') }}')) AS `{{ tagname_column }}`) " "{% endif %} " ", interpolation_events AS (SELECT coalesce(a.`{{ tagname_column }}`, b.`{{ tagname_column }}`) AS `{{ tagname_column }}`, coalesce(a.`{{ timestamp_column }}`, b.`{{ timestamp_column }}`) AS `{{ timestamp_column }}`, a.`{{ timestamp_column }}` AS `Requested_{{ timestamp_column }}`, b.`{{ timestamp_column }}` AS `Found_{{ timestamp_column }}`, b.`{{ status_column }}`, b.`{{ value_column }}` FROM date_array a FULL OUTER JOIN raw_events b ON a.`{{ timestamp_column }}` = b.`{{ timestamp_column }}` AND a.`{{ tagname_column }}` = b.`{{ tagname_column }}`) " - ", interpolation_calculations AS (SELECT *, lag(`{{ timestamp_column }}`) OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Previous_{{ timestamp_column }}`, lag(`{{ value_column }}`) OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Previous_{{ value_column }}`, lead(`{{ timestamp_column }}`) OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Next_{{ timestamp_column }}`, lead(`{{ value_column }}`) OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Next_{{ value_column }}`, " + ", interpolation_calculations AS (SELECT *, lag(`Found_{{ timestamp_column }}`) IGNORE NULLS OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Previous_{{ timestamp_column }}`, lag(`{{ value_column }}`) IGNORE NULLS OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Previous_{{ value_column }}`, lead(`Found_{{ timestamp_column }}`) IGNORE NULLS OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Next_{{ timestamp_column }}`, lead(`{{ value_column }}`) IGNORE NULLS OVER (PARTITION BY `{{ tagname_column }}` ORDER BY `{{ timestamp_column }}`) AS `Next_{{ value_column }}`, " "CASE WHEN `Requested_{{ timestamp_column }}` = `Found_{{ timestamp_column }}` THEN `{{ value_column 
}}` WHEN `Next_{{ timestamp_column }}` IS NULL THEN `Previous_{{ value_column }}` WHEN `Previous_{{ timestamp_column }}` IS NULL AND `Next_{{ timestamp_column }}` IS NULL THEN NULL " "ELSE `Previous_{{ value_column }}` + ((`Next_{{ value_column }}` - `Previous_{{ value_column }}`) * ((unix_timestamp(`{{ timestamp_column }}`) - unix_timestamp(`Previous_{{ timestamp_column }}`)) / (unix_timestamp(`Next_{{ timestamp_column }}`) - unix_timestamp(`Previous_{{ timestamp_column }}`)))) END AS `Interpolated_{{ value_column }}` FROM interpolation_events) " ",project AS (SELECT `{{ tagname_column }}`, `{{ timestamp_column }}`, `Interpolated_{{ value_column }}` AS `{{ value_column }}` FROM interpolation_calculations WHERE `{{ timestamp_column }}` IN ( " From 2853c10841f4e5eebdb9557f8d8699fd5a43ae34 Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 31 Jul 2024 15:01:17 +0100 Subject: [PATCH 3/4] Update Tests Signed-off-by: GBBBAS --- .../rtdip_sdk/queries/_test_utils/sdk_test_objects.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py b/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py index a0f9c886f..29c4fbe90 100644 --- a/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py +++ b/tests/sdk/python/rtdip_sdk/queries/_test_utils/sdk_test_objects.py @@ -63,10 +63,10 @@ TWA_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT `TagName`, from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-02T23:59:59+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) ,date_array AS (SELECT explode(sequence(from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000"), from_utc_timestamp(to_timestamp("2011-01-02T23:59:59+00:00"), "+0000"), INTERVAL \'15 minute\')) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) ,boundary_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) ,window_buckets AS (SELECT `EventTime` AS window_start, LEAD(`EventTime`) OVER (ORDER BY `EventTime`) AS window_end FROM (SELECT distinct `EventTime` FROM date_array) ) ,window_events AS (SELECT /*+ RANGE_JOIN(b, 900 ) */ b.`TagName`, b.`EventTime`, a.window_start AS `WindowEventTime`, b.`Status`, b.`Value` FROM boundary_events b LEFT OUTER JOIN window_buckets a ON a.window_start <= b.`EventTime` AND a.window_end > b.`EventTime`) ,fill_status AS (SELECT *, last_value(`Status`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Status`, CASE WHEN `Fill_Status` <> "Bad" THEN `Value` ELSE null END AS `Good_Value` FROM window_events) ,fill_value AS (SELECT *, last_value(`Good_Value`, true) OVER (PARTITION BY `TagName` ORDER BY `EventTime` ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS `Fill_Value` FROM fill_status) ,fill_step AS (SELECT *, false AS Step FROM fill_value) ,interpolate AS (SELECT *, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY 
`EventTime` ) ELSE NULL END AS `Previous_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lag(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Previous_Fill_Value`, lead(`EventTime`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) AS `Next_EventTime`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN lead(`Fill_Value`) OVER ( PARTITION BY `TagName` ORDER BY `EventTime` ) ELSE NULL END AS `Next_Fill_Value`, CASE WHEN `Step` = false AND `Status` IS NULL AND `Value` IS NULL THEN `Previous_Fill_Value` + ( (`Next_Fill_Value` - `Previous_Fill_Value`) * ( ( unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`) ) / ( unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`) ) ) ) ELSE NULL END AS `Interpolated_Value`, coalesce(`Interpolated_Value`, `Fill_Value`) as `Event_Value` FROM fill_step ),twa_calculations AS (SELECT `TagName`, `EventTime`, `WindowEventTime`, `Step`, `Status`, `Value`, `Previous_EventTime`, `Previous_Fill_Value`, `Next_EventTime`, `Next_Fill_Value`, `Interpolated_Value`, `Fill_Status`, `Fill_Value`, `Event_Value`, lead(`Fill_Status`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Status` , CASE WHEN `Next_Status` <> "Bad" OR (`Fill_Status` <> "Bad" AND `Next_Status` = "Bad") THEN lead(`Event_Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) ELSE `Value` END AS `Next_Value_For_Status` , CASE WHEN `Fill_Status` <> "Bad" THEN `Next_Value_For_Status` ELSE 0 END AS `Next_Value` , CASE WHEN `Fill_Status` <> "Bad" AND `Next_Status` <> "Bad" THEN ((cast(`Next_EventTime` AS double) - cast(`EventTime` AS double)) / 60) WHEN `Fill_Status` <> "Bad" AND `Next_Status` = "Bad" THEN ((cast(`Next_EventTime` AS integer) - cast(`EventTime` AS double)) / 60) ELSE 0 END AS good_minutes , CASE WHEN Step == false THEN ((`Event_Value` + `Next_Value`) * 0.5) * good_minutes ELSE (`Event_Value` * good_minutes) END AS twa_value FROM interpolate) ,twa AS (SELECT `TagName`, `WindowEventTime` AS `EventTime`, sum(twa_value) / sum(good_minutes) AS `Value` from twa_calculations GROUP BY `TagName`, `WindowEventTime`) ,project AS (SELECT * FROM twa WHERE `EventTime` BETWEEN to_timestamp("2011-01-01T00:00:00") AND to_timestamp("2011-01-02T23:59:59")) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` ' # Interpolation at Time -IAT_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON 
a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` ' -IAT_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) , date_array AS (SELECT DISTINCT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(`TagName`)) AS `TagName` FROM raw_events) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` ' -IAT_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` 
FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` ' -IAT_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`EventTime`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN 
`Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` ' +IAT_MOCKED_QUERY = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` ' +IAT_MOCKED_QUERY_CHECK_TAGS = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND 
date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND UPPER(`TagName`) IN (\'MOCKED-TAGNAME\') ) , date_array AS (SELECT DISTINCT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(`TagName`)) AS `TagName` FROM raw_events) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT * FROM project ORDER BY `TagName`, `EventTime` ' +IAT_MOCKED_QUERY_PIVOT = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - 
`Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) ,pivot AS (SELECT * FROM (SELECT `EventTime`, `Value`, `TagName` AS `TagName` FROM project) PIVOT (FIRST(`Value`) FOR `TagName` IN (\'mocked-TAGNAME\' AS `mocked-TAGNAME`))) SELECT * FROM pivot ORDER BY `EventTime` ' +IAT_MOCKED_QUERY_UOM = 'WITH raw_events AS (SELECT DISTINCT from_utc_timestamp(to_timestamp(date_format(`EventTime`, \'yyyy-MM-dd HH:mm:ss.SSS\')), "+0000") AS `EventTime`, `TagName`, `Status`, `Value` FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_events_mocked-data-type` WHERE to_date(`EventTime`) BETWEEN date_sub(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND date_add(to_date(to_timestamp("2011-01-01T00:00:00+00:00")), 1) AND `TagName` IN (\'mocked-TAGNAME\') ) , date_array AS (SELECT explode(array( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") )) AS `EventTime`, explode(array(\'mocked-TAGNAME\')) AS `TagName`) , interpolation_events AS (SELECT coalesce(a.`TagName`, b.`TagName`) AS `TagName`, coalesce(a.`EventTime`, b.`EventTime`) AS `EventTime`, a.`EventTime` AS `Requested_EventTime`, b.`EventTime` AS `Found_EventTime`, b.`Status`, b.`Value` FROM date_array a FULL OUTER JOIN raw_events b ON a.`EventTime` = b.`EventTime` AND a.`TagName` = b.`TagName`) , interpolation_calculations AS (SELECT *, lag(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_EventTime`, lag(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Previous_Value`, lead(`Found_EventTime`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_EventTime`, lead(`Value`) IGNORE NULLS OVER (PARTITION BY `TagName` ORDER BY `EventTime`) AS `Next_Value`, CASE WHEN `Requested_EventTime` = `Found_EventTime` THEN `Value` WHEN `Next_EventTime` IS NULL THEN `Previous_Value` WHEN `Previous_EventTime` IS NULL AND `Next_EventTime` IS NULL THEN NULL ELSE `Previous_Value` + ((`Next_Value` - `Previous_Value`) * ((unix_timestamp(`EventTime`) - unix_timestamp(`Previous_EventTime`)) / (unix_timestamp(`Next_EventTime`) - unix_timestamp(`Previous_EventTime`)))) END AS `Interpolated_Value` FROM interpolation_events) ,project AS (SELECT `TagName`, `EventTime`, `Interpolated_Value` AS `Value` FROM interpolation_calculations WHERE `EventTime` IN ( from_utc_timestamp(to_timestamp("2011-01-01T00:00:00+00:00"), "+0000") ) ) SELECT p.`EventTime`, p.`TagName`, p.`Value`, m.`UoM` FROM project p LEFT OUTER JOIN `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` m ON p.`TagName` = m.`TagName` ORDER BY `TagName`, `EventTime` ' # Metadata METADATA_MOCKED_QUERY = "SELECT * FROM `mocked-buiness-unit`.`sensors`.`mocked-asset_mocked-data-security-level_metadata` WHERE `TagName` IN ('mocked-TAGNAME') ORDER BY `TagName` " From cfaed7ace7993263b5e229ea7eab2afdc0d0972b Mon Sep 17 00:00:00 2001 From: GBBBAS Date: Wed, 31 Jul 2024 15:59:18 +0100 Subject: [PATCH 4/4] Remove Uni Signed-off-by: GBBBAS --- mkdocs.yml | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/mkdocs.yml 
b/mkdocs.yml index 9da19477d..ef67bfe05 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -328,26 +328,26 @@ nav: - core: releases/core.md - Blog: - blog/index.md - - University: - - RTDIP Essentials: - - Overview: university/essentials/rtdip/overview.md - - SDK: - - Getting Started: - - Introduction: university/essentials/sdk/getting-started/introduction.md - - Prerequisites: university/essentials/sdk/getting-started/prerequisites.md - - Installation: university/essentials/sdk/getting-started/installation.md - - Exercise: university/essentials/sdk/getting-started/exercise.md - - Authentication: - - Azure Active Directory: university/essentials/sdk/authentication/azure.md - - Databricks: university/essentials/sdk/authentication/databricks.md - - Exercise: university/essentials/sdk/authentication/exercise.md - - Connectors: - - Databricks SQL: university/essentials/sdk/connectors/databricks-sql-connector.md - - ODBC: university/essentials/sdk/connectors/odbc-connectors.md - - Spark: university/essentials/sdk/connectors/spark-connector.md - - Exercise: university/essentials/sdk/connectors/exercise.md - - Queries: - - Time Series: university/essentials/sdk/queries/timeseries.md - - SQL: university/essentials/sdk/queries/sql.md - - Weather: university/essentials/sdk/queries/weather.md - - Exercise: university/essentials/sdk/queries/exercise.md +# - University: +# - RTDIP Essentials: +# - Overview: university/essentials/rtdip/overview.md +# - SDK: +# - Getting Started: +# - Introduction: university/essentials/sdk/getting-started/introduction.md +# - Prerequisites: university/essentials/sdk/getting-started/prerequisites.md +# - Installation: university/essentials/sdk/getting-started/installation.md +# - Exercise: university/essentials/sdk/getting-started/exercise.md +# - Authentication: +# - Azure Active Directory: university/essentials/sdk/authentication/azure.md +# - Databricks: university/essentials/sdk/authentication/databricks.md +# - Exercise: university/essentials/sdk/authentication/exercise.md +# - Connectors: +# - Databricks SQL: university/essentials/sdk/connectors/databricks-sql-connector.md +# - ODBC: university/essentials/sdk/connectors/odbc-connectors.md +# - Spark: university/essentials/sdk/connectors/spark-connector.md +# - Exercise: university/essentials/sdk/connectors/exercise.md +# - Queries: +# - Time Series: university/essentials/sdk/queries/timeseries.md +# - SQL: university/essentials/sdk/queries/sql.md +# - Weather: university/essentials/sdk/queries/weather.md +# - Exercise: university/essentials/sdk/queries/exercise.md
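
The interpolation-at-time change in PATCH 2/4 above alters only the generated SQL; the public query builder API is untouched. As a minimal sketch of how that query path is exercised end to end, the example below follows the docstring examples in `time_series_query_builder.py`: the import paths and the Azure Databricks token scope are taken from those docstrings, while the server hostname, HTTP path, table name, and tag names are `{placeholder}` values to substitute before running.

```python
# Minimal sketch, assuming the import paths and connection pattern shown in
# the time_series_query_builder.py docstrings; every {placeholder} below must
# be replaced with real connection details before this will run.
from rtdip_sdk.authentication.azure import DefaultAuth
from rtdip_sdk.connectors import DatabricksSQLConnection
from rtdip_sdk.queries import TimeSeriesQueryBuilder

# Authenticate via Azure AD; the GUID is the Azure Databricks resource scope
# used throughout the SDK documentation.
auth = DefaultAuth().authenticate()
token = auth.get_token("2ff814a6-3304-4ab8-85cb-cd0e6f879c1d/.default").token

connection = DatabricksSQLConnection("{server_hostname}", "{http_path}", token)

# Interpolate values at two specific timestamps. With the PATCH 2/4 change,
# the previous/next events used for interpolation are found with
# lag(...) IGNORE NULLS / lead(...) IGNORE NULLS, so the NULL rows produced
# by the FULL OUTER JOIN against the requested timestamps are skipped.
data = (
    TimeSeriesQueryBuilder()
    .connect(connection)
    .source("{tablename_or_path}")
    .interpolation_at_time(
        tagname_filter=["{tag_name_1}", "{tag_name_2}"],
        timestamp_filter=["2023-01-01T09:30:00", "2023-01-02T12:00:00"],
    )
)

print(data)
```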