4 changes: 2 additions & 2 deletions environment.yml
@@ -47,8 +47,8 @@ dependencies:
   - openai>=1.13.3,<2.0.0
   - mkdocs-material==9.5.20
   - mkdocs-material-extensions==1.3.1
-  - mkdocstrings==0.22.0
-  - mkdocstrings-python==1.4.0
+  - mkdocstrings==0.25.0
+  - mkdocstrings-python==1.10.8
   - mkdocs-macros-plugin==1.0.1
   - pygments==2.16.1
   - pymdown-extensions==10.8.1
11 changes: 7 additions & 4 deletions src/api/v1/batch.py
@@ -18,7 +18,7 @@
 from src.sdk.python.rtdip_sdk.queries.time_series import batch

 from src.api.v1.models import (
-    BaseQueryParams,
+    BatchBaseQueryParams,
     BaseHeaders,
     BatchBodyParams,
     BatchResponse,
@@ -128,8 +128,11 @@ async def batch_events_get(
     # Parse requests into dicts required by sdk
     parsed_requests = parse_batch_requests(batch_query_parameters.requests)

-    # Obtain max workers from environment var, otherwise default to one less than cpu count
-    max_workers = os.environ.get("BATCH_THREADPOOL_WORKERS", os.cpu_count() - 1)
+    # Obtain max workers from environment var, otherwise default to 10
+    max_workers = os.environ.get("BATCH_THREADPOOL_WORKERS", 10)
+
+    # ensure max_workers is an integer
+    max_workers = int(max_workers)

     # Request the data for each concurrently with threadpool
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
@@ -172,7 +175,7 @@ async def batch_events_get(
     },
 )
 async def batch_post(
-    base_query_parameters: BaseQueryParams = Depends(),
+    base_query_parameters: BatchBaseQueryParams = Depends(),
     batch_query_parameters: BatchBodyParams = Body(default=...),
     base_headers: BaseHeaders = Depends(),
     limit_offset_query_parameters: LimitOffsetQueryParams = Depends(),
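Note on the worker-count change: os.environ.get returns a str whenever BATCH_THREADPOOL_WORKERS is set, and only falls back to the int default of 10, so the explicit int() cast is what keeps ThreadPoolExecutor(max_workers=...) from receiving a string. A minimal standalone sketch of the pattern (the helper name and task list are illustrative, not part of the PR):

import os
from concurrent.futures import ThreadPoolExecutor


def get_max_workers(env_var: str = "BATCH_THREADPOOL_WORKERS", default: int = 10) -> int:
    # Environment variables are always strings when set, so cast before use.
    return int(os.environ.get(env_var, default))


def run_concurrently(tasks):
    # Fan independent callables out over a bounded thread pool.
    with ThreadPoolExecutor(max_workers=get_max_workers()) as executor:
        futures = [executor.submit(task) for task in tasks]
        return [future.result() for future in futures]

A side benefit of the fixed default of 10 is that it avoids the old expression's edge cases: os.cpu_count() can return None (making the subtraction raise TypeError) or 1 (yielding zero workers, which ThreadPoolExecutor rejects).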
3 changes: 3 additions & 0 deletions src/api/v1/common.py
@@ -285,6 +285,9 @@ def lookup_before_get(
     # make default workers 3 as within one query typically will request from only a few tables at once
     max_workers = os.environ.get("LOOKUP_THREADPOOL_WORKERS", 3)

+    # ensure max_workers is an integer
+    max_workers = int(max_workers)
+
     # run function with each parameters concurrently
     results = batch.get(connection, request_list, threadpool_max_workers=max_workers)

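This is the same guard as in batch.py: os.environ.get returns a string whenever LOOKUP_THREADPOOL_WORKERS is set, so the cast keeps threadpool_max_workers an int. A quick illustration of the failure mode the cast prevents (values here are hypothetical):

import os

os.environ["LOOKUP_THREADPOOL_WORKERS"] = "3"           # env values are always strings
raw = os.environ.get("LOOKUP_THREADPOOL_WORKERS", 3)    # -> "3" (str), not 3 (int)
max_workers = int(raw)                                  # cast restores the expected int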
10 changes: 10 additions & 0 deletions src/api/v1/models.py
@@ -253,6 +253,16 @@ def __init__(
         self.authorization = authorization


+class BatchBaseQueryParams:
+    def __init__(
+        self,
+        region: str = Query(..., description="Region"),
+        authorization: str = Depends(oauth2_scheme),
+    ):
+        self.region = region
+        self.authorization = authorization
+
+
 class MetadataQueryParams:
     def __init__(
         self,
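BatchBaseQueryParams is a plain class used as a FastAPI dependency: when a route declares it with Depends(), FastAPI reads region from the query string, resolves the bearer token via oauth2_scheme, and passes the populated instance to the handler. A minimal self-contained sketch of that wiring (the app and route below are illustrative, not the RTDIP endpoints):

from fastapi import Depends, FastAPI, Query
from fastapi.security import OAuth2PasswordBearer

app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


class BatchBaseQueryParams:
    def __init__(
        self,
        region: str = Query(..., description="Region"),
        authorization: str = Depends(oauth2_scheme),
    ):
        self.region = region
        self.authorization = authorization


@app.post("/events/batch")
async def batch_post(params: BatchBaseQueryParams = Depends()):
    # FastAPI builds params from the query string and the Authorization header.
    return {"region": params.region}

As the batch_post signature in the batch.py diff shows, the per-request query details travel in the body via BatchBodyParams, so this dependency only needs region and the OAuth2 token.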