diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c9b147f..f9a68af0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,20 @@
+## 1.40.0 [2024-01-30]
+
+### Features
+1. [#625](https://github.com/influxdata/influxdb-client-python/pull/625): Make class `Point` equatable
+
+### Bug Fixes
+1. [#562](https://github.com/influxdata/influxdb-client-python/pull/562): Use `ThreadPoolScheduler` for `WriteApi`'s batch subject instead of `TimeoutScheduler` to prevent creating unnecessary threads repeatedly
+1. [#631](https://github.com/influxdata/influxdb-client-python/pull/631): Fix logging of HTTP requests without query parameters
+
+### Documentation
+1. [#635](https://github.com/influxdata/influxdb-client-python/pull/635): Fix rendering of `README.rst` on GitHub
+
+## 1.39.0 [2023-12-05]
+
+### Features
+1. [#616](https://github.com/influxdata/influxdb-client-python/pull/616): Add `find_tasks_iter` function that allows iterating through all pages of tasks.
+
 ## 1.38.0 [2023-10-02]
 
 ### Bug Fixes
diff --git a/README.rst b/README.rst
index a613a41a..e22f8eab 100644
--- a/README.rst
+++ b/README.rst
@@ -626,7 +626,7 @@ Queries
 The result retrieved by `QueryApi `_ could be formatted as a:
 
 1. Flux data structure: `FluxTable `_, `FluxColumn `_ and `FluxRecord `_
-2. :class:`~influxdb_client.client.flux_table.CSVIterator` which will iterate over CSV lines
+2. :code:`influxdb_client.client.flux_table.CSVIterator` which will iterate over CSV lines
 3. Raw unprocessed results as a ``str`` iterator
 4. `Pandas DataFrame `_
 
@@ -1403,12 +1403,12 @@ or use the ``[async]`` extra:
 Async APIs
 """"""""""
 
-All async APIs are available via :class:`~influxdb_client.client.influxdb_client_async.InfluxDBClientAsync`.
+All async APIs are available via :code:`influxdb_client.client.influxdb_client_async.InfluxDBClientAsync`.
 The ``async`` version of the client supports following asynchronous APIs:
 
-* :class:`~influxdb_client.client.write_api_async.WriteApiAsync`
-* :class:`~influxdb_client.client.query_api_async.QueryApiAsync`
-* :class:`~influxdb_client.client.delete_api_async.DeleteApiAsync`
+* :code:`influxdb_client.client.write_api_async.WriteApiAsync`
+* :code:`influxdb_client.client.query_api_async.QueryApiAsync`
+* :code:`influxdb_client.client.delete_api_async.DeleteApiAsync`
 * Management services into ``influxdb_client.service`` supports async operation
 
 and also check to readiness of the InfluxDB via ``/ping`` endpoint:
@@ -1432,7 +1432,7 @@ and also check to readiness of the InfluxDB via ``/ping`` endpoint:
 Async Write API
 """""""""""""""
 
-The :class:`~influxdb_client.client.write_api_async.WriteApiAsync` supports ingesting data as:
+The :code:`influxdb_client.client.write_api_async.WriteApiAsync` supports ingesting data as:
 
 * ``string`` or ``bytes`` that is formatted as a InfluxDB's line protocol
 * `Data Point `__ structure
@@ -1470,13 +1470,13 @@ The :class:`~influxdb_client.client.write_api_async.WriteApiAsync` supports inge
 
 Async Query API
 """""""""""""""
 
-The :class:`~influxdb_client.client.query_api_async.QueryApiAsync` supports retrieve data as:
+The :code:`influxdb_client.client.query_api_async.QueryApiAsync` supports retrieving data as:
 
-* List of :class:`~influxdb_client.client.flux_table.FluxTable`
-* Stream of :class:`~influxdb_client.client.flux_table.FluxRecord` via :class:`~typing.AsyncGenerator`
+* List of :code:`influxdb_client.client.flux_table.FluxTable`
+* Stream of :code:`influxdb_client.client.flux_table.FluxRecord` via :code:`typing.AsyncGenerator`
 * `Pandas DataFrame `_
-* Stream of `Pandas DataFrame `_ via :class:`~typing.AsyncGenerator`
-* Raw :class:`~str` output
+* Stream of `Pandas DataFrame `_ via :code:`typing.AsyncGenerator`
+* Raw :code:`str` output
 
 .. code-block:: python
diff --git a/conda/meta.yaml b/conda/meta.yaml
index 83a66a9c..a2291164 100644
--- a/conda/meta.yaml
+++ b/conda/meta.yaml
@@ -1,5 +1,5 @@
 {% set name = "influxdb_client" %}
-{% set version = "1.37.0" %}
+{% set version = "1.39.0" %}
 
 package:
@@ -7,8 +7,8 @@ package:
   version: {{ version }}
 
 source:
-  url: https://files.pythonhosted.org/packages/6d/3d/afc78200a27e3e64ee822296a1c8fe67a8fce40ea7b19b87a1af347a37b6/influxdb_client-1.37.0.tar.gz
-  sha256: 01ac44d6a16a965ae2e0fa3238e2edeb147c11935a89b61439c9a752458001da
+  url: https://files.pythonhosted.org/packages/f1/0e/d4da1d18316eab78b7041e60dbf4fe6062ae7e32dd55ed22bda316b1d217/influxdb_client-1.39.0.tar.gz
+  sha256: 6a534913523bd262f1928e4ff80046bf95e313c1694ce13e45fd17eea90fe691
 
 build:
   number: 0
diff --git a/examples/task_example.py b/examples/task_example.py
index 55595ba9..242dcf4d 100644
--- a/examples/task_example.py
+++ b/examples/task_example.py
@@ -25,3 +25,9 @@
     task_request = TaskCreateRequest(flux=flux, org=org, description="Task Description", status="active")
     task = tasks_api.create_task(task_create_request=task_request)
     print(task)
+
+    tasks = tasks_api.find_tasks_iter()
+
+    # print the id of every task
+    for task in tasks:
+        print(task.id)
diff --git a/influxdb_client/_sync/rest.py b/influxdb_client/_sync/rest.py
index 2d80de13..eadbf061 100644
--- a/influxdb_client/_sync/rest.py
+++ b/influxdb_client/_sync/rest.py
@@ -170,7 +170,7 @@ def request(self, method, url, query_params=None, headers=None,
             headers['Content-Type'] = 'application/json'
 
         if self.configuration.debug:
-            _BaseRESTClient.log_request(method, f"{url}?{urlencode(query_params)}")
+            _BaseRESTClient.log_request(method, f"{url}{'' if query_params is None else '?' + urlencode(query_params)}")
             _BaseRESTClient.log_headers(headers, '>>>')
             _BaseRESTClient.log_body(body, '>>>')
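For context, a minimal sketch of the logging fix in practice, mirroring the new test in `tests/test_InfluxDBClient.py` further below (the URL and token are placeholders):

import logging
import sys

from influxdb_client import InfluxDBClient

# debug=True enables request logging on the 'influxdb_client.client.http' logger.
client = InfluxDBClient("http://localhost:8086", "my-token", debug=True)
logging.getLogger("influxdb_client.client.http").addHandler(logging.StreamHandler(sys.stdout))

# /ping carries no query parameters; with the patched log_request call the
# logged request line is "GET http://localhost:8086/ping", with no stray suffix.
client.api_client.call_api('/ping', 'GET')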
diff --git a/influxdb_client/client/tasks_api.py b/influxdb_client/client/tasks_api.py
index dd85683b..9edb2ec9 100644
--- a/influxdb_client/client/tasks_api.py
+++ b/influxdb_client/client/tasks_api.py
@@ -11,6 +11,38 @@
     AddResourceMemberRequestBody, RunManually, Run, LogEvent
 
 
+class _Page:
+    def __init__(self, values, has_next, next_after):
+        self.has_next = has_next
+        self.values = values
+        self.next_after = next_after
+
+    @staticmethod
+    def empty():
+        return _Page([], False, None)
+
+    @staticmethod
+    def initial(after):
+        return _Page([], True, after)
+
+
+class _PageIterator:
+    def __init__(self, page: _Page, get_next_page):
+        self.page = page
+        self.get_next_page = get_next_page
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        if not self.page.values:
+            if self.page.has_next:
+                self.page = self.get_next_page(self.page)
+            if not self.page.values:
+                raise StopIteration
+        return self.page.values.pop(0)
+
+
 class TasksApi(object):
     """Implementation for '/api/v2/tasks' endpoint."""
 
@@ -25,7 +57,7 @@ def find_task_by_id(self, task_id) -> Task:
         return task
 
     def find_tasks(self, **kwargs):
-        """List all tasks.
+        """List all tasks, up to the set limit (max 500).
 
         :key str name: only returns tasks with the specified name
         :key str after: returns tasks after specified ID
@@ -37,6 +69,23 @@
         """
         return self._service.get_tasks(**kwargs).tasks
 
+    def find_tasks_iter(self, **kwargs):
+        """Iterate over all tasks with pagination.
+
+        :key str name: only returns tasks with the specified name
+        :key str after: returns tasks after specified ID
+        :key str user: filter tasks to a specific user ID
+        :key str org: filter tasks to a specific organization name
+        :key str org_id: filter tasks to a specific organization ID
+        :key int limit: the number of tasks in one page
+        :return: Tasks iterator
+        """
+
+        def get_next_page(page: _Page):
+            return self._find_tasks_next_page(page, **kwargs)
+
+        return iter(_PageIterator(_Page.initial(kwargs.get('after')), get_next_page))
+
     def create_task(self, task: Task = None, task_create_request: TaskCreateRequest = None) -> Task:
         """Create a new task."""
         if task_create_request is not None:
@@ -210,3 +259,16 @@ def get_logs(self, task_id: str) -> List['LogEvent']:
     def find_tasks_by_user(self, task_user_id):
         """List all tasks by user."""
         return self.find_tasks(user=task_user_id)
+
+    def _find_tasks_next_page(self, page: _Page, **kwargs):
+        if not page.has_next:
+            return _Page.empty()
+
+        args = {**kwargs, 'after': page.next_after} if page.next_after is not None else kwargs
+        tasks_response = self._service.get_tasks(**args)
+
+        tasks = tasks_response.tasks
+        has_next = tasks_response.links.next is not None
+        last_id = tasks[-1].id if tasks else None
+
+        return _Page(tasks, has_next, last_id)
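A short usage sketch of the new iterator (connection details are placeholders; the filters mirror `find_tasks`, with `limit` acting as the page size):

from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    tasks_api = client.tasks_api()

    # Pages of up to 100 tasks are fetched lazily as the loop advances;
    # _find_tasks_next_page follows each response's 'links.next' and
    # resumes after the last returned task ID.
    for task in tasks_api.find_tasks_iter(limit=100):
        print(task.id)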
diff --git a/influxdb_client/client/write/point.py b/influxdb_client/client/write/point.py
index 60ce7c40..31d44d5c 100644
--- a/influxdb_client/client/write/point.py
+++ b/influxdb_client/client/write/point.py
@@ -251,6 +251,18 @@ def __str__(self):
         """Create string representation of this Point."""
         return self.to_line_protocol()
 
+    def __eq__(self, other):
+        """Return true iff other is equal to self."""
+        if not isinstance(other, Point):
+            return False
+        # assume points are equal iff their instance fields are equal
+        return (self._tags == other._tags and
+                self._fields == other._fields and
+                self._name == other._name and
+                self._time == other._time and
+                self._write_precision == other._write_precision and
+                self._field_types == other._field_types)
+
 
 def _append_tags(tags):
     _return = []
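Condensed from the tests added at the end of this patch, the new `__eq__` behaves as follows:

from datetime import datetime, timezone

from influxdb_client import Point

ts = datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc)
point_a = Point("asd").tag("foo", "bar").field("value", 123.45).time(ts)
point_b = Point("asd").tag("foo", "bar").field("value", 123.45).time(ts)

# Same measurement, tags, fields, time and precision => equal points.
assert point_a == point_b
# A non-Point comparand is never equal.
assert point_a != "not a point but a string"

One side effect worth knowing: defining `__eq__` without `__hash__` makes Python set `__hash__` to `None`, so `Point` instances become unhashable and can no longer serve as dict keys or set members.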
diff --git a/influxdb_client/client/write_api.py b/influxdb_client/client/write_api.py
index 61242446..050a7a5c 100644
--- a/influxdb_client/client/write_api.py
+++ b/influxdb_client/client/write_api.py
@@ -258,7 +258,8 @@ def __init__(self,
         self._disposable = self._subject.pipe(
             # Split incoming data to windows by batch_size or flush_interval
             ops.window_with_time_or_count(count=write_options.batch_size,
-                                          timespan=timedelta(milliseconds=write_options.flush_interval)),
+                                          timespan=timedelta(milliseconds=write_options.flush_interval),
+                                          scheduler=ThreadPoolScheduler(1)),
             # Map window into groups defined by 'organization', 'bucket' and 'precision'
             ops.flat_map(lambda window: window.pipe(
                 # Group window by 'organization', 'bucket' and 'precision'
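The scheduler swap is transparent at the call site; batching writes are configured exactly as before. A sketch under the usual batching setup (connection details are placeholders; `ThreadPoolScheduler` comes from the `reactivex` package the client already depends on):

from influxdb_client import InfluxDBClient, Point, WriteOptions

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    # batch_size and flush_interval feed the window_with_time_or_count
    # operator patched above; its windows now run on one shared worker
    # thread instead of repeatedly spawning fresh timer threads.
    write_options = WriteOptions(batch_size=500, flush_interval=10_000)
    with client.write_api(write_options=write_options) as write_api:
        write_api.write(bucket="my-bucket", record=Point("measurement").field("value", 1))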
diff --git a/influxdb_client/version.py b/influxdb_client/version.py
index 3721fd19..f8c5bb31 100644
--- a/influxdb_client/version.py
+++ b/influxdb_client/version.py
@@ -1,3 +1,3 @@
 """Version of the Client that is used in User-Agent header."""
 
-VERSION = '1.38.0'
+VERSION = '1.40.0'
diff --git a/setup.py b/setup.py
index 5c63fa2d..546290de 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@
     'aioresponses>=0.7.3',
     'sphinx==1.8.5',
     'sphinx_rtd_theme',
-    'jinja2==3.1.2'
+    'jinja2==3.1.3'
 ]
 
 extra_requires = [
diff --git a/tests/test_InfluxDBClient.py b/tests/test_InfluxDBClient.py
index ca37291b..c2f9b0a5 100644
--- a/tests/test_InfluxDBClient.py
+++ b/tests/test_InfluxDBClient.py
@@ -415,6 +415,18 @@ def test_custom_debug_logging_handler(self):
         logger = logging.getLogger('influxdb_client.client.http')
         self.assertEqual(2, len(logger.handlers))
 
+    def test_debug_request_without_query_parameters(self):
+        httpretty.register_uri(httpretty.GET, uri="http://localhost/ping", status=200, body="")
+        self.influxdb_client = InfluxDBClient("http://localhost", "my-token", debug=True)
+
+        log_stream = StringIO()
+        logger = logging.getLogger("influxdb_client.client.http")
+        logger.addHandler(logging.StreamHandler(log_stream))
+
+        self.influxdb_client.api_client.call_api('/ping', 'GET')
+
+        self.assertIn("'GET http://localhost/ping'", log_stream.getvalue())
+
 
 class ServerWithSelfSingedSSL(http.server.SimpleHTTPRequestHandler):
     def _set_headers(self, response: bytes):
diff --git a/tests/test_TasksApi.py b/tests/test_TasksApi.py
index 2bea7659..dc936dfd 100644
--- a/tests/test_TasksApi.py
+++ b/tests/test_TasksApi.py
@@ -184,6 +184,37 @@ def test_find_task_by_user_id(self):
         print(tasks)
         self.assertEqual(len(tasks), 1)
 
+    def test_find_tasks_iter(self):
+        task_name = self.generate_name("it task")
+        num_of_tasks = 10
+
+        for _ in range(num_of_tasks):
+            self.tasks_api.create_task_cron(task_name, TASK_FLUX, "0 2 * * *", self.organization.id)
+
+        def count_unique_ids(tasks):
+            return len(set(map(lambda task: task.id, tasks)))
+
+        # get tasks in 3-4 batches
+        tasks = self.tasks_api.find_tasks_iter(name=task_name, limit=num_of_tasks // 3)
+        self.assertEqual(count_unique_ids(tasks), num_of_tasks)
+
+        # get tasks in one equally sized batch
+        tasks = self.tasks_api.find_tasks_iter(name=task_name, limit=num_of_tasks)
+        self.assertEqual(count_unique_ids(tasks), num_of_tasks)
+
+        # get tasks in one batch
+        tasks = self.tasks_api.find_tasks_iter(name=task_name, limit=num_of_tasks + 1)
+        self.assertEqual(count_unique_ids(tasks), num_of_tasks)
+
+        # get no tasks
+        tasks = self.tasks_api.find_tasks_iter(name=task_name + "blah")
+        self.assertEqual(count_unique_ids(tasks), 0)
+
+        # skip some tasks
+        *_, split_task = self.tasks_api.find_tasks(name=task_name, limit=num_of_tasks // 3)
+        tasks = self.tasks_api.find_tasks_iter(name=task_name, limit=3, after=split_task.id)
+        self.assertEqual(count_unique_ids(tasks), num_of_tasks - num_of_tasks // 3)
+
     def test_delete_task(self):
         task = self.tasks_api.create_task_cron(self.generate_name("it_task"), TASK_FLUX, "0 2 * * *",
                                                self.organization.id)
diff --git a/tests/test_point.py b/tests/test_point.py
index 992ac354..e799ae9c 100644
--- a/tests/test_point.py
+++ b/tests/test_point.py
@@ -557,6 +557,110 @@ def test_name_start_with_hash(self):
         self.assertEqual('#hash_start,location=europe level=2.2', point.to_line_protocol())
         self.assertEqual(1, len(warnings))
 
+    def test_equality_from_dict(self):
+        point_dict = {
+            "measurement": "h2o_feet",
+            "tags": {"location": "coyote_creek"},
+            "fields": {
+                "water_level": 1.0,
+                "some_counter": 108913123234
+            },
+            "field_types": {"some_counter": "float"},
+            "time": 1
+        }
+        point_a = Point.from_dict(point_dict)
+        point_b = Point.from_dict(point_dict)
+        self.assertEqual(point_a, point_b)
+
+    def test_equality(self):
+        # https://github.com/influxdata/influxdb-client-python/issues/623#issue-2048573579
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+
+        point_b = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        self.assertEqual(point_a, point_b)
+
+    def test_not_equal_if_tags_differ(self):
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+
+        point_b = (
+            Point("asd")
+            .tag("foo", "baz")  # not "bar"
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        self.assertNotEqual(point_a, point_b)
+
+    def test_not_equal_if_fields_differ(self):
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+
+        point_b = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 678.90)  # not 123.45
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        self.assertNotEqual(point_a, point_b)
+
+    def test_not_equal_if_measurements_differ(self):
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+
+        point_b = (
+            Point("fgh")  # not "asd"
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        self.assertNotEqual(point_a, point_b)
+
+    def test_not_equal_if_times_differ(self):
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+
+        point_b = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2024, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        self.assertNotEqual(point_a, point_b)
+
+    def test_not_equal_if_other_is_no_point(self):
+        point_a = (
+            Point("asd")
+            .tag("foo", "bar")
+            .field("value", 123.45)
+            .time(datetime(2023, 12, 19, 13, 27, 42, 215000, tzinfo=timezone.utc))
+        )
+        not_a_point = "not a point but a string"
+        self.assertNotEqual(point_a, not_a_point)
+
 
 if __name__ == '__main__':
     unittest.main()