Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit c3d92d4

Browse files
Some updates
1 parent bd4937d commit c3d92d4

File tree

4 files changed

+106
-98
lines changed

4 files changed

+106
-98
lines changed

discos_client/client.py

Lines changed: 9 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,11 @@
44
import weakref
55
from concurrent.futures import ProcessPoolExecutor, Future
66
from collections import defaultdict
7-
from typing import Any, Tuple, Dict
7+
from typing import Tuple
88
import zmq
99
from .namespace import DISCOSNamespace
10-
from .utils import rand_id, initialize_worker
11-
from .merger import SchemaMerger
10+
from .utils import rand_id
11+
from .merger import SchemaMerger, initialize_worker
1212

1313

1414
class DISCOSClient:
@@ -77,9 +77,7 @@ def __init__(
7777
topics = merger.get_topics()
7878
self._topics = list(topics)
7979
for t in self._topics:
80-
self.__update_namespace__(t, DISCOSNamespace(
81-
**merger.merge_schema(t, {})
82-
))
80+
self.__dict__[t] = merger.merge_schema(t)
8381
self._socket.subscribe(f'{self._client_id}{t}')
8482
self._recv_thread.start()
8583

@@ -142,18 +140,18 @@ def __recv__(self_ref: weakref.ReferenceType["DISCOSClient"]) -> None:
142140
@staticmethod
143141
def __merge_task__(
144142
topic: str,
145-
payload: memoryview
146-
) -> Tuple[str, Dict[str, Any]]:
143+
payload: bytes
144+
) -> Tuple[str, DISCOSNamespace]:
147145
"""
148146
Performs the merging between payload and schema. This task can be very
149147
CPU expensive, therefore it is executed as a separate process.
150148
151149
:param topic: The topic on which the payload was received.
152-
:param payload: The memoryview object that points to the ZMQ payload.
150+
:param payload: The bytes containing the ZMQ payload.
153151
"""
154152
return topic, SchemaMerger.get_instance().merge_schema(
155153
topic,
156-
json.loads(payload)
154+
payload
157155
)
158156

159157
@staticmethod
@@ -172,7 +170,6 @@ def __update_task__(
172170
return
173171
try:
174172
topic, payload = fut.result()
175-
payload = DISCOSNamespace(**payload)
176173
with self._locks[topic]:
177174
self.__update_namespace__(topic, payload)
178175
finally:
@@ -190,10 +187,7 @@ def __update_namespace__(
190187
:param payload: The new DISCOSNamespace object, used to update
191188
the current one if already present in self.__dict__.
192189
"""
193-
if topic in self.__dict__:
194-
self.__dict__[topic] <<= payload
195-
else:
196-
self.__dict__[topic] = payload
190+
self.__dict__[topic] <<= payload
197191

198192
def __repr__(self) -> str:
199193
"""

discos_client/merger.py

Lines changed: 55 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,18 @@
11
from __future__ import annotations
22
import re
33
import json
4+
import signal
45
from pathlib import Path
56
from typing import Any
67
from importlib.resources import files
78
from collections.abc import Iterable
9+
from .utils import META_KEYS
10+
from .namespace import DISCOSNamespace
811

912

10-
META_KEYS = ("type", "title", "description", "format", "unit", "enum")
11-
1213
__all__ = [
1314
"SchemaMerger",
15+
"initialize_worker"
1416
]
1517

1618

@@ -22,33 +24,37 @@ def __init__(self, telescope: str | None = None):
2224
self._pp_cache: \
2325
dict[int, list[tuple[str, "re.Pattern", str, dict]]] = {}
2426
self.schemas, definitions, self.node_to_id = \
25-
self.__load_schemas(base_dir, telescope)
27+
self._load_schemas(base_dir, telescope)
2628

2729
for def_id, definition in definitions.items():
28-
definition = self.__absolutize_refs(definition, base_dir, def_id)
29-
definition = self.__expand_refs(definition, definitions)
30-
definition = self.__merge_all_of(definition)
31-
self.__precompile_patternprops(definition)
30+
definition = self._absolutize_refs(definition, base_dir, def_id)
31+
definition = self._expand_refs(definition, definitions)
32+
definition = self._merge_all_of(definition)
33+
self._precompile_patternprops(definition)
3234
definitions[def_id] = definition
3335

3436
for schema_id, schema in self.schemas.items():
35-
schema = self.__absolutize_refs(schema, base_dir, schema_id)
36-
schema = self.__expand_refs(schema, definitions)
37-
schema = self.__merge_all_of(schema)
37+
schema = self._absolutize_refs(schema, base_dir, schema_id)
38+
schema = self._expand_refs(schema, definitions)
39+
schema = self._merge_all_of(schema)
3840
schema.pop("$defs", None)
39-
self.__precompile_patternprops(schema)
41+
self._precompile_patternprops(schema)
4042
self.schemas[schema_id] = schema
4143

4244
def merge_schema(
4345
self,
44-
name: str,
45-
message: dict[str, Any]
46+
topic: str,
47+
payload: bytes | None = None
4648
) -> dict[str, Any]:
47-
if name not in self.node_to_id: # pragma: no cover
48-
raise ValueError(f"Schema '{name}' was not loaded.")
49-
name = self.node_to_id[name]
50-
schema = self.schemas[name]
51-
return self._enrich_object(schema, message)
49+
if topic not in self.node_to_id: # pragma: no cover
50+
raise ValueError(f"Schema '{topic}' was not loaded.")
51+
topic = self.node_to_id[topic]
52+
schema = self.schemas[topic]
53+
if not payload:
54+
payload = self._enrich_object(schema, {})
55+
else:
56+
payload = self._enrich_object(schema, json.loads(payload))
57+
return DISCOSNamespace(**payload)
5258

5359
def _literal_prefix(self, pat: str) -> str:
5460
i = 0
@@ -64,16 +70,16 @@ def _literal_prefix(self, pat: str) -> str:
6470
i += 1
6571
return ''.join(out)
6672

67-
def __precompile_patternprops(self, obj: dict | list) -> None:
68-
for d in self.__walk_dicts(obj):
73+
def _precompile_patternprops(self, obj: dict | list) -> None:
74+
for d in self._walk_dicts(obj):
6975
pp = d.get("patternProperties")
7076
if not isinstance(pp, dict) or not pp:
7177
continue
7278
key = id(pp)
7379
if key not in self._pp_cache:
74-
self._pp_cache[key] = self.__build_pp_list(pp)
80+
self._pp_cache[key] = self._build_pp_list(pp)
7581

76-
def __walk_dicts(self, root: dict | list) -> Iterable[dict]:
82+
def _walk_dicts(self, root: dict | list) -> Iterable[dict]:
7783
stack: list[dict | list] = [root]
7884
while stack:
7985
cur = stack.pop()
@@ -87,7 +93,7 @@ def __walk_dicts(self, root: dict | list) -> Iterable[dict]:
8793
if isinstance(v, (dict, list)):
8894
stack.append(v)
8995

90-
def __build_pp_list(
96+
def _build_pp_list(
9197
self,
9298
pp: dict
9399
) -> list[tuple[str, re.Pattern | None, str, dict]]:
@@ -101,7 +107,7 @@ def __build_pp_list(
101107
compiled.append((pat, rx, pref, pschema))
102108
return compiled
103109

104-
def __load_schemas(
110+
def _load_schemas(
105111
self,
106112
base_dir: Path,
107113
telescope: str | None
@@ -119,7 +125,7 @@ def __load_schemas(
119125
if f.is_file() and f.name.endswith(".json"):
120126
rel_path = f.resolve().relative_to(base_dir).as_posix()
121127
schema = json.loads(f.read_text(encoding="utf-8"))
122-
self.__absolutize_refs(schema, base_dir, rel_path)
128+
self._absolutize_refs(schema, base_dir, rel_path)
123129
schema_id = schema.get("$id", rel_path)
124130
definitions[schema_id] = schema
125131
for d in schemas_dirs:
@@ -128,7 +134,7 @@ def __load_schemas(
128134
rel_path = \
129135
f.resolve().relative_to(base_dir).as_posix()
130136
schema = json.loads(f.read_text(encoding="utf-8"))
131-
self.__absolutize_refs(schema, base_dir, rel_path)
137+
self._absolutize_refs(schema, base_dir, rel_path)
132138
schema_id = schema.get("$id", rel_path)
133139
node_name = schema.get("node")
134140
if not node_name: # pragma: no cover
@@ -139,7 +145,7 @@ def __load_schemas(
139145
definitions[f"{schema_id}#/$defs/{k}"] = v
140146
return schemas, definitions, node_to_id
141147

142-
def __absolutize_refs(
148+
def _absolutize_refs(
143149
self,
144150
schema: dict[str, Any],
145151
base_dir: Path,
@@ -148,7 +154,7 @@ def __absolutize_refs(
148154
def recurse(obj: Any):
149155
if isinstance(obj, dict):
150156
if "$ref" in obj:
151-
obj["$ref"] = self.__normalize_ref(
157+
obj["$ref"] = self._normalize_ref(
152158
obj["$ref"],
153159
base_dir,
154160
Path(current_file)
@@ -161,7 +167,7 @@ def recurse(obj: Any):
161167
recurse(schema)
162168
return schema
163169

164-
def __normalize_ref(
170+
def _normalize_ref(
165171
self,
166172
ref: str,
167173
base_dir: Path,
@@ -183,7 +189,7 @@ def __normalize_ref(
183189
result = result.as_posix()
184190
return f"{result}#{fragment}" if fragment else result
185191

186-
def __expand_refs(
192+
def _expand_refs(
187193
self,
188194
schema: dict[str, Any],
189195
definitions: dict[str, Any]
@@ -206,7 +212,7 @@ def recurse(obj: Any):
206212
return obj
207213
return recurse(schema)
208214

209-
def __merge_all_of(self, schema: dict[str, Any]) -> dict[str, Any]:
215+
def _merge_all_of(self, schema: dict[str, Any]) -> dict[str, Any]:
210216
def recurse(obj: Any):
211217
if isinstance(obj, dict):
212218
if "allOf" in obj:
@@ -226,7 +232,7 @@ def _merge_subschemas(
226232
merged: dict[str, Any] = {}
227233
required_fields: set[str] = set()
228234
for subschema in subschemas:
229-
subschema = self.__merge_all_of(subschema)
235+
subschema = self._merge_all_of(subschema)
230236
merged.setdefault("properties", {}).update(
231237
subschema.get("properties", {})
232238
)
@@ -261,7 +267,7 @@ def _merge_with_parent(
261267
merged[k] = v
262268
return merged
263269

264-
def __score_candidate(
270+
def _score_candidate(
265271
self,
266272
message: dict[str, Any],
267273
candidate: dict[str, Any]
@@ -291,7 +297,7 @@ def __score_candidate(
291297
)
292298
return common_keys + pattern_matches
293299

294-
def __expand_schema_keywords(
300+
def _expand_schema_keywords(
295301
self,
296302
obj: dict[str, Any],
297303
message: dict[str, Any]
@@ -300,7 +306,7 @@ def __expand_schema_keywords(
300306
best_score = -1
301307
best_candidate = None
302308
for candidate in obj["anyOf"]:
303-
score = self.__score_candidate(message, candidate)
309+
score = self._score_candidate(message, candidate)
304310
if score is not None and score > best_score:
305311
best_score = score
306312
best_candidate = candidate
@@ -312,7 +318,7 @@ def __expand_schema_keywords(
312318
return {}
313319
return obj
314320

315-
def __replace_patterns_with_properties(
321+
def _replace_patterns_with_properties(
316322
self,
317323
schema: dict[str, Any],
318324
message: dict[str, Any]
@@ -345,28 +351,28 @@ def __replace_patterns_with_properties(
345351

346352
return out
347353

348-
def __enrich_properties(
354+
def _enrich_properties(
349355
self,
350356
schema: dict[str, Any],
351357
values: dict[str, Any],
352358
) -> dict[str, Any]:
353-
schema = self.__expand_schema_keywords(schema, values)
354-
schema = self.__replace_patterns_with_properties(schema, values)
359+
schema = self._expand_schema_keywords(schema, values)
360+
schema = self._replace_patterns_with_properties(schema, values)
355361
properties = schema.get("properties", {})
356362
required = set(schema.get("required", []))
357363
result = {}
358364
for key, prop_schema in properties.items():
359365
if key in required or key in values:
360366
prop_value = values.get(key, {})
361-
prop_schema = self.__expand_schema_keywords(
367+
prop_schema = self._expand_schema_keywords(
362368
prop_schema,
363369
prop_value if isinstance(prop_value, dict) else {}
364370
)
365-
prop_schema = self.__replace_patterns_with_properties(
371+
prop_schema = self._replace_patterns_with_properties(
366372
prop_schema,
367373
values.get(key, {})
368374
)
369-
result[key] = self.__enrich_named_property(
375+
result[key] = self._enrich_named_property(
370376
key, prop_schema, values
371377
)
372378
return result
@@ -394,7 +400,7 @@ def _enrich_object(
394400
if obj_value is None and not required:
395401
return self._meta(obj_schema)
396402
nested_values = obj_value if isinstance(obj_value, dict) else {}
397-
nested = self.__enrich_properties(obj_schema, nested_values)
403+
nested = self._enrich_properties(obj_schema, nested_values)
398404
meta = self._meta(obj_schema)
399405
if nested:
400406
meta.update(nested)
@@ -419,7 +425,7 @@ def _enrich_array(
419425
out["value"] = out_list
420426
return out
421427

422-
def __enrich_named_property(
428+
def _enrich_named_property(
423429
self,
424430
key: str,
425431
schema: dict[str, Any],
@@ -444,3 +450,8 @@ def get_instance(telescope: str | None = None):
444450
if SchemaMerger._instance is None:
445451
SchemaMerger._instance = SchemaMerger(telescope)
446452
return SchemaMerger._instance
453+
454+
455+
def initialize_worker(telescope: str | None = None):
456+
signal.signal(signal.SIGINT, signal.SIG_IGN)
457+
SchemaMerger.get_instance(telescope)

discos_client/namespace.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,7 @@
55
from collections.abc import Iterable
66
from typing import Any, Callable, Iterator
77
from .utils import delegated_operations, delegated_comparisons
8-
from .utils import public_dict
9-
from .merger import META_KEYS
8+
from .utils import public_dict, META_KEYS
109

1110

1211
__all__ = ["DISCOSNamespace"]
@@ -669,3 +668,16 @@ def __dir__(self) -> None:
669668
attrs.discard("get_value")
670669
attrs = set(dir(value)).union(attrs)
671670
return sorted(attrs)
671+
672+
def __getstate__(self):
673+
state = self.__dict__.copy()
674+
state.pop("_lock", None)
675+
state.pop("_observers_lock", None)
676+
state.pop("_observers", None)
677+
return state
678+
679+
def __setstate__(self, state):
680+
self.__dict__.update(state)
681+
object.__setattr__(self, "_lock", threading.RLock())
682+
object.__setattr__(self, "_observers", {})
683+
object.__setattr__(self, "_observers_lock", threading.Lock())

0 commit comments

Comments
 (0)