From 1dd12d3d629df8945f9f28ea677b31ee6a743d88 Mon Sep 17 00:00:00 2001 From: gbischof Date: Tue, 24 May 2022 16:37:02 -0400 Subject: [PATCH 1/3] reserve time and seq_num keys --- event_model/schemas/event_descriptor.json | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/event_model/schemas/event_descriptor.json b/event_model/schemas/event_descriptor.json index 70787d91..7472d04a 100644 --- a/event_model/schemas/event_descriptor.json +++ b/event_model/schemas/event_descriptor.json @@ -97,7 +97,13 @@ }, "type": "object", "description": "This describes the data in the Event Documents.", - "title": "data_keys" + "title": "data_keys", + "not": { + "required": [ + "time", + "seq_num" + ] + } }, "uid": { "type": "string", From c4f4aa1e3db12e8612fb1c62b4df33648c6e26a0 Mon Sep 17 00:00:00 2001 From: gbischof Date: Tue, 24 May 2022 17:19:45 -0400 Subject: [PATCH 2/3] add a test --- event_model/schemas/event_descriptor.json | 6 +- event_model/tests/test_em.py | 723 +++++++++++++--------- 2 files changed, 428 insertions(+), 301 deletions(-) diff --git a/event_model/schemas/event_descriptor.json b/event_model/schemas/event_descriptor.json index 7472d04a..aaf2bd04 100644 --- a/event_model/schemas/event_descriptor.json +++ b/event_model/schemas/event_descriptor.json @@ -99,9 +99,9 @@ "description": "This describes the data in the Event Documents.", "title": "data_keys", "not": { - "required": [ - "time", - "seq_num" + "anyOf": [ + { "required": ["time"] }, + { "required": ["seq_num"] } ] } }, diff --git a/event_model/tests/test_em.py b/event_model/tests/test_em.py index 383e60d2..a4e889bb 100644 --- a/event_model/tests/test_em.py +++ b/event_model/tests/test_em.py @@ -6,6 +6,7 @@ import jsonschema import numpy import pytest +from jsonschema.exceptions import ValidationError JSONSCHEMA_2 = LooseVersion(jsonschema.__version__) < LooseVersion("3.0.0") @@ -13,9 +14,18 @@ def test_documents(): dn = event_model.DocumentNames - for k in ('stop', 'start', 'descriptor', - 'event', 'bulk_events', 'datum', - 'resource', 'bulk_datum', 'event_page', 'datum_page'): + for k in ( + "stop", + "start", + "descriptor", + "event", + "bulk_events", + "datum", + "resource", + "bulk_datum", + "event_page", + "datum_page", + ): assert dn(k) == getattr(dn, k) @@ -36,6 +46,41 @@ def test_schema_validators(): assert len(event_model.schema_validators) == len(event_model.schemas) +def test_reserved_keys(): + # time and seq_num are keys reserved by Bluesky, so they cannot be used in data_keys + bundle = event_model.compose_run() + start_doc, compose_descriptor, compose_resource, compose_stop = bundle + with pytest.raises(ValidationError): + bundle = compose_descriptor( + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "time": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) + with pytest.raises(ValidationError): + bundle = compose_descriptor( + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "seq_num": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) + + + def test_compose_run(): # Compose each kind of document type. 
These calls will trigger # jsonschema.validate and ensure that the document-generation code composes @@ -47,114 +92,129 @@ def test_compose_run(): assert bundle.compose_resource is compose_resource assert bundle.compose_stop is compose_stop bundle = compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) descriptor_doc, compose_event, compose_event_page = bundle assert bundle.descriptor_doc is descriptor_doc assert bundle.compose_event is compose_event assert bundle.compose_event_page is compose_event_page bundle = compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) resource_doc, compose_datum, compose_datum_page = bundle assert bundle.resource_doc is resource_doc assert bundle.compose_datum is compose_datum assert bundle.compose_datum_page is compose_datum_page - datum_doc = compose_datum(datum_kwargs={'slice': 5}) + datum_doc = compose_datum(datum_kwargs={"slice": 5}) event_doc = compose_event( - data={'motor': 0, 'image': datum_doc['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}) - datum_page = compose_datum_page(datum_kwargs={'slice': [10, 15]}) - event_page = compose_event_page(data={'motor': [1, 2], 'image': - datum_page['datum_id']}, - timestamps={'motor': [0, 0], - 'image': [0, 0]}, - filled={'image': [False, False]}, - seq_num=[1, 2]) - assert 'descriptor' in event_doc - assert 'descriptor' in event_page - assert event_doc['seq_num'] == 1 + data={"motor": 0, "image": datum_doc["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + ) + datum_page = compose_datum_page(datum_kwargs={"slice": [10, 15]}) + event_page = compose_event_page( + data={"motor": [1, 2], "image": datum_page["datum_id"]}, + timestamps={"motor": [0, 0], "image": [0, 0]}, + filled={"image": [False, False]}, + seq_num=[1, 2], + ) + assert "descriptor" in event_doc + assert "descriptor" in event_page + assert event_doc["seq_num"] == 1 stop_doc = compose_stop() - assert 'primary' in stop_doc['num_events'] - assert stop_doc['num_events']['primary'] == 3 + assert "primary" in stop_doc["num_events"] + assert stop_doc["num_events"]["primary"] == 3 def test_round_trip_pagination(): run_bundle = event_model.compose_run() desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) res_bundle = run_bundle.compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) - datum_doc1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum_doc2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) - datum_doc3 = res_bundle.compose_datum(datum_kwargs={'slice': 15}) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) + datum_doc1 = 
res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum_doc2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) + datum_doc3 = res_bundle.compose_datum(datum_kwargs={"slice": 15}) event_doc1 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc1['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) + data={"motor": 0, "image": datum_doc1["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) event_doc2 = desc_bundle.compose_event( - data={'motor': 1, 'image': datum_doc2['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) + data={"motor": 1, "image": datum_doc2["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) event_doc3 = desc_bundle.compose_event( - data={'motor': 2, 'image': datum_doc3['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) + data={"motor": 2, "image": datum_doc3["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) # Round trip single event -> event_page -> event. expected = event_doc1 - actual, = event_model.unpack_event_page( - event_model.pack_event_page(expected)) + (actual,) = event_model.unpack_event_page(event_model.pack_event_page(expected)) assert actual == expected # Round trip two events -> event_page -> events. expected = [event_doc1, event_doc2] - actual = list(event_model.unpack_event_page( - event_model.pack_event_page(*expected))) + actual = list(event_model.unpack_event_page(event_model.pack_event_page(*expected))) assert actual == expected # Round trip three events -> event_page -> events. expected = [event_doc1, event_doc2, event_doc3] - actual = list(event_model.unpack_event_page( - event_model.pack_event_page(*expected))) + actual = list(event_model.unpack_event_page(event_model.pack_event_page(*expected))) assert actual == expected # Round trip on docs that don't have a filled key unfilled_doc1 = event_doc1 - unfilled_doc1.pop('filled') + unfilled_doc1.pop("filled") unfilled_doc2 = event_doc2 - unfilled_doc2.pop('filled') + unfilled_doc2.pop("filled") unfilled_doc3 = event_doc3 - unfilled_doc3.pop('filled') + unfilled_doc3.pop("filled") expected = [unfilled_doc1, unfilled_doc2, unfilled_doc3] - actual = list(event_model.unpack_event_page( - event_model.pack_event_page(*expected))) + actual = list(event_model.unpack_event_page(event_model.pack_event_page(*expected))) for doc in actual: - doc.pop('filled') + doc.pop("filled") assert actual == expected # Round trip one datum -> datum_page -> datum. expected = datum_doc1 - actual, = event_model.unpack_datum_page( - event_model.pack_datum_page(expected)) + (actual,) = event_model.unpack_datum_page(event_model.pack_datum_page(expected)) assert actual == expected # Round trip two datum -> datum_page -> datum. expected = [datum_doc1, datum_doc2] - actual = list(event_model.unpack_datum_page( - event_model.pack_datum_page(*expected))) + actual = list(event_model.unpack_datum_page(event_model.pack_datum_page(*expected))) assert actual == expected # Round trip three datum -> datum_page -> datum. expected = [datum_doc1, datum_doc2, datum_doc3] - actual = list(event_model.unpack_datum_page( - event_model.pack_datum_page(*expected))) + actual = list(event_model.unpack_datum_page(event_model.pack_datum_page(*expected))) assert actual == expected # Check edge case where datum_kwargs are empty. 
@@ -164,57 +224,65 @@ def test_round_trip_pagination(): # Round trip one datum -> datum_page -> datum. expected = datum_doc1 - actual, = event_model.unpack_datum_page( - event_model.pack_datum_page(expected)) + (actual,) = event_model.unpack_datum_page(event_model.pack_datum_page(expected)) assert actual == expected # Round trip two datum -> datum_page -> datum. expected = [datum_doc1, datum_doc2] - actual = list(event_model.unpack_datum_page( - event_model.pack_datum_page(*expected))) + actual = list(event_model.unpack_datum_page(event_model.pack_datum_page(*expected))) assert actual == expected # Round trip three datum -> datum_page -> datum. expected = [datum_doc1, datum_doc2, datum_doc3] - actual = list(event_model.unpack_datum_page( - event_model.pack_datum_page(*expected))) + actual = list(event_model.unpack_datum_page(event_model.pack_datum_page(*expected))) assert actual == expected def test_bulk_events_to_event_page(tmp_path): run_bundle = event_model.compose_run() desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) desc_bundle_baseline = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='baseline') + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="baseline", + ) path_root = str(tmp_path) res_bundle = run_bundle.compose_resource( - spec='TIFF', root=path_root, resource_path='stack.tiff', - resource_kwargs={}) - datum_doc1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum_doc2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) + spec="TIFF", root=path_root, resource_path="stack.tiff", resource_kwargs={} + ) + datum_doc1 = res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum_doc2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) event1 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc1['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) + data={"motor": 0, "image": datum_doc1["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) event2 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc2['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=2) + data={"motor": 0, "image": datum_doc2["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=2, + ) event3 = desc_bundle_baseline.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) primary_event_page = event_model.pack_event_page(event1, event2) baseline_event_page = event_model.pack_event_page(event3) - bulk_events = {'primary': [event1, event2], 'baseline': [event3]} + bulk_events = {"primary": [event1, event2], "baseline": [event3]} pages = event_model.bulk_events_to_event_pages(bulk_events) assert tuple(pages) == (primary_event_page, baseline_event_page) @@ -222,28 +290,39 @@ def test_bulk_events_to_event_page(tmp_path): def test_sanitize_doc(): run_bundle = event_model.compose_run() desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 
'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) desc_bundle_baseline = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='baseline') + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="baseline", + ) event1 = desc_bundle.compose_event( - data={'motor': 0, 'image': numpy.ones((512, 512))}, - timestamps={'motor': 0, 'image': 0}, filled={'image': True}, - seq_num=1) + data={"motor": 0, "image": numpy.ones((512, 512))}, + timestamps={"motor": 0, "image": 0}, + filled={"image": True}, + seq_num=1, + ) event2 = desc_bundle.compose_event( - data={'motor': 0, 'image': numpy.ones((512, 512))}, - timestamps={'motor': 0, 'image': 0}, filled={'image': True}, - seq_num=2) + data={"motor": 0, "image": numpy.ones((512, 512))}, + timestamps={"motor": 0, "image": 0}, + filled={"image": True}, + seq_num=2, + ) event3 = desc_bundle_baseline.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) event_page = event_model.pack_event_page(event1, event2) - bulk_events = {'primary': [event1, event2], 'baseline': [event3]} + bulk_events = {"primary": [event1, event2], "baseline": [event3]} json.dumps(event_model.sanitize_doc(event_page)) json.dumps(event_model.sanitize_doc(bulk_events)) json.dumps(event_model.sanitize_doc(event1)) @@ -252,16 +331,17 @@ def test_sanitize_doc(): def test_bulk_datum_to_datum_page(): run_bundle = event_model.compose_run() res_bundle = run_bundle.compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) - datum1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) + datum1 = res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) actual = event_model.pack_datum_page(datum1, datum2) - bulk_datum = {'resource': res_bundle.resource_doc['uid'], - 'datum_kwarg_list': [datum1['datum_kwargs'], - datum2['datum_kwargs']], - 'datum_ids': [datum1['datum_id'], datum2['datum_id']]} + bulk_datum = { + "resource": res_bundle.resource_doc["uid"], + "datum_kwarg_list": [datum1["datum_kwargs"], datum2["datum_kwargs"]], + "datum_ids": [datum1["datum_id"], datum2["datum_id"]], + } expected = event_model.bulk_datum_to_datum_page(bulk_datum) assert actual == expected @@ -269,81 +349,103 @@ def test_bulk_datum_to_datum_page(): def test_document_router_smoke_test(): dr = event_model.DocumentRouter() run_bundle = event_model.compose_run() - dr('start', run_bundle.start_doc) + dr("start", run_bundle.start_doc) desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') - dr('descriptor', desc_bundle.descriptor_doc) + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + 
) + dr("descriptor", desc_bundle.descriptor_doc) desc_bundle_baseline = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='baseline') - dr('descriptor', desc_bundle_baseline.descriptor_doc) + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="baseline", + ) + dr("descriptor", desc_bundle_baseline.descriptor_doc) res_bundle = run_bundle.compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) - dr('resource', res_bundle.resource_doc) - datum_doc1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum_doc2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) - dr('datum', datum_doc1) - dr('datum', datum_doc2) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) + dr("resource", res_bundle.resource_doc) + datum_doc1 = res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum_doc2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) + dr("datum", datum_doc1) + dr("datum", datum_doc2) event1 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc1['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) - dr('event', event1) + data={"motor": 0, "image": datum_doc1["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) + dr("event", event1) event2 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc2['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=2) - dr('event', event2) + data={"motor": 0, "image": datum_doc2["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=2, + ) + dr("event", event2) event3 = desc_bundle_baseline.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) - dr('event', event3) - dr('stop', run_bundle.compose_stop()) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) + dr("event", event3) + dr("stop", run_bundle.compose_stop()) def test_document_router_with_validation(): dr = event_model.DocumentRouter() run_bundle = event_model.compose_run() - dr('start', run_bundle.start_doc, validate=True) + dr("start", run_bundle.start_doc, validate=True) desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') - dr('descriptor', desc_bundle.descriptor_doc, validate=True) + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) + dr("descriptor", desc_bundle.descriptor_doc, validate=True) desc_bundle_baseline = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='baseline') - dr('descriptor', desc_bundle_baseline.descriptor_doc, validate=True) + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="baseline", + ) + dr("descriptor", desc_bundle_baseline.descriptor_doc, validate=True) res_bundle = run_bundle.compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) - dr('resource', res_bundle.resource_doc, validate=True) - datum_doc1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum_doc2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) - dr('datum', 
datum_doc1, validate=True) - dr('datum', datum_doc2, validate=True) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) + dr("resource", res_bundle.resource_doc, validate=True) + datum_doc1 = res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum_doc2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) + dr("datum", datum_doc1, validate=True) + dr("datum", datum_doc2, validate=True) event1 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc1['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) - dr('event', event1, validate=True) + data={"motor": 0, "image": datum_doc1["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) + dr("event", event1, validate=True) event2 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc2['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=2) - dr('event', event2, validate=True) + data={"motor": 0, "image": datum_doc2["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=2, + ) + dr("event", event2, validate=True) event3 = desc_bundle_baseline.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) - dr('event', event3, validate=True) - dr('stop', run_bundle.compose_stop(), validate=True) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) + dr("event", event3, validate=True) + dr("stop", run_bundle.compose_stop(), validate=True) def test_document_router_dispatch_event(): @@ -352,18 +454,22 @@ def test_document_router_dispatch_event(): event_page_calls = [] # used for counting calls # example documents - event1 = {'data': {'x': 1}, - 'timestamps': {'x': 0.}, - 'uid': 'placeholder X', - 'descriptor': 'placeholder Y', - 'time': 0., - 'seq_num': 1} - event2 = {'data': {'x': 2}, - 'timestamps': {'x': 1.}, - 'uid': 'placeholder X', - 'descriptor': 'placeholder Y', - 'time': 1., - 'seq_num': 2} + event1 = { + "data": {"x": 1}, + "timestamps": {"x": 0.0}, + "uid": "placeholder X", + "descriptor": "placeholder Y", + "time": 0.0, + "seq_num": 1, + } + event2 = { + "data": {"x": 2}, + "timestamps": {"x": 1.0}, + "uid": "placeholder X", + "descriptor": "placeholder Y", + "time": 1.0, + "seq_num": 2, + } event_page = event_model.pack_event_page(event1, event2) def check(ret, original=None): @@ -373,8 +479,8 @@ def check(ret, original=None): if original is not None: # Verify that a copy is returned. assert doc is not original # ret is such a poser, dude. - doc.pop('filled', None) - original.pop('filled', None) + doc.pop("filled", None) + original.pop("filled", None) assert doc == original class DefinesNeitherEventNorEventPage(event_model.DocumentRouter): @@ -390,14 +496,14 @@ def event_page(self, doc): dr = DefinesNeitherEventNorEventPage() # Test that Event is routed to Event and EventPage. - check(dr('event', event1)) + check(dr("event", event1)) assert len(event_calls) == 1 assert len(event_page_calls) == 1 event_calls.clear() event_page_calls.clear() # Test that EventPage is routed to EventPage and Event *once* before # giving up. - check(dr('event_page', event_page)) + check(dr("event_page", event_page)) assert len(event_page_calls) == 1 assert len(event_calls) == 1 event_calls.clear() @@ -407,7 +513,7 @@ class DefinesEventNotEventPage(event_model.DocumentRouter): def event(self, doc): # Just a dumb test that check something particular to these example # documents. 
- assert doc['data']['x'] == doc['seq_num'] + assert doc["data"]["x"] == doc["seq_num"] event_calls.append(object()) return dict(doc) @@ -418,13 +524,13 @@ def event_page(self, doc): dr = DefinesEventNotEventPage() # Test that Event is routed to Event. - check(dr('event', event1), event1) + check(dr("event", event1), event1) assert len(event_calls) == 1 assert len(event_page_calls) == 0 event_calls.clear() event_page_calls.clear() # Test that EventPage is unpacked and routed to Event one at a time. - check(dr('event_page', event_page), event_page) + check(dr("event_page", event_page), event_page) assert len(event_page_calls) == 1 assert len(event_calls) == 2 event_calls.clear() @@ -439,19 +545,19 @@ def event(self, doc): def event_page(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['data']['x'][0] == 1 + assert doc["data"]["x"][0] == 1 event_page_calls.append(object()) return dict(doc) dr = DefinesEventPageNotEvent() # Test that Event is packed and routed to EventPage. - check(dr('event', event1), event1) + check(dr("event", event1), event1) assert len(event_calls) == 1 assert len(event_page_calls) == 1 event_calls.clear() event_page_calls.clear() # Test that EventPage is routed to EventPage. - check(dr('event_page', event_page), event_page) + check(dr("event_page", event_page), event_page) assert len(event_page_calls) == 1 assert len(event_calls) == 0 event_calls.clear() @@ -461,26 +567,26 @@ class DefinesEventPageAndEvent(event_model.DocumentRouter): def event(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['data']['x'] == doc['seq_num'] + assert doc["data"]["x"] == doc["seq_num"] event_calls.append(object()) return dict(doc) def event_page(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['data']['x'][0] == 1 + assert doc["data"]["x"][0] == 1 event_page_calls.append(object()) return dict(doc) dr = DefinesEventPageAndEvent() # Test that Event is routed to Event. - check(dr('event', event1), event1) + check(dr("event", event1), event1) assert len(event_calls) == 1 assert len(event_page_calls) == 0 event_calls.clear() event_page_calls.clear() # Test that EventPage is routed to EventPage. - check(dr('event_page', event_page), event_page) + check(dr("event_page", event_page), event_page) assert len(event_page_calls) == 1 assert len(event_calls) == 0 event_calls.clear() @@ -493,12 +599,16 @@ def test_document_router_dispatch_datum(): datum_page_calls = [] # used for counting calls # example documents - datum1 = {'datum_id': 'placeholder/1', - 'resource': 'placeholder', - 'datum_kwargs': {'index': 1}} - datum2 = {'datum_id': 'placholder/2', - 'resource': 'placeholder', - 'datum_kwargs': {'index': 2}} + datum1 = { + "datum_id": "placeholder/1", + "resource": "placeholder", + "datum_kwargs": {"index": 1}, + } + datum2 = { + "datum_id": "placholder/2", + "resource": "placeholder", + "datum_kwargs": {"index": 2}, + } datum_page = event_model.pack_datum_page(datum1, datum2) def check(ret, original=None): @@ -523,14 +633,14 @@ def datum_page(self, doc): dr = DefinesNeitherDatumNorDatumPage() # Test that Datum is routed to Datum and DatumPage. - check(dr('datum', datum1)) + check(dr("datum", datum1)) assert len(datum_calls) == 1 assert len(datum_page_calls) == 1 datum_calls.clear() datum_page_calls.clear() # Test that DatumPage is routed to DatumPage and Datum *once* before giving # up. 
- check(dr('datum_page', datum_page)) + check(dr("datum_page", datum_page)) assert len(datum_page_calls) == 1 assert len(datum_calls) == 1 datum_calls.clear() @@ -540,7 +650,7 @@ class DefinesDatumNotDatumPage(event_model.DocumentRouter): def datum(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['datum_kwargs']['index'] == int(doc['datum_id'][-1]) + assert doc["datum_kwargs"]["index"] == int(doc["datum_id"][-1]) datum_calls.append(object()) return dict(doc) @@ -551,13 +661,13 @@ def datum_page(self, doc): dr = DefinesDatumNotDatumPage() # Test that Datum is routed to Datum. - check(dr('datum', datum1), datum1) + check(dr("datum", datum1), datum1) assert len(datum_calls) == 1 assert len(datum_page_calls) == 0 datum_calls.clear() datum_page_calls.clear() # Test that DatumPage is unpacked and routed to Datum one at a time. - check(dr('datum_page', datum_page), datum_page) + check(dr("datum_page", datum_page), datum_page) assert len(datum_page_calls) == 1 assert len(datum_calls) == 2 datum_calls.clear() @@ -572,19 +682,19 @@ def datum(self, doc): def datum_page(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['datum_kwargs']['index'][0] == int(doc['datum_id'][0][-1]) + assert doc["datum_kwargs"]["index"][0] == int(doc["datum_id"][0][-1]) datum_page_calls.append(object()) return dict(doc) dr = DefinesDatumPageNotDatum() # Test that Datum is packed and routed to DatumPage. - check(dr('datum', datum1), datum1) + check(dr("datum", datum1), datum1) assert len(datum_calls) == 1 assert len(datum_page_calls) == 1 datum_calls.clear() datum_page_calls.clear() # Test that DatumPage is routed to DatumPage. - check(dr('datum_page', datum_page), datum_page) + check(dr("datum_page", datum_page), datum_page) assert len(datum_page_calls) == 1 assert len(datum_calls) == 0 datum_calls.clear() @@ -595,26 +705,26 @@ class DefinesDatumPageAndDatum(event_model.DocumentRouter): def datum(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['datum_kwargs']['index'] == int(doc['datum_id'][-1]) + assert doc["datum_kwargs"]["index"] == int(doc["datum_id"][-1]) datum_calls.append(object()) return dict(doc) def datum_page(self, doc): # Just a dumb test that check something particular to these example # documents. - assert doc['datum_kwargs']['index'][0] == int(doc['datum_id'][0][-1]) + assert doc["datum_kwargs"]["index"][0] == int(doc["datum_id"][0][-1]) datum_page_calls.append(object()) return dict(doc) dr = DefinesDatumPageAndDatum() # Test that Datum is routed to Datum. - check(dr('datum', datum1), datum1) + check(dr("datum", datum1), datum1) assert len(datum_calls) == 1 assert len(datum_page_calls) == 0 datum_calls.clear() datum_page_calls.clear() # Test that DatumPage is routed to DatumPage. 
- check(dr('datum_page', datum_page), datum_page) + check(dr("datum_page", datum_page), datum_page) assert len(datum_page_calls) == 1 assert len(datum_calls) == 0 datum_calls.clear() @@ -627,42 +737,53 @@ def test_single_run_document_router(): sr.get_start() run_bundle = event_model.compose_run() - sr('start', run_bundle.start_doc) + sr("start", run_bundle.start_doc) assert sr.get_start() == run_bundle.start_doc desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') - sr('descriptor', desc_bundle.descriptor_doc) + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) + sr("descriptor", desc_bundle.descriptor_doc) desc_bundle_baseline = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='baseline') - sr('descriptor', desc_bundle_baseline.descriptor_doc) + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="baseline", + ) + sr("descriptor", desc_bundle_baseline.descriptor_doc) res_bundle = run_bundle.compose_resource( - spec='TIFF', root='/tmp', resource_path='stack.tiff', - resource_kwargs={}) - sr('resource', res_bundle.resource_doc) - datum_doc1 = res_bundle.compose_datum(datum_kwargs={'slice': 5}) - datum_doc2 = res_bundle.compose_datum(datum_kwargs={'slice': 10}) - sr('datum', datum_doc1) - sr('datum', datum_doc2) + spec="TIFF", root="/tmp", resource_path="stack.tiff", resource_kwargs={} + ) + sr("resource", res_bundle.resource_doc) + datum_doc1 = res_bundle.compose_datum(datum_kwargs={"slice": 5}) + datum_doc2 = res_bundle.compose_datum(datum_kwargs={"slice": 10}) + sr("datum", datum_doc1) + sr("datum", datum_doc2) event1 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc1['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=1) - sr('event', event1) + data={"motor": 0, "image": datum_doc1["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=1, + ) + sr("event", event1) event2 = desc_bundle.compose_event( - data={'motor': 0, 'image': datum_doc2['datum_id']}, - timestamps={'motor': 0, 'image': 0}, filled={'image': False}, - seq_num=2) - sr('event', event2) + data={"motor": 0, "image": datum_doc2["datum_id"]}, + timestamps={"motor": 0, "image": 0}, + filled={"image": False}, + seq_num=2, + ) + sr("event", event2) event3 = desc_bundle_baseline.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) - sr('event', event3) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) + sr("event", event3) with pytest.raises(event_model.EventModelValueError): sr.get_descriptor(res_bundle.resource_doc) @@ -671,19 +792,19 @@ def test_single_run_document_router(): sr.get_descriptor(datum_doc1) assert sr.get_descriptor(event1) == desc_bundle.descriptor_doc - assert sr.get_stream_name(event1) == desc_bundle.descriptor_doc.get('name') + assert sr.get_stream_name(event1) == desc_bundle.descriptor_doc.get("name") assert sr.get_descriptor(event2) == desc_bundle.descriptor_doc - assert sr.get_stream_name(event2) == desc_bundle.descriptor_doc.get('name') + assert sr.get_stream_name(event2) == desc_bundle.descriptor_doc.get("name") assert sr.get_descriptor(event3) == 
desc_bundle_baseline.descriptor_doc - assert sr.get_stream_name(event3) == desc_bundle_baseline.descriptor_doc.get('name') + assert sr.get_stream_name(event3) == desc_bundle_baseline.descriptor_doc.get("name") desc_bundle_unused = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}}, - name='unused') + data_keys={"motor": {"shape": [], "dtype": "number", "source": "..."}}, + name="unused", + ) event4 = desc_bundle_unused.compose_event( - data={'motor': 0}, - timestamps={'motor': 0}, - seq_num=1) + data={"motor": 0}, timestamps={"motor": 0}, seq_num=1 + ) with pytest.raises(event_model.EventModelValueError): sr.get_descriptor(event4) @@ -691,42 +812,50 @@ def test_single_run_document_router(): with pytest.raises(event_model.EventModelValueError): sr.get_stream_name(event4) - sr('stop', run_bundle.compose_stop()) + sr("stop", run_bundle.compose_stop()) # tests against a second run run_bundle = event_model.compose_run() with pytest.raises(event_model.EventModelValueError): - sr('start', run_bundle.start_doc) + sr("start", run_bundle.start_doc) desc_bundle = run_bundle.compose_descriptor( - data_keys={'motor': {'shape': [], 'dtype': 'number', 'source': '...'}, - 'image': {'shape': [512, 512], 'dtype': 'number', - 'source': '...', 'external': 'FILESTORE:'}}, - name='primary') + data_keys={ + "motor": {"shape": [], "dtype": "number", "source": "..."}, + "image": { + "shape": [512, 512], + "dtype": "number", + "source": "...", + "external": "FILESTORE:", + }, + }, + name="primary", + ) with pytest.raises(event_model.EventModelValueError): - sr('descriptor', desc_bundle.descriptor_doc) + sr("descriptor", desc_bundle.descriptor_doc) def test_rechunk_event_pages(): - def event_page_gen(page_size, num_pages): """ Generator event_pages for testing. """ - data_keys = ['x', 'y', 'z'] - array_keys = ['seq_num', 'time', 'uid'] + data_keys = ["x", "y", "z"] + array_keys = ["seq_num", "time", "uid"] for _ in range(num_pages): - yield {'descriptor': 'DESCRIPTOR', - **{key: list(range(page_size)) for key in array_keys}, - 'data': {key: list(range(page_size)) for key in data_keys}, - 'timestamps': {key: list(range(page_size)) for key in data_keys}, - 'filled': {key: list(range(page_size)) for key in data_keys}} + yield { + "descriptor": "DESCRIPTOR", + **{key: list(range(page_size)) for key in array_keys}, + "data": {key: list(range(page_size)) for key in data_keys}, + "timestamps": {key: list(range(page_size)) for key in data_keys}, + "filled": {key: list(range(page_size)) for key in data_keys}, + } # Get a list of event pages of size 13. event_pages = list(event_page_gen(13, 31)) # Change the size of the event_pages to size 7. event_pages_7 = list(event_model.rechunk_event_pages(event_pages, 7)) - assert [7] * 57 + [4] == [len(page['uid']) for page in event_pages_7] + assert [7] * 57 + [4] == [len(page["uid"]) for page in event_pages_7] # Change the size back to 13. event_pages_13 = event_model.rechunk_event_pages(event_pages_7, 13) # Check that it is equal to the original list of event_pages. @@ -734,24 +863,24 @@ def event_page_gen(page_size, num_pages): def test_rechunk_datum_pages(): - def datum_page_gen(page_size, num_pages): """ Generator datum_pages for testing. 
""" - data_keys = ['x', 'y', 'z'] - array_keys = ['datum_id'] + data_keys = ["x", "y", "z"] + array_keys = ["datum_id"] for _ in range(num_pages): - yield {'resource': 'RESOURCE', - **{key: list(range(page_size)) for key in array_keys}, - 'datum_kwargs': {key: list(range(page_size)) - for key in data_keys}} + yield { + "resource": "RESOURCE", + **{key: list(range(page_size)) for key in array_keys}, + "datum_kwargs": {key: list(range(page_size)) for key in data_keys}, + } # Get a list of datum pages of size 13. datum_pages = list(datum_page_gen(13, 31)) # Change the size of the datum_pages to size 7. datum_pages_7 = list(event_model.rechunk_datum_pages(datum_pages, 7)) - assert [7] * 57 + [4] == [len(page['datum_id']) for page in datum_pages_7] + assert [7] * 57 + [4] == [len(page["datum_id"]) for page in datum_pages_7] # Change the size back to 13. datum_pages_13 = event_model.rechunk_datum_pages(datum_pages_7, 13) # Check that it is equal to the original list of datum_pages. @@ -765,17 +894,16 @@ def test_pack_empty_raises(): event_model.pack_datum_page() -@pytest.mark.parametrize('retry_intervals', [(1,), [1], (), [], None]) +@pytest.mark.parametrize("retry_intervals", [(1,), [1], (), [], None]) def test_retry_intervals_input_normalization(retry_intervals): - filler = event_model.Filler({}, retry_intervals=retry_intervals, - inplace=False) + filler = event_model.Filler({}, retry_intervals=retry_intervals, inplace=False) assert isinstance(filler.retry_intervals, list) def test_attempt_with_retires(): mutable = [] expected_args = (1, 2) - expected_kwargs = {'c': 3, 'd': 4} + expected_kwargs = {"c": 3, "d": 4} expected_result = 10 class LocalException1(Exception): @@ -801,7 +929,8 @@ def func(*args, **kwargs): kwargs=expected_kwargs, error_to_catch=LocalException1, error_to_raise=LocalException2, - intervals=[0, 0.01, 0.01]) + intervals=[0, 0.01, 0.01], + ) assert result == expected_result mutable.clear() @@ -814,18 +943,20 @@ def func(*args, **kwargs): kwargs=expected_kwargs, error_to_catch=LocalException1, error_to_raise=LocalException2, - intervals=[0, 0.01]) + intervals=[0, 0.01], + ) def test_round_trip_event_page_with_empty_data(): event_page = { - 'time': [1, 2, 3], - 'seq_num': [1, 2, 3], - 'uid': ['a', 'b', 'c'], - 'descriptor': 'd', - 'data': {}, - 'timestamps': {}, - 'filled': {}} + "time": [1, 2, 3], + "seq_num": [1, 2, 3], + "uid": ["a", "b", "c"], + "descriptor": "d", + "data": {}, + "timestamps": {}, + "filled": {}, + } events = list(event_model.unpack_event_page(event_page)) assert len(events) == 3 @@ -834,10 +965,7 @@ def test_round_trip_event_page_with_empty_data(): def test_round_trip_datum_page_with_empty_data(): - datum_page = { - 'datum_id': ['a', 'b', 'c'], - 'resource': 'd', - 'datum_kwargs': {}} + datum_page = {"datum_id": ["a", "b", "c"], "resource": "d", "datum_kwargs": {}} datums = list(event_model.unpack_datum_page(datum_page)) assert len(datums) == 3 @@ -847,23 +975,23 @@ def test_round_trip_datum_page_with_empty_data(): def test_register_coercion(): # Re-registration should be fine. - assert 'as_is' in event_model._coercion_registry # implementation detail - event_model.register_coercion('as_is', event_model.as_is) + assert "as_is" in event_model._coercion_registry # implementation detail + event_model.register_coercion("as_is", event_model.as_is) # but registering something different to the same name should raise. 
with pytest.raises(event_model.EventModelValueError): - event_model.register_coercion('as_is', object) + event_model.register_coercion("as_is", object) def test_register_coercion_misspelled(): "The function register_coercion was originally released as register_coersion." # Re-registration should be fine. - assert 'as_is' in event_model._coercion_registry # implementation detail - event_model.register_coersion('as_is', event_model.as_is) + assert "as_is" in event_model._coercion_registry # implementation detail + event_model.register_coersion("as_is", event_model.as_is) # but registering something different to the same name should raise. with pytest.raises(event_model.EventModelValueError): - event_model.register_coersion('as_is', object) + event_model.register_coersion("as_is", object) def test_pickle_filler(): @@ -879,13 +1007,12 @@ def test_array_like(): dask_array = pytest.importorskip("dask.array") bundle = event_model.compose_run() desc_bundle = bundle.compose_descriptor( - data_keys={"a": {"shape": (3,), "dtype": "array", "source": ""}}, - name="primary" + data_keys={"a": {"shape": (3,), "dtype": "array", "source": ""}}, name="primary" ) desc_bundle.compose_event_page( data={"a": dask_array.ones((5, 3))}, timestamps={"a": [1, 2, 3]}, - seq_num=[1, 2, 3] + seq_num=[1, 2, 3], ) From 1aa1bebd4e2ee21e9e56881c5790eef089b2c7a2 Mon Sep 17 00:00:00 2001 From: gbischof Date: Tue, 24 May 2022 17:20:21 -0400 Subject: [PATCH 3/3] run black --- event_model/tests/test_em.py | 1 - 1 file changed, 1 deletion(-) diff --git a/event_model/tests/test_em.py b/event_model/tests/test_em.py index a4e889bb..67ead8da 100644 --- a/event_model/tests/test_em.py +++ b/event_model/tests/test_em.py @@ -80,7 +80,6 @@ def test_reserved_keys(): ) - def test_compose_run(): # Compose each kind of document type. These calls will trigger # jsonschema.validate and ensure that the document-generation code composes
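
Why PATCH 2/3 rewrites the "not" block from PATCH 1/3: in JSON Schema, "required": ["time", "seq_num"] is satisfied only when both names are present, so negating it rejects a data_keys mapping only if it lists both reserved keys at once. Negating an anyOf of single-key required clauses rejects a mapping that contains either one, which is what the new test_reserved_keys expects from compose_descriptor. A minimal standalone sketch of the difference, assuming only the jsonschema package already used by the tests (the two schemas below are simplified stand-ins, not the full event_descriptor schema):

import jsonschema

# Form from PATCH 1/3: the inner "required" matches only when BOTH names are
# present, so the "not" rejects a mapping only if it has time AND seq_num.
both_only = {"type": "object", "not": {"required": ["time", "seq_num"]}}

# Form from PATCH 2/3: "anyOf" matches if EITHER name is present, so the "not"
# rejects a mapping that contains time OR seq_num.
either_one = {
    "type": "object",
    "not": {"anyOf": [{"required": ["time"]}, {"required": ["seq_num"]}]},
}

data_keys = {"time": {"shape": [], "dtype": "number", "source": "..."}}

jsonschema.validate(data_keys, both_only)  # passes: "seq_num" is absent
try:
    jsonschema.validate(data_keys, either_one)
except jsonschema.ValidationError:
    print("'time' rejected as a reserved data key")  # this branch runs

With the anyOf form, the first validate call above passes while the second raises, which matches the ValidationError the new test expects when a descriptor declares "time" or "seq_num" in data_keys.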