From 4316cd14958f8eae75dcdd8e17ecc1a5f58049be Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 26 Jul 2019 14:58:39 +0100
Subject: [PATCH 01/64] Add redis to docker-compose and config, conftest

---
 docker-compose.yml | 6 ++++++
 mypy.ini | 2 +-
 requirements.txt | 2 ++
 src/allocation/config.py | 6 ++++++
 tests/conftest.py | 31 +++++++++++++++++++++++++++++--
 5 files changed, 44 insertions(+), 3 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 039400e9..74ac28b6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -7,6 +7,7 @@ services:
       dockerfile: Dockerfile
     depends_on:
       - postgres
+      - redis
     environment:
       - DB_HOST=postgres
       - DB_PASSWORD=abc123
@@ -27,3 +28,8 @@ services:
     ports:
       - "54321:5432"

+  redis:
+    image: redis:alpine
+    ports:
+      - "63791:6379"
+
diff --git a/mypy.ini b/mypy.ini
index 62194f35..601283d7 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -3,5 +3,5 @@ ignore_missing_imports = False
 mypy_path = ./src
 check_untyped_defs = True

-[mypy-pytest.*,sqlalchemy.*]
+[mypy-pytest.*,sqlalchemy.*,redis.*]
 ignore_missing_imports = True
diff --git a/requirements.txt b/requirements.txt
index 8c779254..5ae975d2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,9 +2,11 @@
 sqlalchemy
 flask
 psycopg2-binary
+redis

 # tests
 pytest
 pytest-icdiff
 mypy
 requests
+tenacity
diff --git a/src/allocation/config.py b/src/allocation/config.py
index f3b55cc9..30a8eb07 100644
--- a/src/allocation/config.py
+++ b/src/allocation/config.py
@@ -13,3 +13,9 @@ def get_api_url():
     host = os.environ.get("API_HOST", "localhost")
     port = 5005 if host == "localhost" else 80
     return f"http://{host}:{port}"
+
+
+def get_redis_host_and_port():
+    host = os.environ.get("REDIS_HOST", "localhost")
+    port = 63791 if host == "localhost" else 6379
+    return dict(host=host, port=port)
diff --git a/tests/conftest.py b/tests/conftest.py
index d51c20f6..918a121f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,10 +1,14 @@
 # pylint: disable=redefined-outer-name
+import shutil
+import subprocess
 import time
 from pathlib import Path

 import pytest
+import redis
 import requests
-from requests.exceptions import ConnectionError
+from requests.exceptions import RequestException
+from redis.exceptions import RedisError
 from sqlalchemy.exc import OperationalError
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker, clear_mappers
@@ -48,11 +52,22 @@ def wait_for_webapp_to_come_up():
     while time.time() < deadline:
         try:
             return requests.get(url)
-        except ConnectionError:
+        except RequestException:
             time.sleep(0.5)
     pytest.fail("API never came up")


+def wait_for_redis_to_come_up():
+    deadline = time.time() + 5
+    r = redis.Redis(**config.get_redis_host_and_port())
+    while time.time() < deadline:
+        try:
+            return r.ping()
+        except RedisError:
+            time.sleep(0.5)
+    pytest.fail("Redis never came up")
+
+
 @pytest.fixture(scope="session")
 def postgres_db():
     engine = create_engine(config.get_postgres_uri())
@@ -78,3 +93,15 @@ def restart_api():
     (Path(__file__).parent / "../src/allocation/entrypoints/flask_app.py").touch()
     time.sleep(0.5)
     wait_for_webapp_to_come_up()
+
+
+@pytest.fixture
+def restart_redis_pubsub():
+    wait_for_redis_to_come_up()
+    if not shutil.which("docker-compose"):
+        print("skipping restart, assumes running in container")
+        return
+    subprocess.run(
+        ["docker-compose", "restart", "-t", "0", "redis_pubsub"],
+        check=True,
+    )

From 7b99fe3e98ac50f13c49573b7e1672310325701c Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 26 Jul 2019 15:02:06 +0100
Subject: [PATCH 02/64] refactor e2e
 tests, move random_refs and api_client out

---
 tests/e2e/api_client.py | 25 ++++++++++++++++++++++++
 tests/e2e/test_api.py | 43 +++++++++++++++++------------------------
 2 files changed, 43 insertions(+), 25 deletions(-)
 create mode 100644 tests/e2e/api_client.py

diff --git a/tests/e2e/api_client.py b/tests/e2e/api_client.py
new file mode 100644
index 00000000..646ac4f7
--- /dev/null
+++ b/tests/e2e/api_client.py
@@ -0,0 +1,25 @@
+import requests
+from allocation import config
+
+
+def post_to_add_batch(ref, sku, qty, eta):
+    url = config.get_api_url()
+    r = requests.post(
+        f"{url}/add_batch", json={"ref": ref, "sku": sku, "qty": qty, "eta": eta}
+    )
+    assert r.status_code == 201
+
+
+def post_to_allocate(orderid, sku, qty, expect_success=True):
+    url = config.get_api_url()
+    r = requests.post(
+        f"{url}/allocate",
+        json={
+            "orderid": orderid,
+            "sku": sku,
+            "qty": qty,
+        },
+    )
+    if expect_success:
+        assert r.status_code == 201
+    return r
diff --git a/tests/e2e/test_api.py b/tests/e2e/test_api.py
index 79345fce..04883893 100644
--- a/tests/e2e/test_api.py
+++ b/tests/e2e/test_api.py
@@ -1,16 +1,6 @@
 import pytest
-import requests
-
-from allocation import config
-from ..random_refs import random_sku, random_batchref, random_orderid
-
-
-def post_to_add_batch(ref, sku, qty, eta):
-    url = config.get_api_url()
-    r = requests.post(
-        f"{url}/add_batch", json={"ref": ref, "sku": sku, "qty": qty, "eta": eta}
-    )
-    assert r.status_code == 201
+from ..random_refs import random_batchref, random_orderid, random_sku
+from . import api_client


 @pytest.mark.usefixtures("postgres_db")
@@ -20,24 +10,27 @@ def test_happy_path_returns_201_and_allocated_batch():
     earlybatch = random_batchref(1)
     laterbatch = random_batchref(2)
     otherbatch = random_batchref(3)
-    post_to_add_batch(laterbatch, sku, 100, "2011-01-02")
-    post_to_add_batch(earlybatch, sku, 100, "2011-01-01")
-    post_to_add_batch(otherbatch, othersku, 100, None)
-    data = {"orderid": random_orderid(), "sku": sku, "qty": 3}
+    api_client.post_to_add_batch(laterbatch, sku, 100, "2011-01-02")
+    api_client.post_to_add_batch(earlybatch, sku, 100, "2011-01-01")
+    api_client.post_to_add_batch(otherbatch, othersku, 100, None)

-    url = config.get_api_url()
-    r = requests.post(f"{url}/allocate", json=data)
+    response = api_client.post_to_allocate(random_orderid(), sku, qty=3)

-    assert r.status_code == 201
-    assert r.json()["batchref"] == earlybatch
+    assert response.status_code == 201
+    assert response.json()["batchref"] == earlybatch


 @pytest.mark.usefixtures("postgres_db")
 @pytest.mark.usefixtures("restart_api")
 def test_unhappy_path_returns_400_and_error_message():
     unknown_sku, orderid = random_sku(), random_orderid()
-    data = {"orderid": orderid, "sku": unknown_sku, "qty": 20}
-    url = config.get_api_url()
-    r = requests.post(f"{url}/allocate", json=data)
-    assert r.status_code == 400
-    assert r.json()["message"] == f"Invalid sku {unknown_sku}"
+
+    response = api_client.post_to_allocate(
+        orderid,
+        unknown_sku,
+        qty=20,
+        expect_success=False,
+    )
+
+    assert response.status_code == 400
+    assert response.json()["message"] == f"Invalid sku {unknown_sku}"

From 7d7bc950424a8811ada61e58ff6aaaf0ffca330d Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 26 Jul 2019 15:02:41 +0100
Subject: [PATCH 03/64] redis client for tests [redis_client_for_tests]

---
 tests/e2e/redis_client.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
 create mode 100644 tests/e2e/redis_client.py

diff --git a/tests/e2e/redis_client.py b/tests/e2e/redis_client.py
new file mode 100644
index 00000000..3392d026
--- /dev/null
+++ b/tests/e2e/redis_client.py
@@ -0,0 +1,18 @@
+import json
+import redis
+
+from allocation import config
+
+r = redis.Redis(**config.get_redis_host_and_port())
+
+
+def subscribe_to(channel):
+    pubsub = r.pubsub()
+    pubsub.subscribe(channel)
+    confirmation = pubsub.get_message(timeout=3)
+    assert confirmation["type"] == "subscribe"
+    return pubsub
+
+
+def publish_message(channel, message):
+    r.publish(channel, json.dumps(message))

From e2a23e2211b6ecd3f6ee57140f4dc34970cfbf74 Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 26 Jul 2019 15:07:14 +0100
Subject: [PATCH 04/64] Test for our external events [redis_e2e_test]

---
 requirements.txt | 3 ++-
 tests/e2e/test_external_events.py | 38 +++++++++++++++++++++++++++++++
 tests/pytest.ini | 2 ++
 3 files changed, 42 insertions(+), 1 deletion(-)
 create mode 100644 tests/e2e/test_external_events.py

diff --git a/requirements.txt b/requirements.txt
index 5ae975d2..882cb352 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,9 +4,10 @@ flask
 psycopg2-binary
 redis

-# tests
+# dev/tests
 pytest
 pytest-icdiff
 mypy
+pylint
 requests
 tenacity
diff --git a/tests/e2e/test_external_events.py b/tests/e2e/test_external_events.py
new file mode 100644
index 00000000..49dbd4b9
--- /dev/null
+++ b/tests/e2e/test_external_events.py
@@ -0,0 +1,38 @@
+import json
+import pytest
+from tenacity import Retrying, RetryError, stop_after_delay
+from . import api_client, redis_client
+from ..random_refs import random_batchref, random_orderid, random_sku
+
+
+@pytest.mark.usefixtures("postgres_db")
+@pytest.mark.usefixtures("restart_api")
+@pytest.mark.usefixtures("restart_redis_pubsub")
+def test_change_batch_quantity_leading_to_reallocation():
+    # start with two batches and an order allocated to one of them
+    orderid, sku = random_orderid(), random_sku()
+    earlier_batch, later_batch = random_batchref("old"), random_batchref("newer")
+    api_client.post_to_add_batch(earlier_batch, sku, qty=10, eta="2011-01-01")
+    api_client.post_to_add_batch(later_batch, sku, qty=10, eta="2011-01-02")
+    response = api_client.post_to_allocate(orderid, sku, 10)
+    assert response.json()["batchref"] == earlier_batch
+
+    subscription = redis_client.subscribe_to("line_allocated")
+
+    # change quantity on allocated batch so it's less than our order
+    redis_client.publish_message(
+        "change_batch_quantity",
+        {"batchref": earlier_batch, "qty": 5},
+    )
+
+    # wait until we see a message saying the order has been reallocated
+    messages = []
+    for attempt in Retrying(stop=stop_after_delay(3), reraise=True):
+        with attempt:
+            message = subscription.get_message(timeout=1)
+            if message:
+                messages.append(message)
+                print(messages)
+            data = json.loads(messages[-1]["data"])
+            assert data["orderid"] == orderid
+            assert data["batchref"] == later_batch
diff --git a/tests/pytest.ini b/tests/pytest.ini
index bbd083ac..3fd8685e 100644
--- a/tests/pytest.ini
+++ b/tests/pytest.ini
@@ -1,2 +1,4 @@
 [pytest]
 addopts = --tb=short
+filterwarnings =
+    ignore::DeprecationWarning

From d1e2c86fc3a668ce3c58ec52f55f2e6a54201806 Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 15 Nov 2019 14:46:04 +0000
Subject: [PATCH 05/64] use tenacity in conftest

---
 tests/conftest.py | 32 +++++++------------------------
 1 file changed, 7 insertions(+), 25 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 918a121f..dc695f4d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -7,11 +7,9 @@
 import pytest
 import redis
 import requests
-from requests.exceptions import RequestException
-from redis.exceptions import RedisError
-from sqlalchemy.exc import OperationalError
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker, clear_mappers
+from tenacity import retry, stop_after_delay

 from allocation.adapters.orm import metadata, start_mappers
 from allocation import config
@@ -36,36 +34,20 @@ def session(session_factory):
     return session_factory()


+@retry(stop=stop_after_delay(10))
 def wait_for_postgres_to_come_up(engine):
-    deadline = time.time() + 10
-    while time.time() < deadline:
-        try:
-            return engine.connect()
-        except OperationalError:
-            time.sleep(0.5)
-    pytest.fail("Postgres never came up")
+    return engine.connect()


+@retry(stop=stop_after_delay(10))
 def wait_for_webapp_to_come_up():
-    deadline = time.time() + 10
-    url = config.get_api_url()
-    while time.time() < deadline:
-        try:
-            return requests.get(url)
-        except RequestException:
-            time.sleep(0.5)
-    pytest.fail("API never came up")
+    return requests.get(config.get_api_url())


+@retry(stop=stop_after_delay(10))
 def wait_for_redis_to_come_up():
-    deadline = time.time() + 5
     r = redis.Redis(**config.get_redis_host_and_port())
-    while time.time() < deadline:
-        try:
-            return r.ping()
-        except RedisError:
-            time.sleep(0.5)
-    pytest.fail("Redis never came up")
+    return r.ping()


 @pytest.fixture(scope="session")
 def postgres_db():

From dfe47cfdb282b299dc878387fed485bf27366cd1 Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 26 Jul 2019 15:07:35 +0100
Subject: [PATCH 06/64] Docker infrastructure for new redis event listener
 container

---
 Dockerfile | 2 --
 Makefile | 12 ++++++------
 docker-compose.yml | 32 +++++++++++++++++++++++++++++---
 3 files changed, 35 insertions(+), 11 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 73024d18..1a3c9765 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,5 +11,3 @@ RUN pip install -e /src
 COPY tests/ /tests/

 WORKDIR /src
-ENV FLASK_APP=allocation/entrypoints/flask_app.py FLASK_DEBUG=1 PYTHONUNBUFFERED=1
-CMD flask run --host=0.0.0.0 --port=80
diff --git a/Makefile b/Makefile
index 6409e955..fb56493b 100644
--- a/Makefile
+++ b/Makefile
@@ -8,25 +8,25 @@ build:
	docker-compose build

 up:
-	docker-compose up -d app
+	docker-compose up -d

 down:
	docker-compose down --remove-orphans

 test: up
-	docker-compose run --rm --no-deps --entrypoint=pytest app /tests/unit /tests/integration /tests/e2e
+	docker-compose run --rm --no-deps --entrypoint=pytest api /tests/unit /tests/integration /tests/e2e

 unit-tests:
-	docker-compose run --rm --no-deps --entrypoint=pytest app /tests/unit
+	docker-compose run --rm --no-deps --entrypoint=pytest api /tests/unit

 integration-tests: up
-	docker-compose run --rm --no-deps --entrypoint=pytest app /tests/integration
+	docker-compose run --rm --no-deps --entrypoint=pytest api /tests/integration

 e2e-tests: up
-	docker-compose run --rm --no-deps --entrypoint=pytest app /tests/e2e
+	docker-compose run --rm --no-deps --entrypoint=pytest api /tests/e2e

 logs:
-	docker-compose logs app | tail -100
+	docker-compose logs --tail=25 api redis_pubsub

 black:
	black -l 86 $$(find * -name '*.py')
diff --git a/docker-compose.yml b/docker-compose.yml
index 74ac28b6..dc2cc369 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,25 +1,51 @@
 version: "3"
+
 services:

-  app:
+  redis_pubsub:
     build:
       context: .
       dockerfile: Dockerfile
+    image: allocation-image
     depends_on:
       - postgres
       - redis
     environment:
       - DB_HOST=postgres
       - DB_PASSWORD=abc123
-      - API_HOST=app
+      - REDIS_HOST=redis
       - PYTHONDONTWRITEBYTECODE=1
     volumes:
       - ./src:/src
       - ./tests:/tests
+    entrypoint:
+      - python
+      - /src/allocation/entrypoints/redis_eventconsumer.py
+
+  api:
+    image: allocation-image
+    depends_on:
+      - redis_pubsub
+    environment:
+      - DB_HOST=postgres
+      - DB_PASSWORD=abc123
+      - API_HOST=api
+      - REDIS_HOST=redis
+      - PYTHONDONTWRITEBYTECODE=1
+      - FLASK_APP=allocation/entrypoints/flask_app.py
+      - FLASK_DEBUG=1
+      - PYTHONUNBUFFERED=1
+    volumes:
+      - ./src:/src
+      - ./tests:/tests
+    entrypoint:
+      - flask
+      - run
+      - --host=0.0.0.0
+      - --port=80
     ports:
       - "5005:80"
-
   postgres:
     image: postgres:9.6
     environment:

From 26288c735fb71abc6e70bb1cab10fb78b1facd41 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 14 Aug 2019 15:02:48 +0100
Subject: [PATCH 07/64] add Allocated event [allocated_event]

---
 src/allocation/domain/events.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/allocation/domain/events.py b/src/allocation/domain/events.py
index ba90ea05..e2428f50 100644
--- a/src/allocation/domain/events.py
+++ b/src/allocation/domain/events.py
@@ -6,6 +6,14 @@ class Event:
     pass


+@dataclass
+class Allocated(Event):
+    orderid: str
+    sku: str
+    qty: int
+    batchref: str
+
+
 @dataclass
 class OutOfStock(Event):
     sku: str

From 7023466711bcd94b222ad927905f2e9523fcf1b6 Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 3 Jan 2020 19:11:39 +0000
Subject: [PATCH 08/64] redis eventconsumer first cut
 [redis_eventconsumer_first_cut]

---
 .../entrypoints/redis_eventconsumer.py | 32 +++++++++++++++++++
 1 file changed, 32 insertions(+)
 create mode 100644 src/allocation/entrypoints/redis_eventconsumer.py

diff --git a/src/allocation/entrypoints/redis_eventconsumer.py b/src/allocation/entrypoints/redis_eventconsumer.py
new file mode 100644
index 00000000..e04a8142
--- /dev/null
+++ b/src/allocation/entrypoints/redis_eventconsumer.py
@@ -0,0 +1,32 @@
+import json
+import logging
+import redis
+
+from allocation import config
+from allocation.domain import commands
+from allocation.adapters import orm
+from allocation.service_layer import messagebus, unit_of_work
+
+logger = logging.getLogger(__name__)
+
+r = redis.Redis(**config.get_redis_host_and_port())
+
+
+def main():
+    orm.start_mappers()
+    pubsub = r.pubsub(ignore_subscribe_messages=True)
+    pubsub.subscribe("change_batch_quantity")
+
+    for m in pubsub.listen():
+        handle_change_batch_quantity(m)
+
+
+def handle_change_batch_quantity(m):
+    logging.debug("handling %s", m)
+    data = json.loads(m["data"])
+    cmd = commands.ChangeBatchQuantity(ref=data["batchref"], qty=data["qty"])
+    messagebus.handle(cmd, uow=unit_of_work.SqlAlchemyUnitOfWork())
+
+
+if __name__ == "__main__":
+    main()

From 872722b525add3ff050d8b74ec689f0c7aec4fbd Mon Sep 17 00:00:00 2001
From: Harry
Date: Fri, 3 Jan 2020 19:12:02 +0000
Subject: [PATCH 09/64] redis eventpublisher first cut
 [redis_eventpublisher_first_cut]

---
 src/allocation/adapters/redis_eventpublisher.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 src/allocation/adapters/redis_eventpublisher.py

diff --git a/src/allocation/adapters/redis_eventpublisher.py b/src/allocation/adapters/redis_eventpublisher.py
new file mode 100644
index 00000000..6100956f
--- /dev/null
+++ b/src/allocation/adapters/redis_eventpublisher.py
@@ -0,0 +1,16 @@
+import json
+import logging
+from dataclasses import asdict
+import redis
+
+from allocation import config
+from allocation.domain import events
+
+logger = logging.getLogger(__name__)
+
+r = redis.Redis(**config.get_redis_host_and_port())
+
+
+def publish(channel, event: events.Event):
+    logging.debug("publishing: channel=%s, event=%s", channel, event)
+    r.publish(channel, json.dumps(asdict(event)))

From 95b4d78a2776ff3143528dd4b143f264edf31db5 Mon Sep 17 00:00:00 2001
From: Harry
Date: Thu, 18 Jul 2019 22:29:20 +0100
Subject: [PATCH 10/64] sneak in a redis patch so unit test dont need redis

---
 tests/unit/test_handlers.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py
index d73726fe..c62a55dc 100644
--- a/tests/unit/test_handlers.py
+++ b/tests/unit/test_handlers.py
@@ -53,6 +53,12 @@ def test_for_existing_product(self):
         assert "b2" in [b.reference for b in uow.products.get("GARISH-RUG").batches]


+@pytest.fixture(autouse=True)
+def fake_redis_publish():
+    with mock.patch("allocation.adapters.redis_eventpublisher.publish"):
+        yield
+
+
 class TestAllocate:
     def test_allocates(self):
         uow = FakeUnitOfWork()

From 904af4873945e43d826371e8c7f26e072fa28856 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 14 Aug 2019 15:20:18 +0100
Subject: [PATCH 11/64] test and Product change to emit event
 [model_emits_allocated_event]

---
 src/allocation/domain/model.py | 8 ++++++++
 tests/unit/test_product.py | 11 +++++++++++
 2 files changed, 19 insertions(+)

diff --git a/src/allocation/domain/model.py b/src/allocation/domain/model.py
index 9d74742c..dcdd639a 100644
--- a/src/allocation/domain/model.py
+++ b/src/allocation/domain/model.py
@@ -17,6 +17,14 @@ def allocate(self, line: OrderLine) -> str:
             batch = next(b for b in sorted(self.batches) if b.can_allocate(line))
             batch.allocate(line)
             self.version_number += 1
+            self.events.append(
+                events.Allocated(
+                    orderid=line.orderid,
+                    sku=line.sku,
+                    qty=line.qty,
+                    batchref=batch.reference,
+                )
+            )
             return batch.reference
         except StopIteration:
             self.events.append(events.OutOfStock(line.sku))
diff --git a/tests/unit/test_product.py b/tests/unit/test_product.py
index 1a1482b6..fc2b7015 100644
--- a/tests/unit/test_product.py
+++ b/tests/unit/test_product.py
@@ -43,6 +43,17 @@ def test_returns_allocated_batch_ref():
     assert allocation == in_stock_batch.reference


+def test_outputs_allocated_event():
+    batch = Batch("batchref", "RETRO-LAMPSHADE", 100, eta=None)
+    line = OrderLine("oref", "RETRO-LAMPSHADE", 10)
+    product = Product(sku="RETRO-LAMPSHADE", batches=[batch])
+    product.allocate(line)
+    expected = events.Allocated(
+        orderid="oref", sku="RETRO-LAMPSHADE", qty=10, batchref=batch.reference
+    )
+    assert product.events[-1] == expected
+
+
 def test_records_out_of_stock_event_if_cannot_allocate():
     batch = Batch("batch1", "SMALL-FORK", 10, eta=today)
     product = Product(sku="SMALL-FORK", batches=[batch])

From 867c070339ab1422deeae349db763267a4f12efa Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 14 Aug 2019 15:22:23 +0100
Subject: [PATCH 12/64] add handler for Allocated [chapter_11_external_events_ends]

---
 src/allocation/service_layer/handlers.py | 12 +++++++++++-
 src/allocation/service_layer/messagebus.py | 1 +
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 8c7b23d5..2d6657f0 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING
-from allocation.adapters import email
+from allocation.adapters import email, redis_eventpublisher
 from allocation.domain import commands, events, model
 from allocation.domain.model import OrderLine

@@ -49,6 +49,9 @@ def change_batch_quantity(
         uow.commit()


+# pylint: disable=unused-argument
+
+
 def send_out_of_stock_notification(
     event: events.OutOfStock,
     uow: unit_of_work.AbstractUnitOfWork,
@@ -57,3 +60,10 @@ def send_out_of_stock_notification(
         "stock@made.com",
         f"Out of stock for {event.sku}",
     )
+
+
+def publish_allocated_event(
+    event: events.Allocated,
+    uow: unit_of_work.AbstractUnitOfWork,
+):
+    redis_eventpublisher.publish("line_allocated", event)
diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py
index 8bac77a2..fa6d0a71 100644
--- a/src/allocation/service_layer/messagebus.py
+++ b/src/allocation/service_layer/messagebus.py
@@ -63,6 +63,7 @@


 EVENT_HANDLERS = {
+    events.Allocated: [handlers.publish_allocated_event],
     events.OutOfStock: [handlers.send_out_of_stock_notification],
 }  # type: Dict[Type[events.Event], List[Callable]]

From f6833ffe58b3fd71b1c243aa687722efa5c8fd49 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 21:22:46 +0100
Subject: [PATCH 13/64] modify api tests to try and do a get after a post
 [get_after_post]

---
 tests/conftest.py | 2 ++
 tests/e2e/api_client.py | 7 ++++++-
 tests/e2e/test_api.py | 27 +++++++++++++++------------
 3 files changed, 23 insertions(+), 13 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index dc695f4d..b9d18175 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -14,6 +14,8 @@
 from allocation.adapters.orm import metadata, start_mappers
 from allocation import config

+pytest.register_assert_rewrite("tests.e2e.api_client")
+

 @pytest.fixture
 def in_memory_db():
diff --git a/tests/e2e/api_client.py b/tests/e2e/api_client.py
index 646ac4f7..9ce00e28 100644
--- a/tests/e2e/api_client.py
+++ b/tests/e2e/api_client.py
@@ -21,5 +21,10 @@ def post_to_allocate(orderid, sku, qty, expect_success=True):
         },
     )
     if expect_success:
-        assert r.status_code == 201
+        assert r.status_code == 202
     return r
+
+
+def get_allocation(orderid):
+    url = config.get_api_url()
+    return requests.get(f"{url}/allocations/{orderid}")
diff --git a/tests/e2e/test_api.py b/tests/e2e/test_api.py
index 04883893..13d86f6f 100644
--- a/tests/e2e/test_api.py
+++ b/tests/e2e/test_api.py
@@ -5,7 +5,8 @@

 @pytest.mark.usefixtures("postgres_db")
 @pytest.mark.usefixtures("restart_api")
-def test_happy_path_returns_201_and_allocated_batch():
+def test_happy_path_returns_202_and_batch_is_allocated():
+    orderid = random_orderid()
     sku, othersku = random_sku(), random_sku("other")
     earlybatch = random_batchref(1)
     laterbatch = random_batchref(2)
@@ -14,23 +15,25 @@
     api_client.post_to_add_batch(earlybatch, sku, 100, "2011-01-01")
     api_client.post_to_add_batch(otherbatch, othersku, 100, None)

-    response = api_client.post_to_allocate(random_orderid(), sku, qty=3)
+    r = api_client.post_to_allocate(orderid, sku, qty=3)
+    assert r.status_code == 202

-    assert response.status_code == 201
-    assert response.json()["batchref"] == earlybatch
+    r = api_client.get_allocation(orderid)
+    assert r.ok
+    assert r.json() == [
+        {"sku": sku, "batchref": earlybatch},
+    ]


 @pytest.mark.usefixtures("postgres_db")
 @pytest.mark.usefixtures("restart_api")
 def test_unhappy_path_returns_400_and_error_message():
     unknown_sku, orderid = random_sku(), random_orderid()
-
-    response = api_client.post_to_allocate(
-        orderid,
-        unknown_sku,
-        qty=20,
-        expect_success=False,
+    r = api_client.post_to_allocate(
+        orderid, unknown_sku, qty=20, expect_success=False
     )
+    assert r.status_code == 400
+    assert r.json()["message"] == f"Invalid sku {unknown_sku}"

-    assert response.status_code == 400
-    assert response.json()["message"] == f"Invalid sku {unknown_sku}"
+    r = api_client.get_allocation(orderid)
+    assert r.status_code == 404

From 344cf81c110cb66f09cef5efe56fed3c37e5d6c4 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 21:23:22 +0100
Subject: [PATCH 14/64] modify allocate handler to no longer return anything

---
 src/allocation/service_layer/handlers.py | 5 ++---
 tests/unit/test_handlers.py | 5 +----
 2 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 2d6657f0..15e7f08f 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -28,15 +28,14 @@ def add_batch(
 def allocate(
     cmd: commands.Allocate,
     uow: unit_of_work.AbstractUnitOfWork,
-) -> str:
+):
     line = OrderLine(cmd.orderid, cmd.sku, cmd.qty)
     with uow:
         product = uow.products.get(sku=line.sku)
         if product is None:
             raise InvalidSku(f"Invalid sku {line.sku}")
-        batchref = product.allocate(line)
+        product.allocate(line)
         uow.commit()
-    return batchref


 def change_batch_quantity(
diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py
index c62a55dc..04fb9630 100644
--- a/tests/unit/test_handlers.py
+++ b/tests/unit/test_handlers.py
@@ -65,10 +65,7 @@ def test_allocates(self):
         messagebus.handle(
             commands.CreateBatch("batch1", "COMPLICATED-LAMP", 100, None), uow
         )
-        results = messagebus.handle(
-            commands.Allocate("o1", "COMPLICATED-LAMP", 10), uow
-        )
-        assert results.pop(0) == "batch1"
+        messagebus.handle(commands.Allocate("o1", "COMPLICATED-LAMP", 10), uow)
         [batch] = uow.products.get("COMPLICATED-LAMP").batches
         assert batch.available_quantity == 90

From d8bfbc6e2f9fbcf8beef5e7622b693d29d761feb Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 21:50:01 +0100
Subject: [PATCH 15/64] modify flask to add new view endpoint and return 202s

---
 src/allocation/entrypoints/flask_app.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py
index 4d8e3204..1070e173 100644
--- a/src/allocation/entrypoints/flask_app.py
+++ b/src/allocation/entrypoints/flask_app.py
@@ -1,10 +1,11 @@
 from datetime import datetime
-from flask import Flask, request
+from flask import Flask, jsonify, request

 from allocation.domain import commands
 from allocation.adapters import orm
 from allocation.service_layer import messagebus, unit_of_work
 from allocation.service_layer.handlers import InvalidSku
+from allocation import views

 app = Flask(__name__)
 orm.start_mappers()
@@ -35,4 +36,13 @@ def allocate_endpoint():
     except InvalidSku as e:
         return {"message": str(e)}, 400

-    return {"batchref": batchref}, 201
+    return "OK", 202
+
+
+@app.route("/allocations/<orderid>", methods=["GET"])
+def allocations_view_endpoint(orderid):
+    uow = unit_of_work.SqlAlchemyUnitOfWork()
+    result = views.allocations(orderid, uow)
+    if not result:
+        return "not found", 404
+    return jsonify(result), 200

From 057dc7a13f05f10c917bf239bb42ea12dddee58a Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 12 Jun 2019 21:57:35 +0100
Subject: [PATCH 16/64] session_factory -> sqlite_session_factory (needs
 backport)

---
 tests/conftest.py | 6 +++---
 tests/integration/test_repository.py | 4 ++--
 tests/integration/test_uow.py | 18 +++++++++---------
 3 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index b9d18175..3b2a8066 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -25,15 +25,15 @@ def in_memory_db():


 @pytest.fixture
-def session_factory(in_memory_db):
+def sqlite_session_factory(in_memory_db):
     start_mappers()
     yield sessionmaker(bind=in_memory_db)
     clear_mappers()


 @pytest.fixture
-def session(session_factory):
-    return session_factory()
+def sqlite_session(sqlite_session_factory):
+    return sqlite_session_factory()


 @retry(stop=stop_after_delay(10))
diff --git a/tests/integration/test_repository.py b/tests/integration/test_repository.py
index a73bcd51..9b637f1a 100644
--- a/tests/integration/test_repository.py
+++ b/tests/integration/test_repository.py
@@ -2,8 +2,8 @@
 from allocation.domain import model


-def test_get_by_batchref(session):
-    repo = repository.SqlAlchemyRepository(session)
+def test_get_by_batchref(sqlite_session):
+    repo = repository.SqlAlchemyRepository(sqlite_session)
     b1 = model.Batch(ref="b1", sku="sku1", qty=100, eta=None)
     b2 = model.Batch(ref="b2", sku="sku1", qty=100, eta=None)
     b3 = model.Batch(ref="b3", sku="sku2", qty=100, eta=None)
diff --git a/tests/integration/test_uow.py b/tests/integration/test_uow.py
index a95907cf..55c4f24d 100644
--- a/tests/integration/test_uow.py
+++ b/tests/integration/test_uow.py
@@ -34,12 +34,12 @@ def get_allocated_batch_ref(session, orderid, sku):
     return batchref


-def test_uow_can_retrieve_a_batch_and_allocate_to_it(session_factory):
-    session = session_factory()
+def test_uow_can_retrieve_a_batch_and_allocate_to_it(sqlite_session_factory):
+    session = sqlite_session_factory()
     insert_batch(session, "batch1", "HIPSTER-WORKBENCH", 100, None)
     session.commit()

-    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
+    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
     with uow:
         product = uow.products.get(sku="HIPSTER-WORKBENCH")
         line = model.OrderLine("o1", "HIPSTER-WORKBENCH", 10)
@@ -50,27 +50,27 @@ def test_uow_can_retrieve_a_batch_and_allocate_to_it(session_factory):
     assert batchref == "batch1"


-def test_rolls_back_uncommitted_work_by_default(session_factory):
-    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
+def test_rolls_back_uncommitted_work_by_default(sqlite_session_factory):
+    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
     with uow:
         insert_batch(uow.session, "batch1", "MEDIUM-PLINTH", 100, None)

-    new_session = session_factory()
+    new_session = sqlite_session_factory()
     rows = list(new_session.execute('SELECT * FROM "batches"'))
     assert rows == []


-def test_rolls_back_on_error(session_factory):
+def test_rolls_back_on_error(sqlite_session_factory):
     class MyException(Exception):
         pass

-    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
+    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
     with pytest.raises(MyException):
         with uow:
             insert_batch(uow.session, "batch1", "LARGE-FORK", 100, None)
             raise MyException()

-    new_session = session_factory()
+    new_session = sqlite_session_factory()
     rows = list(new_session.execute('SELECT * FROM "batches"'))
     assert rows == []

From 1328c9ce7bacd912d840db7083ca38ca5ba74d92 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 21:50:10 +0100
Subject: [PATCH 17/64] integration test for our view

---
 tests/integration/test_views.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)
 create mode 100644 tests/integration/test_views.py

diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py
new file mode 100644
index 00000000..99db03b0
--- /dev/null
+++ b/tests/integration/test_views.py
@@ -0,0 +1,23 @@
+from datetime import date
+from allocation import views
+from allocation.domain import commands
+from allocation.service_layer import messagebus, unit_of_work
+
+today = date.today()
+
+
+def test_allocations_view(sqlite_session_factory):
+    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
+    messagebus.handle(commands.CreateBatch("sku1batch", "sku1", 50, None), uow)
+    messagebus.handle(commands.CreateBatch("sku2batch", "sku2", 50, today), uow)
+    messagebus.handle(commands.Allocate("order1", "sku1", 20), uow)
+    messagebus.handle(commands.Allocate("order1", "sku2", 20), uow)
+    # add a spurious batch and order to make sure we're getting the right ones
+    messagebus.handle(commands.CreateBatch("sku1batch-later", "sku1", 50, today), uow)
+    messagebus.handle(commands.Allocate("otherorder", "sku1", 30), uow)
+    messagebus.handle(commands.Allocate("otherorder", "sku2", 10), uow)
+
+    assert views.allocations("order1", uow) == [
+        {"sku": "sku1", "batchref": "sku1batch"},
+        {"sku": "sku2", "batchref": "sku2batch"},
+    ]

From e535570492c8cca1202c6e8d296f9dc8b2dae2db Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 21:50:25 +0100
Subject: [PATCH 18/64] first cut of a view with raw sql [views_dot_py]

---
 src/allocation/views.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 src/allocation/views.py

diff --git a/src/allocation/views.py b/src/allocation/views.py
new file mode 100644
index 00000000..a564b8d0
--- /dev/null
+++ b/src/allocation/views.py
@@ -0,0 +1,16 @@
+from allocation.service_layer import unit_of_work
+
+
+def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
+    with uow:
+        results = uow.session.execute(
+            """
+            SELECT ol.sku, b.reference
+            FROM allocations AS a
+            JOIN batches AS b ON a.batch_id = b.id
+            JOIN order_lines AS ol ON a.orderline_id = ol.id
+            WHERE ol.orderid = :orderid
+            """,
+            dict(orderid=orderid),
+        )
+    return [{"sku": sku, "batchref": batchref} for sku, batchref in results]

From da050780abeea7d3527ead8105d177ed352a2290 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 6 Nov 2019 13:26:44 +0000
Subject: [PATCH 19/64] use repository and go via Product [view_using_repo]

---
 src/allocation/views.py | 18 +++++++-----------
 1 file changed, 7 insertions(+), 11 deletions(-)

diff --git a/src/allocation/views.py b/src/allocation/views.py
index a564b8d0..50dc92e3 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -3,14 +3,10 @@

 def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
     with uow:
-        results = uow.session.execute(
-            """
-            SELECT ol.sku, b.reference
-            FROM allocations AS a
-            JOIN batches AS b ON a.batch_id = b.id
-            JOIN order_lines AS ol ON a.orderline_id = ol.id
-            WHERE ol.orderid = :orderid
-            """,
-            dict(orderid=orderid),
-        )
-    return [{"sku": sku, "batchref": batchref} for sku, batchref in results]
+        products = uow.products.for_order(orderid=orderid)
+        batches = [b for p in products for b in p.batches]
+        return [
+            {"sku": b.sku, "batchref": b.reference}
+            for b in batches
+            if orderid in b.orderids
+        ]

From a00af9ab027b08c2d02c33709a1a5ce952f28c08 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 6 Nov 2019 13:27:20 +0000
Subject: [PATCH 20/64] arguably-unnecessary helper property on model.
 [orderids_property]

---
 src/allocation/domain/model.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/allocation/domain/model.py b/src/allocation/domain/model.py
index dcdd639a..e5aa4c37 100644
--- a/src/allocation/domain/model.py
+++ b/src/allocation/domain/model.py
@@ -71,6 +71,10 @@ def __gt__(self, other):
             return True
         return self.eta > other.eta

+    @property
+    def orderids(self):
+        return {l.orderid for l in self._allocations}
+
     def allocate(self, line: OrderLine):
         if self.can_allocate(line):
             self._allocations.add(line)

From c57968370ad0decad20a7779843e107e6c3bb3a2 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 6 Nov 2019 13:27:47 +0000
Subject: [PATCH 21/64] finder method on repo [for_order_method]

---
 src/allocation/adapters/repository.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/allocation/adapters/repository.py b/src/allocation/adapters/repository.py
index f8821758..b02a00a4 100644
--- a/src/allocation/adapters/repository.py
+++ b/src/allocation/adapters/repository.py
@@ -52,6 +52,17 @@ def _get_by_batchref(self, batchref):
         return (
             self.session.query(model.Product)
             .join(model.Batch)
-            .filter(orm.batches.c.reference == batchref)
+            .filter(
+                orm.batches.c.reference == batchref,
+            )
             .first()
         )
+
+    def for_order(self, orderid):
+        order_lines = self.session.query(model.OrderLine).filter_by(orderid=orderid)
+        skus = {l.sku for l in order_lines}
+        return (
+            self.session.query(model.Product)
+            .join(model.Batch)
+            .filter(model.Batch.sku.in_(skus))
+        )

From d0593d3964597394cbae2a62ca38e4a2130c65b1 Mon Sep 17 00:00:00 2001
From: Harry
Date: Wed, 6 Nov 2019 13:29:39 +0000
Subject: [PATCH 22/64] Use the ORM instead [view_using_orm]

---
 src/allocation/adapters/repository.py | 9 ---------
 src/allocation/domain/model.py | 4 ----
 src/allocation/views.py | 14 +++++++-------
 3 files changed, 7 insertions(+), 20 deletions(-)

diff --git a/src/allocation/adapters/repository.py b/src/allocation/adapters/repository.py
index b02a00a4..15a6b0ae 100644
--- a/src/allocation/adapters/repository.py
+++ b/src/allocation/adapters/repository.py
@@ -57,12 +57,3 @@ def _get_by_batchref(self, batchref):
             )
             .first()
         )
-
-    def for_order(self, orderid):
-        order_lines = self.session.query(model.OrderLine).filter_by(orderid=orderid)
-        skus = {l.sku for l in order_lines}
-        return (
-            self.session.query(model.Product)
-            .join(model.Batch)
-            .filter(model.Batch.sku.in_(skus))
-        )
diff --git a/src/allocation/domain/model.py b/src/allocation/domain/model.py
index e5aa4c37..dcdd639a 100644
--- a/src/allocation/domain/model.py
+++ b/src/allocation/domain/model.py
@@ -71,10 +71,6 @@ def __gt__(self, other):
             return True
         return self.eta > other.eta

-    @property
-    def orderids(self):
-        return {l.orderid for l in self._allocations}
-
     def allocate(self, line: OrderLine):
         if self.can_allocate(line):
             self._allocations.add(line)
diff --git a/src/allocation/views.py b/src/allocation/views.py
index 50dc92e3..80fc9aa6 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -1,12 +1,12 @@
+from allocation.domain import model
 from allocation.service_layer import unit_of_work


 def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
     with uow:
-        products = uow.products.for_order(orderid=orderid)
-        batches = [b for p in products for b in p.batches]
-        return [
-            {"sku": b.sku, "batchref": b.reference}
-            for b in batches
-            if orderid in b.orderids
-        ]
+        batches = (
+            uow.session.query(model.Batch)
+            .join(model.OrderLine, model.Batch._allocations)
+            .filter(model.OrderLine.orderid == orderid)
+        )
+        return [{"sku": b.sku, "batchref": b.reference} for b in batches]

From 0c16a31a959835c09f8677b11cbf0068a8fa79cc Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 22:33:33 +0100
Subject: [PATCH 23/64] Simpler view based on a new read model table

---
 src/allocation/views.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/allocation/views.py b/src/allocation/views.py
index 80fc9aa6..ca56ceed 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -1,12 +1,12 @@
-from allocation.domain import model
 from allocation.service_layer import unit_of_work


 def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
     with uow:
-        batches = (
-            uow.session.query(model.Batch)
-            .join(model.OrderLine, model.Batch._allocations)
-            .filter(model.OrderLine.orderid == orderid)
+        results = list(
+            uow.session.execute(
+                "SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid",
+                dict(orderid=orderid),
+            )
         )
-        return [{"sku": b.sku, "batchref": b.reference} for b in batches]
+    return [dict(r) for r in results]

From 1fca005271727548e533812f1b97c3af1c4699e7 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 22:33:43 +0100
Subject: [PATCH 24/64] new table in orm

---
 src/allocation/adapters/orm.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/allocation/adapters/orm.py b/src/allocation/adapters/orm.py
index ea76eed5..a87068a5 100644
--- a/src/allocation/adapters/orm.py
+++ b/src/allocation/adapters/orm.py
@@ -49,6 +49,14 @@
     Column("batch_id", ForeignKey("batches.id")),
 )

+allocations_view = Table(
+    "allocations_view",
+    metadata,
+    Column("orderid", String(255)),
+    Column("sku", String(255)),
+    Column("batchref", String(255)),
+)
+

 def start_mappers():
     lines_mapper = mapper(model.OrderLine, order_lines)

From 0dd61333a433a9084b29859af15be9935359b644 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 22:33:58 +0100
Subject: [PATCH 25/64] handler for view model update

---
 src/allocation/service_layer/handlers.py | 15 +++++++++++++++
 src/allocation/views.py | 10 +++++-----
 2 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 15e7f08f..34df09be 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -66,3 +66,18 @@ def publish_allocated_event(
     uow: unit_of_work.AbstractUnitOfWork,
 ):
     redis_eventpublisher.publish("line_allocated", event)
+
+
+def add_allocation_to_read_model(
+    event: events.Allocated,
+    uow: unit_of_work.SqlAlchemyUnitOfWork,
+):
+    with uow:
+        uow.session.execute(
+            """
+            INSERT INTO allocations_view (orderid, sku, batchref)
+            VALUES (:orderid, :sku, :batchref)
+            """,
+            dict(orderid=event.orderid, sku=event.sku, batchref=event.batchref),
+        )
+        uow.commit()
diff --git a/src/allocation/views.py b/src/allocation/views.py
index ca56ceed..a952887f 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -3,10 +3,10 @@

 def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
     with uow:
-        results = list(
-            uow.session.execute(
-                "SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid",
-                dict(orderid=orderid),
-            )
+        results = uow.session.execute(
+            """
+            SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid
+            """,
+            dict(orderid=orderid),
         )
     return [dict(r) for r in results]

From 3f399bc70713bcbda5c2e96fdfe7a1779cdce5f7 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 22:34:34 +0100
Subject: [PATCH 26/64] add handler for allocated

---
 src/allocation/service_layer/messagebus.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py
index fa6d0a71..fef5a91d 100644
--- a/src/allocation/service_layer/messagebus.py
+++ b/src/allocation/service_layer/messagebus.py
@@ -63,7 +63,10 @@


 EVENT_HANDLERS = {
-    events.Allocated: [handlers.publish_allocated_event],
+    events.Allocated: [
+        handlers.publish_allocated_event,
+        handlers.add_allocation_to_read_model,
+    ],
     events.OutOfStock: [handlers.send_out_of_stock_notification],
 }  # type: Dict[Type[events.Event], List[Callable]]

From 1898b2c2bdd26a43eec36f5770a7e79e1238409c Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 13 Aug 2019 06:21:27 +0100
Subject: [PATCH 27/64] handle_command no longer returns

---
 src/allocation/entrypoints/flask_app.py | 3 +--
 src/allocation/service_layer/messagebus.py | 13 +++----------
 2 files changed, 4 insertions(+), 12 deletions(-)

diff --git a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py
index 1070e173..ed482390 100644
--- a/src/allocation/entrypoints/flask_app.py
+++ b/src/allocation/entrypoints/flask_app.py
@@ -31,8 +31,7 @@ def allocate_endpoint():
             request.json["orderid"], request.json["sku"], request.json["qty"]
         )
         uow = unit_of_work.SqlAlchemyUnitOfWork()
-        results = messagebus.handle(cmd, uow)
-        batchref = results.pop(0)
+        messagebus.handle(cmd, uow)
     except InvalidSku as e:
         return {"message": str(e)}, 400

diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py
index fef5a91d..9527625a 100644
--- a/src/allocation/service_layer/messagebus.py
+++ b/src/allocation/service_layer/messagebus.py
@@ -13,22 +13,16 @@
 Message = Union[commands.Command, events.Event]


-def handle(
-    message: Message,
-    uow: unit_of_work.AbstractUnitOfWork,
-):
-    results = []
+def handle(message: Message, uow: unit_of_work.AbstractUnitOfWork):
     queue = [message]
     while queue:
         message = queue.pop(0)
         if isinstance(message, events.Event):
             handle_event(message, queue, uow)
         elif isinstance(message, commands.Command):
-            cmd_result = handle_command(message, queue, uow)
-            results.append(cmd_result)
+            handle_command(message, queue, uow)
         else:
             raise Exception(f"{message} was not an Event or Command")
-    return results


 def handle_event(
@@ -54,9 +48,8 @@ def handle_command(
     logger.debug("handling command %s", command)
     try:
         handler = COMMAND_HANDLERS[type(command)]
-        result = handler(command, uow=uow)
+        handler(command, uow=uow)
         queue.extend(uow.collect_new_events())
-        return result
     except Exception:
         logger.exception("Exception handling command %s", command)
         raise

From 9535cbaabe560e17c8c426e2c7082d89a2b0fec1 Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 13 Aug 2019 06:21:35 +0100
Subject: [PATCH 28/64] pylint thing

---
 src/allocation/service_layer/handlers.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 34df09be..66a64b1c 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -1,3 +1,4 @@
+# pylint: disable=unused-argument
 from __future__ import annotations
 from typing import TYPE_CHECKING
 from allocation.adapters import email, redis_eventpublisher

From 4b9a2ce2eebf62a881eae4397aef81a21c189a39 Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 13 Aug 2019 06:20:52 +0100
Subject: [PATCH 29/64] fix redis e2e test

---
 tests/e2e/test_external_events.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tests/e2e/test_external_events.py b/tests/e2e/test_external_events.py
index 49dbd4b9..c4fde79f 100644
--- a/tests/e2e/test_external_events.py
+++ b/tests/e2e/test_external_events.py
@@ -14,8 +14,10 @@ def test_change_batch_quantity_leading_to_reallocation():
     earlier_batch, later_batch = random_batchref("old"), random_batchref("newer")
     api_client.post_to_add_batch(earlier_batch, sku, qty=10, eta="2011-01-01")
     api_client.post_to_add_batch(later_batch, sku, qty=10, eta="2011-01-02")
-    response = api_client.post_to_allocate(orderid, sku, 10)
-    assert response.json()["batchref"] == earlier_batch
+    r = api_client.post_to_allocate(orderid, sku, 10)
+    assert r.ok
+    response = api_client.get_allocation(orderid)
+    assert response.json()[0]["batchref"] == earlier_batch

     subscription = redis_client.subscribe_to("line_allocated")

From 2693a172bef4d1349669c5fbec152a7d7afab369 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 10 Jun 2019 22:43:35 +0100
Subject: [PATCH 30/64] make sure deallocation fixes view model too
 [deallocation_to_readmodel]

---
 src/allocation/domain/events.py | 7 ++++++
 src/allocation/domain/model.py | 2 +-
 src/allocation/service_layer/handlers.py | 26 ++++++++++++++++++++++
 src/allocation/service_layer/messagebus.py | 4 ++++
 tests/integration/test_views.py | 12 ++++++++++
 5 files changed, 50 insertions(+), 1 deletion(-)

diff --git a/src/allocation/domain/events.py b/src/allocation/domain/events.py
index e2428f50..47634f64 100644
--- a/src/allocation/domain/events.py
+++ b/src/allocation/domain/events.py
@@ -14,6 +14,13 @@ class Allocated(Event):
     batchref: str


+@dataclass
+class Deallocated(Event):
+    orderid: str
+    sku: str
+    qty: int
+
+
 @dataclass
 class OutOfStock(Event):
     sku: str
diff --git a/src/allocation/domain/model.py b/src/allocation/domain/model.py
index dcdd639a..dd4ac782 100644
--- a/src/allocation/domain/model.py
+++ b/src/allocation/domain/model.py
@@ -35,7 +35,7 @@ def change_batch_quantity(self, ref: str, qty: int):
         batch._purchased_quantity = qty
         while batch.available_quantity < 0:
             line = batch.deallocate_one()
-            self.events.append(commands.Allocate(line.orderid, line.sku, line.qty))
+            self.events.append(events.Deallocated(line.orderid, line.sku, line.qty))


 @dataclass(unsafe_hash=True)
diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 66a64b1c..fa2698a4 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -1,5 +1,6 @@
 # pylint: disable=unused-argument
 from __future__ import annotations
+from dataclasses import asdict
 from typing import TYPE_CHECKING
 from allocation.adapters import email, redis_eventpublisher
 from allocation.domain import commands, events, model
@@ -39,6 +40,16 @@ def allocate(
         uow.commit()


+def reallocate(
+    event: events.Deallocated,
+    uow: unit_of_work.AbstractUnitOfWork,
+):
+    with uow:
+        product = uow.products.get(sku=event.sku)
+        product.events.append(commands.Allocate(**asdict(event)))
+        uow.commit()
+
+
 def change_batch_quantity(
     cmd: commands.ChangeBatchQuantity,
     uow: unit_of_work.AbstractUnitOfWork,
@@ -82,3 +93,18 @@ def add_allocation_to_read_model(
             dict(orderid=event.orderid, sku=event.sku, batchref=event.batchref),
         )
         uow.commit()
+
+
+def remove_allocation_from_read_model(
+    event: events.Deallocated,
+    uow: unit_of_work.SqlAlchemyUnitOfWork,
+):
+    with uow:
+        uow.session.execute(
+            """
+            DELETE FROM allocations_view
+            WHERE orderid = :orderid AND sku = :sku
+            """,
+            dict(orderid=event.orderid, sku=event.sku),
+        )
+        uow.commit()
diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py
index 9527625a..104111fa 100644
--- a/src/allocation/service_layer/messagebus.py
+++ b/src/allocation/service_layer/messagebus.py
@@ -60,6 +60,10 @@
         handlers.publish_allocated_event,
         handlers.add_allocation_to_read_model,
     ],
+    events.Deallocated: [
+        handlers.remove_allocation_from_read_model,
+        handlers.reallocate,
+    ],
     events.OutOfStock: [handlers.send_out_of_stock_notification],
 }  # type: Dict[Type[events.Event], List[Callable]]
diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py
index 99db03b0..4dda3e54 100644
--- a/tests/integration/test_views.py
+++ b/tests/integration/test_views.py
@@ -21,3 +21,15 @@ def test_allocations_view(sqlite_session_factory):
         {"sku": "sku1", "batchref": "sku1batch"},
         {"sku": "sku2", "batchref": "sku2batch"},
     ]
+
+
+def test_deallocation(sqlite_session_factory):
+    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
+    messagebus.handle(commands.CreateBatch("b1", "sku1", 50, None), uow)
+    messagebus.handle(commands.CreateBatch("b2", "sku1", 50, today), uow)
+    messagebus.handle(commands.Allocate("o1", "sku1", 40), uow)
+    messagebus.handle(commands.ChangeBatchQuantity("b1", 10), uow)
+
+    assert views.allocations("o1", uow) == [
+        {"sku": "sku1", "batchref": "b2"},
+    ]

From 0d84f45eff4ee31de1e779b730604824644019d6 Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 26 Nov 2019 12:22:25 +0000
Subject: [PATCH 31/64] handlers talk to redis [redis_readmodel_handlers]

---
 src/allocation/service_layer/handlers.py | 30 ++++--------------------
 1 file changed, 4 insertions(+), 26 deletions(-)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index fa2698a4..0ad6d849 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -80,31 +80,9 @@ def publish_allocated_event(
     redis_eventpublisher.publish("line_allocated", event)


-def add_allocation_to_read_model(
-    event: events.Allocated,
-    uow: unit_of_work.SqlAlchemyUnitOfWork,
-):
-    with uow:
-        uow.session.execute(
-            """
-            INSERT INTO allocations_view (orderid, sku, batchref)
-            VALUES (:orderid, :sku, :batchref)
-            """,
-            dict(orderid=event.orderid, sku=event.sku, batchref=event.batchref),
-        )
-        uow.commit()
+def add_allocation_to_read_model(event: events.Allocated, _):
+    redis_eventpublisher.update_readmodel(event.orderid, event.sku, event.batchref)


-def remove_allocation_from_read_model(
-    event: events.Deallocated,
-    uow: unit_of_work.SqlAlchemyUnitOfWork,
-):
-    with uow:
-        uow.session.execute(
-            """
-            DELETE FROM allocations_view
-            WHERE orderid = :orderid AND sku = :sku
-            """,
-            dict(orderid=event.orderid, sku=event.sku),
-        )
-        uow.commit()
+def remove_allocation_from_read_model(event: events.Deallocated, _):
+    redis_eventpublisher.update_readmodel(event.orderid, event.sku, None)

From 5ddb83a906874a0775924a14629804b48d371d46 Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 26 Nov 2019 16:16:46 +0000
Subject: [PATCH 32/64] new helpers to update read model
 [redis_readmodel_client]

---
 src/allocation/adapters/redis_eventpublisher.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/allocation/adapters/redis_eventpublisher.py b/src/allocation/adapters/redis_eventpublisher.py
index 6100956f..a0d1246b 100644
--- a/src/allocation/adapters/redis_eventpublisher.py
+++ b/src/allocation/adapters/redis_eventpublisher.py
@@ -14,3 +14,11 @@
 def publish(channel, event: events.Event):
     logging.debug("publishing: channel=%s, event=%s", channel, event)
     r.publish(channel, json.dumps(asdict(event)))
+
+
+def update_readmodel(orderid, sku, batchref):
+    r.hset(orderid, sku, batchref)
+
+
+def get_readmodel(orderid):
+    return r.hgetall(orderid)

From 5b0cc201a26b62b2c78f83c8d3f35100e2f788da Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 26 Nov 2019 16:17:08 +0000
Subject: [PATCH 33/64] view now users redis, tweak tests+app.
 [redis_readmodel_view]

---
 src/allocation/entrypoints/flask_app.py | 3 +--
 src/allocation/views.py | 16 +++++++---------
 tests/integration/test_views.py | 20 +++++++++++++++++---
 3 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py
index ed482390..c1cb915b 100644
--- a/src/allocation/entrypoints/flask_app.py
+++ b/src/allocation/entrypoints/flask_app.py
@@ -40,8 +40,7 @@ def allocate_endpoint():

 @app.route("/allocations/<orderid>", methods=["GET"])
 def allocations_view_endpoint(orderid):
-    uow = unit_of_work.SqlAlchemyUnitOfWork()
-    result = views.allocations(orderid, uow)
+    result = views.allocations(orderid)
     if not result:
         return "not found", 404
     return jsonify(result), 200
diff --git a/src/allocation/views.py b/src/allocation/views.py
index a952887f..b9dc175f 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -1,12 +1,10 @@
+from allocation.adapters import redis_eventpublisher
 from allocation.service_layer import unit_of_work


-def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
-    with uow:
-        results = uow.session.execute(
-            """
-            SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid
-            """,
-            dict(orderid=orderid),
-        )
-    return [dict(r) for r in results]
+def allocations(orderid: str):
+    batches = redis_eventpublisher.get_readmodel(orderid)
+    return [
+        {"batchref": b.decode(), "sku": s.decode()}
+        for s, b in batches.items()
+    ]
diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py
index 4dda3e54..f38f75a4 100644
--- a/tests/integration/test_views.py
+++ b/tests/integration/test_views.py
@@ -1,11 +1,25 @@
 from datetime import date
-from allocation import views
+import pytest
+import redis
+from allocation import config, views
 from allocation.domain import commands
 from allocation.service_layer import messagebus, unit_of_work

 today = date.today()


+@pytest.fixture
+def cleanup_redis():
+    r = redis.Redis(**config.get_redis_host_and_port())
+    yield
+    for k in r.keys():
+        print("cleaning up redis key", k)
+        r.delete(k)
+
+
+pytestmark = pytest.mark.usefixtures("cleanup_redis")
+
+
 def test_allocations_view(sqlite_session_factory):
     uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
     messagebus.handle(commands.CreateBatch("sku1batch", "sku1", 50, None), uow)
@@ -17,7 +31,7 @@ def test_allocations_view(sqlite_session_factory):
     messagebus.handle(commands.Allocate("otherorder", "sku1", 30), uow)
     messagebus.handle(commands.Allocate("otherorder", "sku2", 10), uow)

-    assert views.allocations("order1", uow) == [
+    assert views.allocations("order1") == [
         {"sku": "sku1", "batchref": "sku1batch"},
         {"sku": "sku2", "batchref": "sku2batch"},
     ]
@@ -30,6 +44,6 @@ def test_deallocation(sqlite_session_factory):
     messagebus.handle(commands.Allocate("o1", "sku1", 40), uow)
     messagebus.handle(commands.ChangeBatchQuantity("b1", 10), uow)

-    assert views.allocations("o1", uow) == [
+    assert views.allocations("o1") == [
         {"sku": "sku1", "batchref": "b2"},
     ]

From 19b7304ed01fb300d23a3f317620f125fc63c19e Mon Sep 17 00:00:00 2001
From: Harry
Date: Tue, 26 Nov 2019 16:17:16 +0000
Subject: [PATCH 34/64] Revert "view now users redis, tweak tests+app. [...]"
 [chapter_12_cqrs_ends]

This reverts commit 6dd38db04ea7c637f2d2510e7f587ac2009dd2ac.

Revert "new helpers to update read model ..."

This reverts commit dbaebdf249d952f2c3915526fd4d1bc6fe3cd18f.

Revert "handlers talk to redis ..."

This reverts commit 00658e2181de3e118579fa0a0da9b7ebb9e081f9.
---
 .../adapters/redis_eventpublisher.py | 8 -----
 src/allocation/entrypoints/flask_app.py | 3 +-
 src/allocation/service_layer/handlers.py | 30 ++++++++++++++++---
 src/allocation/views.py | 16 +++++-----
 tests/integration/test_views.py | 20 ++-----------
 5 files changed, 40 insertions(+), 37 deletions(-)

diff --git a/src/allocation/adapters/redis_eventpublisher.py b/src/allocation/adapters/redis_eventpublisher.py
index a0d1246b..6100956f 100644
--- a/src/allocation/adapters/redis_eventpublisher.py
+++ b/src/allocation/adapters/redis_eventpublisher.py
@@ -14,11 +14,3 @@
 def publish(channel, event: events.Event):
     logging.debug("publishing: channel=%s, event=%s", channel, event)
     r.publish(channel, json.dumps(asdict(event)))
-
-
-def update_readmodel(orderid, sku, batchref):
-    r.hset(orderid, sku, batchref)
-
-
-def get_readmodel(orderid):
-    return r.hgetall(orderid)
diff --git a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py
index c1cb915b..ed482390 100644
--- a/src/allocation/entrypoints/flask_app.py
+++ b/src/allocation/entrypoints/flask_app.py
@@ -40,7 +40,8 @@ def allocate_endpoint():

 @app.route("/allocations/<orderid>", methods=["GET"])
 def allocations_view_endpoint(orderid):
-    result = views.allocations(orderid)
+    uow = unit_of_work.SqlAlchemyUnitOfWork()
+    result = views.allocations(orderid, uow)
     if not result:
         return "not found", 404
     return jsonify(result), 200
diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index 0ad6d849..fa2698a4 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -80,9 +80,31 @@ def publish_allocated_event(
     redis_eventpublisher.publish("line_allocated", event)


-def add_allocation_to_read_model(event: events.Allocated, _):
-    redis_eventpublisher.update_readmodel(event.orderid, event.sku, event.batchref)
+def add_allocation_to_read_model(
+    event: events.Allocated,
+    uow: unit_of_work.SqlAlchemyUnitOfWork,
+):
+    with uow:
+        uow.session.execute(
+            """
+            INSERT INTO allocations_view (orderid, sku, batchref)
+            VALUES (:orderid, :sku, :batchref)
+            """,
+            dict(orderid=event.orderid, sku=event.sku, batchref=event.batchref),
+        )
+        uow.commit()


-def remove_allocation_from_read_model(event: events.Deallocated, _):
-    redis_eventpublisher.update_readmodel(event.orderid, event.sku, None)
+def remove_allocation_from_read_model(
+    event: events.Deallocated,
+    uow: unit_of_work.SqlAlchemyUnitOfWork,
+):
+    with uow:
+        uow.session.execute(
+            """
+            DELETE FROM allocations_view
+            WHERE orderid = :orderid AND sku = :sku
+            """,
+            dict(orderid=event.orderid, sku=event.sku),
+        )
+        uow.commit()
diff --git a/src/allocation/views.py b/src/allocation/views.py
index b9dc175f..a952887f 100644
--- a/src/allocation/views.py
+++ b/src/allocation/views.py
@@ -1,10 +1,12 @@
-from allocation.adapters import redis_eventpublisher
 from allocation.service_layer import unit_of_work


-def allocations(orderid: str):
-    batches = redis_eventpublisher.get_readmodel(orderid)
-    return [
-        {"batchref": b.decode(), "sku": s.decode()}
-        for s, b in batches.items()
-    ]
+def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork):
+    with uow:
+        results = uow.session.execute(
+            """
+            SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid
+            """,
+            dict(orderid=orderid),
+        )
+    return [dict(r) for r in results]
diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py
index f38f75a4..4dda3e54 100644
--- a/tests/integration/test_views.py
+++ b/tests/integration/test_views.py
@@ -1,25 +1,11 @@
 from datetime import date
-import pytest
-import redis
-from allocation import config, views
+from allocation import views
 from allocation.domain import commands
 from allocation.service_layer import messagebus, unit_of_work

 today = date.today()


-@pytest.fixture
-def cleanup_redis():
-    r = redis.Redis(**config.get_redis_host_and_port())
-    yield
-    for k in r.keys():
-        print("cleaning up redis key", k)
-        r.delete(k)
-
-
-pytestmark = pytest.mark.usefixtures("cleanup_redis")
-
-
 def test_allocations_view(sqlite_session_factory):
     uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
     messagebus.handle(commands.CreateBatch("sku1batch", "sku1", 50, None), uow)
@@ -17,7 +31,7 @@ def test_allocations_view(sqlite_session_factory):
     messagebus.handle(commands.Allocate("otherorder", "sku1", 30), uow)
     messagebus.handle(commands.Allocate("otherorder", "sku2", 10), uow)

-    assert views.allocations("order1") == [
+    assert views.allocations("order1", uow) == [
         {"sku": "sku1", "batchref": "sku1batch"},
         {"sku": "sku2", "batchref": "sku2batch"},
     ]
@@ -30,6 +44,6 @@ def test_deallocation(sqlite_session_factory):
     messagebus.handle(commands.Allocate("o1", "sku1", 40), uow)
     messagebus.handle(commands.ChangeBatchQuantity("b1", 10), uow)

-    assert views.allocations("o1") == [
+    assert views.allocations("o1", uow) == [
         {"sku": "sku1", "batchref": "b2"},
     ]

From ed8f77e99ed1a6698a0affad9b7aa9a4885537b1 Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 15 Jul 2019 16:57:14 +0100
Subject: [PATCH 35/64] handlers now have all and only explicit dependencies
 [handler_with_explicit_dependency]

---
 src/allocation/service_layer/handlers.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py
index fa2698a4..bcaf1f44 100644
--- a/src/allocation/service_layer/handlers.py
+++ b/src/allocation/service_layer/handlers.py
@@ -1,8 +1,7 @@
 # pylint: disable=unused-argument
 from __future__ import annotations
 from dataclasses import asdict
-from typing import TYPE_CHECKING
-from allocation.adapters import email, redis_eventpublisher
+from typing import Callable, TYPE_CHECKING
 from allocation.domain import commands, events, model
 from allocation.domain.model import OrderLine

@@ -65,9 +64,9 @@ def change_batch_quantity(

 def send_out_of_stock_notification(
     event: events.OutOfStock,
-    uow: unit_of_work.AbstractUnitOfWork,
+    send_mail: Callable,
 ):
-    email.send(
+    send_mail(
         "stock@made.com",
         f"Out of stock for {event.sku}",
     )
@@ -75,9 +74,9 @@ def send_out_of_stock_notification(

 def publish_allocated_event(
     event: events.Allocated,
-    uow: unit_of_work.AbstractUnitOfWork,
+    publish: Callable,
 ):
-    redis_eventpublisher.publish("line_allocated", event)
+    publish("line_allocated", event)

From 61c9877a36068ca8c3a86dab773ae29e1c72338c Mon Sep 17 00:00:00 2001
From: Harry
Date: Mon, 6 Jan 2020 07:47:34 +0000 Subject: [PATCH 36/64] change reallocate handler to avoid cmd/event clash --- src/allocation/service_layer/handlers.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py index bcaf1f44..f56e6f77 100644 --- a/src/allocation/service_layer/handlers.py +++ b/src/allocation/service_layer/handlers.py @@ -43,10 +43,7 @@ def reallocate( event: events.Deallocated, uow: unit_of_work.AbstractUnitOfWork, ): - with uow: - product = uow.products.get(sku=event.sku) - product.events.append(commands.Allocate(**asdict(event))) - uow.commit() + allocate(commands.Allocate(**asdict(event)), uow=uow) def change_batch_quantity( From 4f2892841d11b46d196bd7a8056f500fbdae511a Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 6 Jan 2020 07:47:56 +0000 Subject: [PATCH 37/64] bring static handlers dicts across from messagebus --- src/allocation/service_layer/handlers.py | 15 ++++++++++++++- src/allocation/service_layer/messagebus.py | 21 +-------------------- 2 files changed, 15 insertions(+), 21 deletions(-) diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py index f56e6f77..6ab2bbe9 100644 --- a/src/allocation/service_layer/handlers.py +++ b/src/allocation/service_layer/handlers.py @@ -1,7 +1,7 @@ # pylint: disable=unused-argument from __future__ import annotations from dataclasses import asdict -from typing import Callable, TYPE_CHECKING +from typing import List, Dict, Callable, Type, TYPE_CHECKING from allocation.domain import commands, events, model from allocation.domain.model import OrderLine @@ -104,3 +104,16 @@ def remove_allocation_from_read_model( dict(orderid=event.orderid, sku=event.sku), ) uow.commit() + + +EVENT_HANDLERS = { + events.Allocated: [publish_allocated_event, add_allocation_to_read_model], + events.Deallocated: [remove_allocation_from_read_model, reallocate], + events.OutOfStock: [send_out_of_stock_notification], +} # type: Dict[Type[events.Event], List[Callable]] + +COMMAND_HANDLERS = { + commands.Allocate: allocate, + commands.CreateBatch: add_batch, + commands.ChangeBatchQuantity: change_batch_quantity, +} # type: Dict[Type[commands.Command], Callable] diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py index 104111fa..12b2965b 100644 --- a/src/allocation/service_layer/messagebus.py +++ b/src/allocation/service_layer/messagebus.py @@ -1,7 +1,7 @@ # pylint: disable=broad-except from __future__ import annotations import logging -from typing import List, Dict, Callable, Type, Union, TYPE_CHECKING +from typing import Union, TYPE_CHECKING from allocation.domain import commands, events from . 
import handlers @@ -53,22 +53,3 @@ def handle_command( except Exception: logger.exception("Exception handling command %s", command) raise - - -EVENT_HANDLERS = { - events.Allocated: [ - handlers.publish_allocated_event, - handlers.add_allocation_to_read_model, - ], - events.Deallocated: [ - handlers.remove_allocation_from_read_model, - handlers.reallocate, - ], - events.OutOfStock: [handlers.send_out_of_stock_notification], -} # type: Dict[Type[events.Event], List[Callable]] - -COMMAND_HANDLERS = { - commands.Allocate: handlers.allocate, - commands.CreateBatch: handlers.add_batch, - commands.ChangeBatchQuantity: handlers.change_batch_quantity, -} # type: Dict[Type[commands.Command], Callable] From 36781d7f1b4b70475fef3b05a8e87751a62fe66e Mon Sep 17 00:00:00 2001 From: Harry Date: Sun, 5 Jan 2020 17:18:11 +0000 Subject: [PATCH 38/64] messagebus becomes a class, requires handlers [messagebus_as_class] --- src/allocation/service_layer/messagebus.py | 34 ++++++++++++++-------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py index 12b2965b..d210d66f 100644 --- a/src/allocation/service_layer/messagebus.py +++ b/src/allocation/service_layer/messagebus.py @@ -1,9 +1,8 @@ # pylint: disable=broad-except from __future__ import annotations import logging -from typing import Union, TYPE_CHECKING +from typing import Callable, Dict, List, Union, Type, TYPE_CHECKING from allocation.domain import commands, events -from . import handlers if TYPE_CHECKING: from . import unit_of_work @@ -13,16 +12,27 @@ Message = Union[commands.Command, events.Event] -def handle(message: Message, uow: unit_of_work.AbstractUnitOfWork): - queue = [message] - while queue: - message = queue.pop(0) - if isinstance(message, events.Event): - handle_event(message, queue, uow) - elif isinstance(message, commands.Command): - handle_command(message, queue, uow) - else: - raise Exception(f"{message} was not an Event or Command") +class MessageBus: + def __init__( + self, + uow: unit_of_work.AbstractUnitOfWork, + event_handlers: Dict[Type[events.Event], List[Callable]], + command_handlers: Dict[Type[commands.Command], Callable], + ): + self.uow = uow + self.event_handlers = event_handlers + self.command_handlers = command_handlers + + def handle(self, message: Message): + self.queue = [message] + while self.queue: + message = self.queue.pop(0) + if isinstance(message, events.Event): + self.handle_event(message) + elif isinstance(message, commands.Command): + self.handle_command(message) + else: + raise Exception(f"{message} was not an Event or Command") def handle_event( From 7e21991ff28b3802b5c4031edab410785f89fbf0 Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 7 Jan 2020 14:48:53 +0000 Subject: [PATCH 39/64] use self.handlers for handle_event and handle_command [messagebus_handlers_change] --- src/allocation/service_layer/messagebus.py | 46 +++++++++------------- 1 file changed, 18 insertions(+), 28 deletions(-) diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py index d210d66f..45679341 100644 --- a/src/allocation/service_layer/messagebus.py +++ b/src/allocation/service_layer/messagebus.py @@ -1,4 +1,4 @@ -# pylint: disable=broad-except +# pylint: disable=broad-except, attribute-defined-outside-init from __future__ import annotations import logging from typing import Callable, Dict, List, Union, Type, TYPE_CHECKING @@ -34,32 +34,22 @@ def handle(self, message: Message): else: raise 
Exception(f"{message} was not an Event or Command") - -def handle_event( - event: events.Event, - queue: List[Message], - uow: unit_of_work.AbstractUnitOfWork, -): - for handler in EVENT_HANDLERS[type(event)]: + def handle_event(self, event: events.Event): + for handler in self.event_handlers[type(event)]: + try: + logger.debug("handling event %s with handler %s", event, handler) + handler(event) + self.queue.extend(self.uow.collect_new_events()) + except Exception: + logger.exception("Exception handling event %s", event) + continue + + def handle_command(self, command: commands.Command): + logger.debug("handling command %s", command) try: - logger.debug("handling event %s with handler %s", event, handler) - handler(event, uow=uow) - queue.extend(uow.collect_new_events()) + handler = self.command_handlers[type(command)] + handler(command) + self.queue.extend(self.uow.collect_new_events()) except Exception: - logger.exception("Exception handling event %s", event) - continue - - -def handle_command( - command: commands.Command, - queue: List[Message], - uow: unit_of_work.AbstractUnitOfWork, -): - logger.debug("handling command %s", command) - try: - handler = COMMAND_HANDLERS[type(command)] - handler(command, uow=uow) - queue.extend(uow.collect_new_events()) - except Exception: - logger.exception("Exception handling command %s", command) - raise + logger.exception("Exception handling command %s", command) + raise From ff35294d1dc2bc480b0eb93460ec9fde17842544 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 15 Jul 2019 17:08:54 +0100 Subject: [PATCH 40/64] conftest change to backport, session_factory -> sqlite_session_factory --- tests/conftest.py | 11 +++-------- tests/integration/test_repository.py | 5 +++-- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 3b2a8066..8c1efcf7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,24 +18,19 @@ @pytest.fixture -def in_memory_db(): +def in_memory_sqlite_db(): engine = create_engine("sqlite:///:memory:") metadata.create_all(engine) return engine @pytest.fixture -def sqlite_session_factory(in_memory_db): +def sqlite_session_factory(in_memory_sqlite_db): start_mappers() - yield sessionmaker(bind=in_memory_db) + yield sessionmaker(bind=in_memory_sqlite_db) clear_mappers() -@pytest.fixture -def sqlite_session(sqlite_session_factory): - return sqlite_session_factory() - - @retry(stop=stop_after_delay(10)) def wait_for_postgres_to_come_up(engine): return engine.connect() diff --git a/tests/integration/test_repository.py b/tests/integration/test_repository.py index 9b637f1a..245b26f2 100644 --- a/tests/integration/test_repository.py +++ b/tests/integration/test_repository.py @@ -2,8 +2,9 @@ from allocation.domain import model -def test_get_by_batchref(sqlite_session): - repo = repository.SqlAlchemyRepository(sqlite_session) +def test_get_by_batchref(sqlite_session_factory): + session = sqlite_session_factory() + repo = repository.SqlAlchemyRepository(session) b1 = model.Batch(ref="b1", sku="sku1", qty=100, eta=None) b2 = model.Batch(ref="b2", sku="sku1", qty=100, eta=None) b3 = model.Batch(ref="b3", sku="sku2", qty=100, eta=None) From 79c6b45bcbcc06cc195c60739eeefee0a2a65ccd Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 15 Jul 2019 17:09:18 +0100 Subject: [PATCH 41/64] conftest change to backport to ch 5, isolation serializable in all pg tests --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 
8c1efcf7..b0ceb0f2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,7 +49,7 @@ def wait_for_redis_to_come_up(): @pytest.fixture(scope="session") def postgres_db(): - engine = create_engine(config.get_postgres_uri()) + engine = create_engine(config.get_postgres_uri(), isolation_level="SERIALIZABLE") wait_for_postgres_to_come_up(engine) metadata.create_all(engine) return engine From fe6f4996e9402851a702fbb8c4edc34694f128a3 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 15 Jul 2019 17:09:59 +0100 Subject: [PATCH 42/64] uow tests maybe backport, pass explicit uow to threads --- tests/integration/test_uow.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/tests/integration/test_uow.py b/tests/integration/test_uow.py index 55c4f24d..db705849 100644 --- a/tests/integration/test_uow.py +++ b/tests/integration/test_uow.py @@ -1,4 +1,4 @@ -# pylint: disable=broad-except +# pylint: disable=broad-except, too-many-arguments import threading import time import traceback @@ -75,15 +75,15 @@ class MyException(Exception): assert rows == [] -def try_to_allocate(orderid, sku, exceptions): +def try_to_allocate(orderid, sku, exceptions, session_factory): line = model.OrderLine(orderid, sku, 10) try: - with unit_of_work.SqlAlchemyUnitOfWork() as uow: + with unit_of_work.SqlAlchemyUnitOfWork(session_factory) as uow: product = uow.products.get(sku=sku) product.allocate(line) time.sleep(0.2) uow.commit() - except Exception as e: + except Exception as e: # pylint: disable=broad-except print(traceback.format_exc()) exceptions.append(e) @@ -96,8 +96,12 @@ def test_concurrent_updates_to_version_are_not_allowed(postgres_session_factory) order1, order2 = random_orderid(1), random_orderid(2) exceptions = [] # type: List[Exception] - try_to_allocate_order1 = lambda: try_to_allocate(order1, sku, exceptions) - try_to_allocate_order2 = lambda: try_to_allocate(order2, sku, exceptions) + try_to_allocate_order1 = lambda: try_to_allocate( + order1, sku, exceptions, postgres_session_factory + ) + try_to_allocate_order2 = lambda: try_to_allocate( + order2, sku, exceptions, postgres_session_factory + ) thread1 = threading.Thread(target=try_to_allocate_order1) thread2 = threading.Thread(target=try_to_allocate_order2) thread1.start() @@ -121,5 +125,5 @@ def test_concurrent_updates_to_version_are_not_allowed(postgres_session_factory) dict(sku=sku), ) assert orders.rowcount == 1 - with unit_of_work.SqlAlchemyUnitOfWork() as uow: + with unit_of_work.SqlAlchemyUnitOfWork(postgres_session_factory) as uow: uow.session.execute("select 1") From eb869900133711ae6c47471b0cc510850207ce63 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 7 Oct 2019 14:29:59 +0100 Subject: [PATCH 43/64] bootstrap script preps DI'd handlers and start orm [bootstrap_script] --- src/allocation/bootstrap.py | 44 +++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 src/allocation/bootstrap.py diff --git a/src/allocation/bootstrap.py b/src/allocation/bootstrap.py new file mode 100644 index 00000000..805b13f5 --- /dev/null +++ b/src/allocation/bootstrap.py @@ -0,0 +1,44 @@ +import inspect +from typing import Callable +from allocation.adapters import email, orm, redis_eventpublisher +from allocation.service_layer import handlers, messagebus, unit_of_work + + +def bootstrap( + start_orm: bool = True, + uow: unit_of_work.AbstractUnitOfWork = unit_of_work.SqlAlchemyUnitOfWork(), + send_mail: Callable = email.send, + publish: Callable = redis_eventpublisher.publish, +) -> 
messagebus.MessageBus: + + if start_orm: + orm.start_mappers() + + dependencies = {"uow": uow, "send_mail": send_mail, "publish": publish} + injected_event_handlers = { + event_type: [ + inject_dependencies(handler, dependencies) + for handler in event_handlers + ] + for event_type, event_handlers in handlers.EVENT_HANDLERS.items() + } + injected_command_handlers = { + command_type: inject_dependencies(handler, dependencies) + for command_type, handler in handlers.COMMAND_HANDLERS.items() + } + + return messagebus.MessageBus( + uow=uow, + event_handlers=injected_event_handlers, + command_handlers=injected_command_handlers, + ) + + +def inject_dependencies(handler, dependencies): + params = inspect.signature(handler).parameters + deps = { + name: dependency + for name, dependency in dependencies.items() + if name in params + } + return lambda message: handler(message, **deps) From 97687278f8be44150bf5f168ea9f2a86b5702032 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 15 Jul 2019 17:03:43 +0100 Subject: [PATCH 44/64] use bootstrap in service layer tests [bootstrap_tests] --- tests/unit/test_handlers.py | 110 +++++++++++++++++++----------------- 1 file changed, 57 insertions(+), 53 deletions(-) diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py index 04fb9630..cb21461e 100644 --- a/tests/unit/test_handlers.py +++ b/tests/unit/test_handlers.py @@ -1,10 +1,12 @@ # pylint: disable=no-self-use +from __future__ import annotations from datetime import date from unittest import mock import pytest +from allocation import bootstrap from allocation.adapters import repository -from allocation.domain import commands, events -from allocation.service_layer import handlers, messagebus, unit_of_work +from allocation.domain import commands +from allocation.service_layer import handlers, unit_of_work class FakeRepository(repository.AbstractRepository): @@ -37,81 +39,83 @@ def rollback(self): pass +def bootstrap_test_app(): + return bootstrap.bootstrap( + start_orm=False, + uow=FakeUnitOfWork(), + send_mail=lambda *args: None, + publish=lambda *args: None, + ) + + class TestAddBatch: def test_for_new_product(self): - uow = FakeUnitOfWork() - messagebus.handle( - commands.CreateBatch("b1", "CRUNCHY-ARMCHAIR", 100, None), uow - ) - assert uow.products.get("CRUNCHY-ARMCHAIR") is not None - assert uow.committed + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "CRUNCHY-ARMCHAIR", 100, None)) + assert bus.uow.products.get("CRUNCHY-ARMCHAIR") is not None + assert bus.uow.committed def test_for_existing_product(self): - uow = FakeUnitOfWork() - messagebus.handle(commands.CreateBatch("b1", "GARISH-RUG", 100, None), uow) - messagebus.handle(commands.CreateBatch("b2", "GARISH-RUG", 99, None), uow) - assert "b2" in [b.reference for b in uow.products.get("GARISH-RUG").batches] - - -@pytest.fixture(autouse=True) -def fake_redis_publish(): - with mock.patch("allocation.adapters.redis_eventpublisher.publish"): - yield + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "GARISH-RUG", 100, None)) + bus.handle(commands.CreateBatch("b2", "GARISH-RUG", 99, None)) + assert "b2" in [ + b.reference for b in bus.uow.products.get("GARISH-RUG").batches + ] class TestAllocate: def test_allocates(self): - uow = FakeUnitOfWork() - messagebus.handle( - commands.CreateBatch("batch1", "COMPLICATED-LAMP", 100, None), uow - ) - messagebus.handle(commands.Allocate("o1", "COMPLICATED-LAMP", 10), uow) - [batch] = uow.products.get("COMPLICATED-LAMP").batches + bus = bootstrap_test_app() + 
bus.handle(commands.CreateBatch("batch1", "COMPLICATED-LAMP", 100, None)) + bus.handle(commands.Allocate("o1", "COMPLICATED-LAMP", 10)) + [batch] = bus.uow.products.get("COMPLICATED-LAMP").batches assert batch.available_quantity == 90 def test_errors_for_invalid_sku(self): - uow = FakeUnitOfWork() - messagebus.handle(commands.CreateBatch("b1", "AREALSKU", 100, None), uow) + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "AREALSKU", 100, None)) with pytest.raises(handlers.InvalidSku, match="Invalid sku NONEXISTENTSKU"): - messagebus.handle(commands.Allocate("o1", "NONEXISTENTSKU", 10), uow) + bus.handle(commands.Allocate("o1", "NONEXISTENTSKU", 10)) def test_commits(self): - uow = FakeUnitOfWork() - messagebus.handle( - commands.CreateBatch("b1", "OMINOUS-MIRROR", 100, None), uow - ) - messagebus.handle(commands.Allocate("o1", "OMINOUS-MIRROR", 10), uow) - assert uow.committed + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "OMINOUS-MIRROR", 100, None)) + bus.handle(commands.Allocate("o1", "OMINOUS-MIRROR", 10)) + assert bus.uow.committed def test_sends_email_on_out_of_stock_error(self): - uow = FakeUnitOfWork() - messagebus.handle( - commands.CreateBatch("b1", "POPULAR-CURTAINS", 9, None), uow - ) + emails = [] - with mock.patch("allocation.adapters.email.send") as mock_send_mail: - messagebus.handle(commands.Allocate("o1", "POPULAR-CURTAINS", 10), uow) - assert mock_send_mail.call_args == mock.call( - "stock@made.com", f"Out of stock for POPULAR-CURTAINS" - ) + def fake_send_mail(*args): + emails.append(args) + + bus = bootstrap.bootstrap( + start_orm=False, + uow=FakeUnitOfWork(), + send_mail=fake_send_mail, + publish=lambda *args: None, + ) + bus.handle(commands.CreateBatch("b1", "POPULAR-CURTAINS", 9, None)) + bus.handle(commands.Allocate("o1", "POPULAR-CURTAINS", 10)) + assert emails == [ + ("stock@made.com", f"Out of stock for POPULAR-CURTAINS"), + ] class TestChangeBatchQuantity: def test_changes_available_quantity(self): - uow = FakeUnitOfWork() - messagebus.handle( - commands.CreateBatch("batch1", "ADORABLE-SETTEE", 100, None), uow - ) - [batch] = uow.products.get(sku="ADORABLE-SETTEE").batches + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("batch1", "ADORABLE-SETTEE", 100, None)) + [batch] = bus.uow.products.get(sku="ADORABLE-SETTEE").batches assert batch.available_quantity == 100 - messagebus.handle(commands.ChangeBatchQuantity("batch1", 50), uow) - + bus.handle(commands.ChangeBatchQuantity("batch1", 50)) assert batch.available_quantity == 50 def test_reallocates_if_necessary(self): - uow = FakeUnitOfWork() + bus = bootstrap_test_app() history = [ commands.CreateBatch("batch1", "INDIFFERENT-TABLE", 50, None), commands.CreateBatch("batch2", "INDIFFERENT-TABLE", 50, date.today()), @@ -119,12 +123,12 @@ def test_reallocates_if_necessary(self): commands.Allocate("order2", "INDIFFERENT-TABLE", 20), ] for msg in history: - messagebus.handle(msg, uow) - [batch1, batch2] = uow.products.get(sku="INDIFFERENT-TABLE").batches + bus.handle(msg) + [batch1, batch2] = bus.uow.products.get(sku="INDIFFERENT-TABLE").batches assert batch1.available_quantity == 10 assert batch2.available_quantity == 50 - messagebus.handle(commands.ChangeBatchQuantity("batch1", 25), uow) + bus.handle(commands.ChangeBatchQuantity("batch1", 25)) # order1 or order2 will be deallocated, so we'll have 25 - 20 assert batch1.available_quantity == 5 From 206081019ea47a63de8a3c772b3ec4961ca629a9 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 6 Jan 2020 07:32:55 +0000 
Subject: [PATCH 45/64] fixture to start mappers explicitly, use in repo and uow tests --- tests/conftest.py | 9 ++++++--- tests/integration/test_repository.py | 3 +++ tests/integration/test_uow.py | 3 +++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index b0ceb0f2..f91f93a0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,8 +26,13 @@ def in_memory_sqlite_db(): @pytest.fixture def sqlite_session_factory(in_memory_sqlite_db): - start_mappers() yield sessionmaker(bind=in_memory_sqlite_db) + + +@pytest.fixture +def mappers(): + start_mappers() + yield clear_mappers() @@ -57,9 +62,7 @@ def postgres_db(): @pytest.fixture def postgres_session_factory(postgres_db): - start_mappers() yield sessionmaker(bind=postgres_db) - clear_mappers() @pytest.fixture diff --git a/tests/integration/test_repository.py b/tests/integration/test_repository.py index 245b26f2..7961be2a 100644 --- a/tests/integration/test_repository.py +++ b/tests/integration/test_repository.py @@ -1,6 +1,9 @@ +import pytest from allocation.adapters import repository from allocation.domain import model +pytestmark = pytest.mark.usefixtures("mappers") + def test_get_by_batchref(sqlite_session_factory): session = sqlite_session_factory() diff --git a/tests/integration/test_uow.py b/tests/integration/test_uow.py index db705849..61f3aae5 100644 --- a/tests/integration/test_uow.py +++ b/tests/integration/test_uow.py @@ -3,11 +3,14 @@ import time import traceback from typing import List +from unittest.mock import Mock import pytest from allocation.domain import model from allocation.service_layer import unit_of_work from ..random_refs import random_sku, random_batchref, random_orderid +pytestmark = pytest.mark.usefixtures("mappers") + def insert_batch(session, ref, sku, qty, eta, product_version=1): session.execute( From 2105f0c03c5a330c0b994e3989aaa53924bccd63 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 6 Jan 2020 05:07:03 +0000 Subject: [PATCH 46/64] fix view tests to use bootstrap. 
[bootstrap_view_tests] --- tests/integration/test_views.py | 51 +++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py index 4dda3e54..1eea394e 100644 --- a/tests/integration/test_views.py +++ b/tests/integration/test_views.py @@ -1,35 +1,48 @@ +# pylint: disable=redefined-outer-name from datetime import date -from allocation import views +from sqlalchemy.orm import clear_mappers +import pytest +from allocation import bootstrap, views from allocation.domain import commands -from allocation.service_layer import messagebus, unit_of_work +from allocation.service_layer import unit_of_work today = date.today() -def test_allocations_view(sqlite_session_factory): - uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory) - messagebus.handle(commands.CreateBatch("sku1batch", "sku1", 50, None), uow) - messagebus.handle(commands.CreateBatch("sku2batch", "sku2", 50, today), uow) - messagebus.handle(commands.Allocate("order1", "sku1", 20), uow) - messagebus.handle(commands.Allocate("order1", "sku2", 20), uow) +@pytest.fixture +def sqlite_bus(sqlite_session_factory): + bus = bootstrap.bootstrap( + start_orm=True, + uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory), + send_mail=lambda *args: None, + publish=lambda *args: None, + ) + yield bus + clear_mappers() + + +def test_allocations_view(sqlite_bus): + sqlite_bus.handle(commands.CreateBatch("sku1batch", "sku1", 50, None)) + sqlite_bus.handle(commands.CreateBatch("sku2batch", "sku2", 50, today)) + sqlite_bus.handle(commands.Allocate("order1", "sku1", 20)) + sqlite_bus.handle(commands.Allocate("order1", "sku2", 20)) # add a spurious batch and order to make sure we're getting the right ones - messagebus.handle(commands.CreateBatch("sku1batch-later", "sku1", 50, today), uow) - messagebus.handle(commands.Allocate("otherorder", "sku1", 30), uow) - messagebus.handle(commands.Allocate("otherorder", "sku2", 10), uow) + sqlite_bus.handle(commands.CreateBatch("sku1batch-later", "sku1", 50, today)) + sqlite_bus.handle(commands.Allocate("otherorder", "sku1", 30)) + sqlite_bus.handle(commands.Allocate("otherorder", "sku2", 10)) - assert views.allocations("order1", uow) == [ + assert views.allocations("order1", sqlite_bus.uow) == [ {"sku": "sku1", "batchref": "sku1batch"}, {"sku": "sku2", "batchref": "sku2batch"}, ] -def test_deallocation(sqlite_session_factory): - uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory) - messagebus.handle(commands.CreateBatch("b1", "sku1", 50, None), uow) - messagebus.handle(commands.CreateBatch("b2", "sku1", 50, today), uow) - messagebus.handle(commands.Allocate("o1", "sku1", 40), uow) - messagebus.handle(commands.ChangeBatchQuantity("b1", 10), uow) +def test_deallocation(sqlite_bus): + sqlite_bus.handle(commands.CreateBatch("b1", "sku1", 50, None)) + sqlite_bus.handle(commands.CreateBatch("b2", "sku1", 50, today)) + sqlite_bus.handle(commands.Allocate("o1", "sku1", 40)) + sqlite_bus.handle(commands.ChangeBatchQuantity("b1", 10)) - assert views.allocations("o1", uow) == [ + assert views.allocations("o1", sqlite_bus.uow) == [ {"sku": "sku1", "batchref": "b2"}, ] From 44adc609d3ee261577fae5d2803b9ac9a9b901f7 Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 7 Oct 2019 14:44:15 +0100 Subject: [PATCH 47/64] use bootstrap in flask [flask_calls_bootstrap] --- src/allocation/entrypoints/flask_app.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git 
a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py index ed482390..f50f3edd 100644 --- a/src/allocation/entrypoints/flask_app.py +++ b/src/allocation/entrypoints/flask_app.py @@ -1,14 +1,11 @@ from datetime import datetime from flask import Flask, jsonify, request - from allocation.domain import commands -from allocation.adapters import orm -from allocation.service_layer import messagebus, unit_of_work from allocation.service_layer.handlers import InvalidSku -from allocation import views +from allocation import bootstrap, views app = Flask(__name__) -orm.start_mappers() +bus = bootstrap.bootstrap() @app.route("/add_batch", methods=["POST"]) @@ -19,8 +16,7 @@ def add_batch(): cmd = commands.CreateBatch( request.json["ref"], request.json["sku"], request.json["qty"], eta ) - uow = unit_of_work.SqlAlchemyUnitOfWork() - messagebus.handle(cmd, uow) + bus.handle(cmd) return "OK", 201 @@ -30,8 +26,7 @@ def allocate_endpoint(): cmd = commands.Allocate( request.json["orderid"], request.json["sku"], request.json["qty"] ) - uow = unit_of_work.SqlAlchemyUnitOfWork() - messagebus.handle(cmd, uow) + bus.handle(cmd) except InvalidSku as e: return {"message": str(e)}, 400 @@ -40,8 +35,7 @@ def allocate_endpoint(): @app.route("/allocations/", methods=["GET"]) def allocations_view_endpoint(orderid): - uow = unit_of_work.SqlAlchemyUnitOfWork() - result = views.allocations(orderid, uow) + result = views.allocations(orderid, bus.uow) if not result: return "not found", 404 return jsonify(result), 200 From a82e57fd57dee284ad529a1efa3cc020d92d070a Mon Sep 17 00:00:00 2001 From: Harry Date: Mon, 6 Jan 2020 06:59:59 +0000 Subject: [PATCH 48/64] use bootstrap for redis --- src/allocation/entrypoints/redis_eventconsumer.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/allocation/entrypoints/redis_eventconsumer.py b/src/allocation/entrypoints/redis_eventconsumer.py index e04a8142..9b5b93d0 100644 --- a/src/allocation/entrypoints/redis_eventconsumer.py +++ b/src/allocation/entrypoints/redis_eventconsumer.py @@ -2,10 +2,8 @@ import logging import redis -from allocation import config +from allocation import bootstrap, config from allocation.domain import commands -from allocation.adapters import orm -from allocation.service_layer import messagebus, unit_of_work logger = logging.getLogger(__name__) @@ -13,19 +11,19 @@ def main(): - orm.start_mappers() + bus = bootstrap.bootstrap() pubsub = r.pubsub(ignore_subscribe_messages=True) pubsub.subscribe("change_batch_quantity") for m in pubsub.listen(): - handle_change_batch_quantity(m) + handle_change_batch_quantity(m, bus) -def handle_change_batch_quantity(m): +def handle_change_batch_quantity(m, bus): logging.debug("handling %s", m) data = json.loads(m["data"]) cmd = commands.ChangeBatchQuantity(ref=data["batchref"], qty=data["qty"]) - messagebus.handle(cmd, uow=unit_of_work.SqlAlchemyUnitOfWork()) + bus.handle(cmd) if __name__ == "__main__": From 9ee84345231e55b4caff515a0739450ac34289be Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 7 Jan 2020 22:32:52 +0000 Subject: [PATCH 49/64] experiment with nonmagic DI zzzz [nomagic_di] --- src/allocation/bootstrap.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/allocation/bootstrap.py b/src/allocation/bootstrap.py index 805b13f5..94f26dd1 100644 --- a/src/allocation/bootstrap.py +++ b/src/allocation/bootstrap.py @@ -1,5 +1,6 @@ import inspect from typing import Callable +from allocation.domain import commands, 
events from allocation.adapters import email, orm, redis_eventpublisher from allocation.service_layer import handlers, messagebus, unit_of_work @@ -14,17 +15,24 @@ def bootstrap( if start_orm: orm.start_mappers() - dependencies = {"uow": uow, "send_mail": send_mail, "publish": publish} injected_event_handlers = { - event_type: [ - inject_dependencies(handler, dependencies) - for handler in event_handlers - ] - for event_type, event_handlers in handlers.EVENT_HANDLERS.items() + events.Allocated: [ + lambda e: handlers.publish_allocated_event(e, publish), + lambda e: handlers.add_allocation_to_read_model(e, uow), + ], + events.Deallocated: [ + lambda e: handlers.remove_allocation_from_read_model(e, uow), + lambda e: handlers.reallocate(e, uow), + ], + events.OutOfStock: [ + lambda e: handlers.send_out_of_stock_notification(e, send_mail) + ], } injected_command_handlers = { - command_type: inject_dependencies(handler, dependencies) - for command_type, handler in handlers.COMMAND_HANDLERS.items() + commands.Allocate: lambda c: handlers.allocate(c, uow), + commands.CreateBatch: lambda c: handlers.add_batch(c, uow), + commands.ChangeBatchQuantity: \ + lambda c: handlers.change_batch_quantity(c, uow), } return messagebus.MessageBus( From f09d269b8c4091de05d4dea7e8910d5a6dfb3b63 Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 7 Jan 2020 22:36:23 +0000 Subject: [PATCH 50/64] Revert "experiment with nonmagic DI zzzz" This reverts commit f42b193857eec8560443767de9fd7ada9f2acb96. --- src/allocation/bootstrap.py | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/src/allocation/bootstrap.py b/src/allocation/bootstrap.py index 94f26dd1..805b13f5 100644 --- a/src/allocation/bootstrap.py +++ b/src/allocation/bootstrap.py @@ -1,6 +1,5 @@ import inspect from typing import Callable -from allocation.domain import commands, events from allocation.adapters import email, orm, redis_eventpublisher from allocation.service_layer import handlers, messagebus, unit_of_work @@ -15,24 +14,17 @@ def bootstrap( if start_orm: orm.start_mappers() + dependencies = {"uow": uow, "send_mail": send_mail, "publish": publish} injected_event_handlers = { - events.Allocated: [ - lambda e: handlers.publish_allocated_event(e, publish), - lambda e: handlers.add_allocation_to_read_model(e, uow), - ], - events.Deallocated: [ - lambda e: handlers.remove_allocation_from_read_model(e, uow), - lambda e: handlers.reallocate(e, uow), - ], - events.OutOfStock: [ - lambda e: handlers.send_out_of_stock_notification(e, send_mail) - ], + event_type: [ + inject_dependencies(handler, dependencies) + for handler in event_handlers + ] + for event_type, event_handlers in handlers.EVENT_HANDLERS.items() } injected_command_handlers = { - commands.Allocate: lambda c: handlers.allocate(c, uow), - commands.CreateBatch: lambda c: handlers.add_batch(c, uow), - commands.ChangeBatchQuantity: \ - lambda c: handlers.change_batch_quantity(c, uow), + command_type: inject_dependencies(handler, dependencies) + for command_type, handler in handlers.COMMAND_HANDLERS.items() } return messagebus.MessageBus( From d6141c7f00f33f7be53d69d788783f7fb9a3d030 Mon Sep 17 00:00:00 2001 From: Harry Date: Sun, 14 Jul 2019 02:55:46 +0100 Subject: [PATCH 51/64] switch to a notifications class [notifications_class] --- src/allocation/adapters/email.py | 2 -- src/allocation/adapters/notifications.py | 28 ++++++++++++++++++++++++ src/allocation/bootstrap.py | 13 ++++++++--- src/allocation/config.py | 7 ++++++ 
src/allocation/service_layer/handlers.py | 5 +++-- 5 files changed, 48 insertions(+), 7 deletions(-) delete mode 100644 src/allocation/adapters/email.py create mode 100644 src/allocation/adapters/notifications.py diff --git a/src/allocation/adapters/email.py b/src/allocation/adapters/email.py deleted file mode 100644 index 1c37d427..00000000 --- a/src/allocation/adapters/email.py +++ /dev/null @@ -1,2 +0,0 @@ -def send(*args): - print("SENDING EMAIL:", *args) diff --git a/src/allocation/adapters/notifications.py b/src/allocation/adapters/notifications.py new file mode 100644 index 00000000..db29f7c8 --- /dev/null +++ b/src/allocation/adapters/notifications.py @@ -0,0 +1,28 @@ +# pylint: disable=too-few-public-methods +import abc +import smtplib +from allocation import config + + +class AbstractNotifications(abc.ABC): + @abc.abstractmethod + def send(self, destination, message): + raise NotImplementedError + + +DEFAULT_HOST = config.get_email_host_and_port()["host"] +DEFAULT_PORT = config.get_email_host_and_port()["port"] + + +class EmailNotifications(AbstractNotifications): + def __init__(self, smtp_host=DEFAULT_HOST, port=DEFAULT_PORT): + self.server = smtplib.SMTP(smtp_host, port=port) + self.server.noop() + + def send(self, destination, message): + msg = f"Subject: allocation service notification\n{message}" + self.server.sendmail( + from_addr="allocations@example.com", + to_addrs=[destination], + msg=msg, + ) diff --git a/src/allocation/bootstrap.py b/src/allocation/bootstrap.py index 805b13f5..22112a06 100644 --- a/src/allocation/bootstrap.py +++ b/src/allocation/bootstrap.py @@ -1,20 +1,27 @@ import inspect from typing import Callable -from allocation.adapters import email, orm, redis_eventpublisher +from allocation.adapters import orm, redis_eventpublisher +from allocation.adapters.notifications import ( + AbstractNotifications, + EmailNotifications, +) from allocation.service_layer import handlers, messagebus, unit_of_work def bootstrap( start_orm: bool = True, uow: unit_of_work.AbstractUnitOfWork = unit_of_work.SqlAlchemyUnitOfWork(), - send_mail: Callable = email.send, + notifications: AbstractNotifications = None, publish: Callable = redis_eventpublisher.publish, ) -> messagebus.MessageBus: + if notifications is None: + notifications = EmailNotifications() + if start_orm: orm.start_mappers() - dependencies = {"uow": uow, "send_mail": send_mail, "publish": publish} + dependencies = {"uow": uow, "notifications": notifications, "publish": publish} injected_event_handlers = { event_type: [ inject_dependencies(handler, dependencies) diff --git a/src/allocation/config.py b/src/allocation/config.py index 30a8eb07..bda1bbf2 100644 --- a/src/allocation/config.py +++ b/src/allocation/config.py @@ -19,3 +19,10 @@ def get_redis_host_and_port(): host = os.environ.get("REDIS_HOST", "localhost") port = 63791 if host == "localhost" else 6379 return dict(host=host, port=port) + + +def get_email_host_and_port(): + host = os.environ.get("EMAIL_HOST", "localhost") + port = 11025 if host == "localhost" else 1025 + http_port = 18025 if host == "localhost" else 8025 + return dict(host=host, port=port, http_port=http_port) diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py index 6ab2bbe9..2d7aa8d4 100644 --- a/src/allocation/service_layer/handlers.py +++ b/src/allocation/service_layer/handlers.py @@ -6,6 +6,7 @@ from allocation.domain.model import OrderLine if TYPE_CHECKING: + from allocation.adapters import notifications from . 
import unit_of_work @@ -61,9 +62,9 @@ def change_batch_quantity( def send_out_of_stock_notification( event: events.OutOfStock, - send_mail: Callable, + notifications: notifications.AbstractNotifications, ): - send_mail( + notifications.send( "stock@made.com", f"Out of stock for {event.sku}", ) From 514b6a404b940c3e1bba24554ba48bb5bd0a4ea3 Mon Sep 17 00:00:00 2001 From: Harry Date: Sun, 14 Jul 2019 02:56:00 +0100 Subject: [PATCH 52/64] tests for notifcations [notifications_unit_tests] --- tests/integration/test_views.py | 3 ++- tests/unit/test_handlers.py | 30 ++++++++++++++++++------------ 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/tests/integration/test_views.py b/tests/integration/test_views.py index 1eea394e..ccd5d542 100644 --- a/tests/integration/test_views.py +++ b/tests/integration/test_views.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name from datetime import date from sqlalchemy.orm import clear_mappers +from unittest import mock import pytest from allocation import bootstrap, views from allocation.domain import commands @@ -14,7 +15,7 @@ def sqlite_bus(sqlite_session_factory): bus = bootstrap.bootstrap( start_orm=True, uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory), - send_mail=lambda *args: None, + notifications=mock.Mock(), publish=lambda *args: None, ) yield bus diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py index cb21461e..f1218540 100644 --- a/tests/unit/test_handlers.py +++ b/tests/unit/test_handlers.py @@ -1,12 +1,14 @@ # pylint: disable=no-self-use from __future__ import annotations +from collections import defaultdict from datetime import date -from unittest import mock +from typing import Dict, List import pytest from allocation import bootstrap -from allocation.adapters import repository from allocation.domain import commands -from allocation.service_layer import handlers, unit_of_work +from allocation.service_layer import handlers +from allocation.adapters import notifications, repository +from allocation.service_layer import unit_of_work class FakeRepository(repository.AbstractRepository): @@ -39,11 +41,19 @@ def rollback(self): pass +class FakeNotifications(notifications.AbstractNotifications): + def __init__(self): + self.sent = defaultdict(list) # type: Dict[str, List[str]] + + def send(self, destination, message): + self.sent[destination].append(message) + + def bootstrap_test_app(): return bootstrap.bootstrap( start_orm=False, uow=FakeUnitOfWork(), - send_mail=lambda *args: None, + notifications=FakeNotifications(), publish=lambda *args: None, ) @@ -86,21 +96,17 @@ def test_commits(self): assert bus.uow.committed def test_sends_email_on_out_of_stock_error(self): - emails = [] - - def fake_send_mail(*args): - emails.append(args) - + fake_notifs = FakeNotifications() bus = bootstrap.bootstrap( start_orm=False, uow=FakeUnitOfWork(), - send_mail=fake_send_mail, + notifications=fake_notifs, publish=lambda *args: None, ) bus.handle(commands.CreateBatch("b1", "POPULAR-CURTAINS", 9, None)) bus.handle(commands.Allocate("o1", "POPULAR-CURTAINS", 10)) - assert emails == [ - ("stock@made.com", f"Out of stock for POPULAR-CURTAINS"), + assert fake_notifs.sent["stock@made.com"] == [ + f"Out of stock for POPULAR-CURTAINS", ] From 51a00dfaa78345ece4e5f75bc999080051111bf4 Mon Sep 17 00:00:00 2001 From: Harry Date: Sun, 14 Jul 2019 03:23:07 +0100 Subject: [PATCH 53/64] add a mailhog fake email server to docker-compose --- docker-compose.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git 
a/docker-compose.yml b/docker-compose.yml index dc2cc369..f964ab74 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,10 +10,12 @@ services: depends_on: - postgres - redis + - mailhog environment: - DB_HOST=postgres - DB_PASSWORD=abc123 - REDIS_HOST=redis + - EMAIL_HOST=mailhog - PYTHONDONTWRITEBYTECODE=1 volumes: - ./src:/src @@ -26,11 +28,13 @@ services: image: allocation-image depends_on: - redis_pubsub + - mailhog environment: - DB_HOST=postgres - DB_PASSWORD=abc123 - API_HOST=api - REDIS_HOST=redis + - EMAIL_HOST=mailhog - PYTHONDONTWRITEBYTECODE=1 - FLASK_APP=allocation/entrypoints/flask_app.py - FLASK_DEBUG=1 @@ -59,3 +63,8 @@ services: ports: - "63791:6379" + mailhog: + image: mailhog/mailhog + ports: + - "11025:1025" + - "18025:8025" From bb51578bdd3860de31702011b2a858b16f60520b Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 8 Oct 2019 12:56:58 +0100 Subject: [PATCH 54/64] logging.info needs backport --- src/allocation/adapters/orm.py | 3 +++ src/allocation/adapters/redis_eventpublisher.py | 2 +- src/allocation/entrypoints/redis_eventconsumer.py | 3 ++- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/allocation/adapters/orm.py b/src/allocation/adapters/orm.py index a87068a5..81d704ca 100644 --- a/src/allocation/adapters/orm.py +++ b/src/allocation/adapters/orm.py @@ -1,3 +1,4 @@ +import logging from sqlalchemy import ( Table, MetaData, @@ -12,6 +13,7 @@ from allocation.domain import model +logger = logging.getLogger(__name__) metadata = MetaData() @@ -59,6 +61,7 @@ def start_mappers(): + logger.info("Starting mappers") lines_mapper = mapper(model.OrderLine, order_lines) batches_mapper = mapper( model.Batch, diff --git a/src/allocation/adapters/redis_eventpublisher.py b/src/allocation/adapters/redis_eventpublisher.py index 6100956f..d607d6ac 100644 --- a/src/allocation/adapters/redis_eventpublisher.py +++ b/src/allocation/adapters/redis_eventpublisher.py @@ -12,5 +12,5 @@ def publish(channel, event: events.Event): - logging.debug("publishing: channel=%s, event=%s", channel, event) + logging.info("publishing: channel=%s, event=%s", channel, event) r.publish(channel, json.dumps(asdict(event))) diff --git a/src/allocation/entrypoints/redis_eventconsumer.py b/src/allocation/entrypoints/redis_eventconsumer.py index 9b5b93d0..6d0d49a7 100644 --- a/src/allocation/entrypoints/redis_eventconsumer.py +++ b/src/allocation/entrypoints/redis_eventconsumer.py @@ -11,6 +11,7 @@ def main(): + logger.info("Redis pubsub starting") bus = bootstrap.bootstrap() pubsub = r.pubsub(ignore_subscribe_messages=True) pubsub.subscribe("change_batch_quantity") @@ -20,7 +21,7 @@ def main(): def handle_change_batch_quantity(m, bus): - logging.debug("handling %s", m) + logger.info("handling %s", m) data = json.loads(m["data"]) cmd = commands.ChangeBatchQuantity(ref=data["batchref"], qty=data["qty"]) bus.handle(cmd) From f4fca95b571b34b723c452388bac8341058f9545 Mon Sep 17 00:00:00 2001 From: Harry Date: Sun, 14 Jul 2019 03:23:44 +0100 Subject: [PATCH 55/64] integration test for emails [chapter_13_dependency_injection_ends] --- tests/integration/test_email.py | 37 +++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 tests/integration/test_email.py diff --git a/tests/integration/test_email.py b/tests/integration/test_email.py new file mode 100644 index 00000000..4aade37b --- /dev/null +++ b/tests/integration/test_email.py @@ -0,0 +1,37 @@ +# pylint: disable=redefined-outer-name +import pytest +import requests +from sqlalchemy.orm import clear_mappers 
+from allocation import bootstrap, config +from allocation.domain import commands +from allocation.adapters import notifications +from allocation.service_layer import unit_of_work +from ..random_refs import random_sku + + +@pytest.fixture +def bus(sqlite_session_factory): + bus = bootstrap.bootstrap( + start_orm=True, + uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory), + notifications=notifications.EmailNotifications(), + publish=lambda *args: None, + ) + yield bus + clear_mappers() + + +def get_email_from_mailhog(sku): + host, port = map(config.get_email_host_and_port().get, ["host", "http_port"]) + all_emails = requests.get(f"http://{host}:{port}/api/v2/messages").json() + return next(m for m in all_emails["items"] if sku in str(m)) + + +def test_out_of_stock_email(bus): + sku = random_sku() + bus.handle(commands.CreateBatch("batch1", sku, 9, None)) + bus.handle(commands.Allocate("order1", sku, 10)) + email = get_email_from_mailhog(sku) + assert email["Raw"]["From"] == "allocations@example.com" + assert email["Raw"]["To"] == ["stock@made.com"] + assert f"Out of stock for {sku}" in email["Raw"]["Data"] From c95fc4f8cd8cfed54381542c618da738e4106988 Mon Sep 17 00:00:00 2001 From: Harry Date: Thu, 25 Feb 2021 01:02:38 +0000 Subject: [PATCH 56/64] update travis config --- .travis.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index fcd3ceea..bf0b9de2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,11 @@ dist: xenial language: python python: 3.8 - script: - make all - branches: except: - - /.*_exercise$/ + - "/.*_exercise$/" +env: + global: + secure: cNctlzVCjUj1oOrRW0aryxhJHB/u0b6vmn532jcgyCRS1aQjMJtT61O7tW6yMk4wOaH4Lr0kAI0J6+1lnRYf5g11H1M+IpEHGMWgJgImsysDpLRWUGmDJTez/ii8psk0SfOP/0ZwQp+QxOB92CHdPeOPOmu2HFa00V82/H7gousXR7ywQRNthLHwso36O8+UoHc4qw8nIbjcHzbfD6ysJNynmaUMlB3mRTU1hkjGKKpA2Xyl4tmkIhp3NCPJc0WR4SgB3y0u3dVOC+RtbRzl/XpEbjsZHHNloBirK+8ERn9ISBBh/mvfo6qTix743e+xvhtBlLJjk3o4H0VMH+wQ3zIpIh4TKbhPCMqWY3gvtKDVRHD+Sywk2TE6zSz0sDPWk248MC2QsL7sgeFwcnFHOWy2iKf4YyuZtoaJuX+2tw23cDCdMS6wbARlT8Kb5QwMlsxuKYN/04kQB+9nXTVsWKJGIwLKdYRzshnlzqB/UEe2vrjZcbBixCp4pbZ2jSzw2881he4KSbVGIJdZYSFetMuaN0P9obtdaJU4V+IhwzFyyapjZhEGCTl+l/m8uGdJ5DOhFlZ7OczHja7DKUuQvB3AbnMGvN518C+fJkJpWxAn5UeIp3d0ZZm32XVKt3k8PJaP7LBYdxnr3JCRit7+kNnlP7Ho0NjvX6GTHQ+r+JM= From c455e35ad613cee2a96c40033f217241a8410f14 Mon Sep 17 00:00:00 2001 From: Harry Date: Thu, 25 Feb 2021 01:32:55 +0000 Subject: [PATCH 57/64] readme fixes --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index df5c823c..636ed7c3 100644 --- a/README.md +++ b/README.md @@ -3,23 +3,23 @@ ## Chapters Each chapter has its own branch which contains all the commits for that chapter, -so it has the state that corresponds to the _end_ of that chapter. If you want -to try and code along with a chapter, you'll want to check out the branch for the -previous chapter. +so it has the state that corresponds to the _end_ of that chapter. +If you want to try and code along with a chapter, +you'll want to check out the branch for the previous chapter. 
-https://github.com/python-leap/code/branches/all +https://github.com/cosmicpython/code/branches/all ## Exercises -Branches for the exercises follow the convention `{chatper_name}_exercise`, eg -https://github.com/python-leap/code/tree/chapter_04_service_layer_exercise +Branches for the exercises follow the convention `{chapter_name}_exercise`, +eg https://github.com/cosmicpython/code/tree/chapter_04_service_layer_exercise ## Requirements * docker with docker-compose -* for chapters 1 and 2, and optionally for the rest: a local python3.7 virtualenv +* for chapters 1 and 2, and optionally for the rest: a local python3.8 virtualenv ## Building the containers From 2f8feba3974c6c0506b402b7b429ebbf5c120b75 Mon Sep 17 00:00:00 2001 From: Harry Date: Thu, 25 Feb 2021 01:45:17 +0000 Subject: [PATCH 58/64] attempt docker login --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index bf0b9de2..d69e8e14 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,8 @@ dist: xenial language: python python: 3.8 +before_install: + - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin script: - make all branches: From 539e35c689f9f52348b39b897bdc468ed255131e Mon Sep 17 00:00:00 2001 From: Harry Date: Thu, 25 Feb 2021 01:48:56 +0000 Subject: [PATCH 59/64] upgrade travis to focal + 3.9 why the heck not --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index d69e8e14..8b4550c1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ -dist: xenial +dist: focal language: python -python: 3.8 +python: 3.9 before_install: - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin script: From 15922acc755aa272dff0945f17d59d354116f64a Mon Sep 17 00:00:00 2001 From: Sergey Fursov Date: Fri, 2 Apr 2021 18:00:45 +0300 Subject: [PATCH 60/64] fix requirements installation command in README --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 636ed7c3..63e8510c 100644 --- a/README.md +++ b/README.md @@ -45,10 +45,10 @@ pip install pytest pip install pytest sqlalchemy # for chapter 4+5 -pip install requirements.txt +pip install -r requirements.txt # for chapter 6+ -pip install requirements.txt +pip install -r requirements.txt pip install -e src/ ``` From 280b9d596a2fdc981b1e3282d3343df883e9d2b8 Mon Sep 17 00:00:00 2001 From: adamculp Date: Sat, 22 May 2021 16:23:39 -0400 Subject: [PATCH 61/64] Corrected commands of the individual tests make commands --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 63e8510c..561cbb70 100644 --- a/README.md +++ b/README.md @@ -60,9 +60,9 @@ pip install -e src/ ```sh make test # or, to run individual test types -make unit -make integration -make e2e +make unit-tests +make integration-tests +make e2e-tests # or, if you have a local virtualenv make up pytest tests/unit From 9aa5b82dc593adebd564b089a439a6f18c8e022b Mon Sep 17 00:00:00 2001 From: Harry Percival Date: Fri, 17 Mar 2023 12:53:12 +0000 Subject: [PATCH 62/64] pin sqlalchmy to <2 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 882cb352..789a24fe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # app -sqlalchemy +sqlalchemy<2 flask psycopg2-binary redis From af5550869895fbd255d98c6809f6fed961e2bf6b Mon Sep 17 00:00:00 2001 From: Daniel Faber 
<59205635+DanielSchiessl@users.noreply.github.com> Date: Fri, 17 Nov 2023 18:37:50 +0100 Subject: [PATCH 63/64] run tests on pull request --- .github/workflows/run_tests_on_pull_request.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/workflows/run_tests_on_pull_request.yml diff --git a/.github/workflows/run_tests_on_pull_request.yml b/.github/workflows/run_tests_on_pull_request.yml new file mode 100644 index 00000000..940e6afa --- /dev/null +++ b/.github/workflows/run_tests_on_pull_request.yml @@ -0,0 +1,11 @@ +on: + pull_request: + types: [opened, synchronize, reopened] +jobs: + run_tests: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: run tests + run: make all \ No newline at end of file From 4038abbe52e4197b139c300a532cb1330271b9c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alfonso=20Montero=20L=C3=B3pez?= Date: Fri, 24 Jan 2025 14:33:19 +0100 Subject: [PATCH 64/64] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 561cbb70..8b35464c 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Example application code for the python architecture book +# Example application code for the "Architecture Patterns with Python" book ## Chapters
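
Editorial note on the dependency-injection technique threaded through this series: the bootstrap script added in PATCH 43/64 (and restored by the revert in PATCH 50/64) wires handlers by inspecting each handler's signature and closing over only the dependencies it names, which is what lets PATCH 35/64 give handlers "all and only" explicit dependencies. The following is a minimal, stand-alone sketch of that idea, not part of the patch series itself: the handler names (`fake_allocate`, `fake_notify`) and the string/lambda stand-ins for real dependencies are made up for illustration.

```python
# Illustrative sketch only: the signature-inspection DI trick in isolation.
import inspect
from typing import Callable, Dict


def inject_dependencies(handler: Callable, dependencies: Dict) -> Callable:
    # Look at the handler's parameter names and keep only the dependencies
    # it actually asks for, then return a one-argument callable.
    params = inspect.signature(handler).parameters
    deps = {name: dep for name, dep in dependencies.items() if name in params}
    return lambda message: handler(message, **deps)


def fake_allocate(cmd, uow):  # hypothetical handler that wants a unit of work
    return f"allocated {cmd} using {uow}"


def fake_notify(event, send_mail):  # hypothetical handler that wants a mail sender
    send_mail("stock@made.com", f"event: {event}")


if __name__ == "__main__":
    sent = []
    dependencies = {"uow": "fake-uow", "send_mail": lambda *args: sent.append(args)}

    allocate = inject_dependencies(fake_allocate, dependencies)
    notify = inject_dependencies(fake_notify, dependencies)

    print(allocate("cmd-1"))  # allocated cmd-1 using fake-uow
    notify("out-of-stock")
    print(sent)               # [('stock@made.com', 'event: out-of-stock')]
```

The effect is the same as building a `functools.partial` per handler: callers (the message bus) only ever pass the message, while tests swap in fakes simply by handing `bootstrap()` different dependencies, as the PATCH 44 and PATCH 52 test changes do.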