diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..fcd3ceea --- /dev/null +++ b/.travis.yml @@ -0,0 +1,10 @@ +dist: xenial +language: python +python: 3.8 + +script: +- make all + +branches: + except: + - /.*_exercise$/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..a52f27b6 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.8-alpine + +RUN apk add --no-cache --virtual .build-deps gcc postgresql-dev musl-dev python3-dev +RUN apk add libpq + +COPY requirements.txt /tmp/ +RUN pip install -r /tmp/requirements.txt + +RUN apk del --no-cache .build-deps + +RUN mkdir -p /src +COPY src/ /src/ +RUN pip install -e /src +COPY tests/ /tests/ + +WORKDIR /src diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..968a3be5 --- /dev/null +++ b/Makefile @@ -0,0 +1,25 @@ +build: + docker-compose build + +up: + docker-compose up -d + +test: up + docker-compose run --rm --no-deps --entrypoint=pytest api /tests/unit /tests/integration /tests/e2e + +unit-tests: + docker-compose run --rm --no-deps --entrypoint=pytest api /tests/unit + +integration-tests: up + docker-compose run --rm --no-deps --entrypoint=pytest api /tests/integration + +e2e-tests: up + docker-compose run --rm --no-deps --entrypoint=pytest api /tests/e2e + +logs: + docker-compose logs --tail=25 api redis_pubsub + +down: + docker-compose down --remove-orphans + +all: down build up test diff --git a/README.md b/README.md new file mode 100644 index 00000000..71d8d309 --- /dev/null +++ b/README.md @@ -0,0 +1,54 @@ +# Example application code for the python architecture book + +## Chapters + +Each chapter has its own branch which contains all the commits for that chapter, +so it has the state that corresponds to the _end_ of that chapter. If you want +to try and code along with a chapter, you'll want to check out the branch for the +previous chapter. 
+ +https://github.com/python-leap/code/branches/all + + ## Exercises + Branches for the exercises follow the convention `{chapter_name}_exercise`, eg +https://github.com/python-leap/code/tree/chapter_03_service_layer_exercise + + ## Requirements + + * docker with docker-compose + * for chapters 1 and 2, and optionally for the rest: a local python3.7 virtualenv + + ## Building the containers + + _(this is only required from chapter 3 onwards)_ + + ```sh + make build + make up + # or + make all # builds, brings containers up, runs tests + ``` + + ## Running the tests + + ```sh + make test + # or, to run individual test types + make unit-tests + make integration-tests + make e2e-tests + # or, if you have a local virtualenv + make up + pytest tests/unit + pytest tests/integration + pytest tests/e2e + ``` + + ## Makefile + + There are more useful commands in the makefile, have a look and try them out. + diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..f964ab74 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,70 @@ +version: "3" + + services: + + redis_pubsub: + build: + context: . 
+ dockerfile: Dockerfile + image: allocation-image + depends_on: + - postgres + - redis + - mailhog + environment: + - DB_HOST=postgres + - DB_PASSWORD=abc123 + - REDIS_HOST=redis + - EMAIL_HOST=mailhog + - PYTHONDONTWRITEBYTECODE=1 + volumes: + - ./src:/src + - ./tests:/tests + entrypoint: + - python + - /src/allocation/entrypoints/redis_eventconsumer.py + + api: + image: allocation-image + depends_on: + - redis_pubsub + - mailhog + environment: + - DB_HOST=postgres + - DB_PASSWORD=abc123 + - API_HOST=api + - REDIS_HOST=redis + - EMAIL_HOST=mailhog + - PYTHONDONTWRITEBYTECODE=1 + - FLASK_APP=allocation/entrypoints/flask_app.py + - FLASK_DEBUG=1 + - PYTHONUNBUFFERED=1 + volumes: + - ./src:/src + - ./tests:/tests + entrypoint: + - flask + - run + - --host=0.0.0.0 + - --port=80 + ports: + - "5005:80" + + postgres: + image: postgres:9.6 + environment: + - POSTGRES_USER=allocation + - POSTGRES_PASSWORD=abc123 + ports: + - "54321:5432" + + redis: + image: redis:alpine + ports: + - "63791:6379" + + mailhog: + image: mailhog/mailhog + ports: + - "11025:1025" + - "18025:8025" diff --git a/mypy.ini b/mypy.ini index ead5ef09..601283d7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,9 +1,7 @@ [mypy] ignore_missing_imports = False +mypy_path = ./src +check_untyped_defs = True -[mypy-pytest.*] +[mypy-pytest.*,sqlalchemy.*,redis.*] ignore_missing_imports = True - -[mypy-sqlalchemy.*] -ignore_missing_imports = True - diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..abada9a3 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,13 @@ +# app +sqlalchemy +flask +psycopg2-binary + +# dev/tests +pytest +pytest-icdiff +mypy +pylint +requests +redis +tenacity diff --git a/src/allocation/__init__.py b/src/allocation/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/allocation/adapters/__init__.py b/src/allocation/adapters/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/allocation/adapters/notifications.py 
b/src/allocation/adapters/notifications.py new file mode 100644 index 00000000..1d398d16 --- /dev/null +++ b/src/allocation/adapters/notifications.py @@ -0,0 +1,31 @@ +#pylint: disable=too-few-public-methods +import abc +import smtplib +from allocation import config + + +class AbstractNotifications(abc.ABC): + + @abc.abstractmethod + def send(self, destination, message): + raise NotImplementedError + + +DEFAULT_HOST = config.get_email_host_and_port()['host'] +DEFAULT_PORT = config.get_email_host_and_port()['port'] + + + +class EmailNotifications(AbstractNotifications): + + def __init__(self, smtp_host=DEFAULT_HOST, port=DEFAULT_PORT): + self.server = smtplib.SMTP(smtp_host, port=port) + self.server.noop() + + def send(self, destination, message): + msg = f'Subject: allocation service notification\n{message}' + self.server.sendmail( + from_addr='allocations@example.com', + to_addrs=[destination], + msg=msg + ) diff --git a/src/allocation/adapters/orm.py b/src/allocation/adapters/orm.py new file mode 100644 index 00000000..c9b7a5d0 --- /dev/null +++ b/src/allocation/adapters/orm.py @@ -0,0 +1,68 @@ +import logging +from sqlalchemy import ( + Table, MetaData, Column, Integer, String, Date, ForeignKey, + event, +) +from sqlalchemy.orm import mapper, relationship + +from allocation.domain import model + +logger = logging.getLogger(__name__) + +metadata = MetaData() + +order_lines = Table( + 'order_lines', metadata, + Column('id', Integer, primary_key=True, autoincrement=True), + Column('sku', String(255)), + Column('qty', Integer, nullable=False), + Column('orderid', String(255)), +) + +products = Table( + 'products', metadata, + Column('sku', String(255), primary_key=True), + Column('version_number', Integer, nullable=False, server_default='0'), +) + +batches = Table( + 'batches', metadata, + Column('id', Integer, primary_key=True, autoincrement=True), + Column('reference', String(255)), + Column('sku', ForeignKey('products.sku')), + Column('_purchased_quantity', 
Integer, nullable=False), + Column('eta', Date, nullable=True), +) + +allocations = Table( + 'allocations', metadata, + Column('id', Integer, primary_key=True, autoincrement=True), + Column('orderline_id', ForeignKey('order_lines.id')), + Column('batch_id', ForeignKey('batches.id')), +) + +allocations_view = Table( + 'allocations_view', metadata, + Column('orderid', String(255)), + Column('sku', String(255)), + Column('batchref', String(255)), +) + + +def start_mappers(): + logger.info("Starting mappers") + lines_mapper = mapper(model.OrderLine, order_lines) + batches_mapper = mapper(model.Batch, batches, properties={ + '_allocations': relationship( + lines_mapper, + secondary=allocations, + collection_class=set, + ) + }) + mapper(model.Product, products, properties={ + 'batches': relationship(batches_mapper) + }) + +@event.listens_for(model.Product, 'load') +def receive_load(product, _): + product.events = [] diff --git a/src/allocation/adapters/redis_eventpublisher.py b/src/allocation/adapters/redis_eventpublisher.py new file mode 100644 index 00000000..8e37ab26 --- /dev/null +++ b/src/allocation/adapters/redis_eventpublisher.py @@ -0,0 +1,16 @@ +import json +import logging +from dataclasses import asdict +import redis + +from allocation import config +from allocation.domain import events + +logger = logging.getLogger(__name__) + +r = redis.Redis(**config.get_redis_host_and_port()) + + +def publish(channel, event: events.Event): + logging.info('publishing: channel=%s, event=%s', channel, event) + r.publish(channel, json.dumps(asdict(event))) diff --git a/src/allocation/adapters/repository.py b/src/allocation/adapters/repository.py new file mode 100644 index 00000000..bda90c07 --- /dev/null +++ b/src/allocation/adapters/repository.py @@ -0,0 +1,59 @@ +import abc +from typing import Set +from allocation.adapters import orm +from allocation.domain import model + + + +class AbstractRepository(abc.ABC): + + def __init__(self): + self.seen = set() # type: 
Set[model.Product] + + def add(self, product: model.Product): + self._add(product) + self.seen.add(product) + + def get(self, sku) -> model.Product: + product = self._get(sku) + if product: + self.seen.add(product) + return product + + def get_by_batchref(self, batchref) -> model.Product: + product = self._get_by_batchref(batchref) + if product: + self.seen.add(product) + return product + + @abc.abstractmethod + def _add(self, product: model.Product): + raise NotImplementedError + + @abc.abstractmethod + def _get(self, sku) -> model.Product: + raise NotImplementedError + + @abc.abstractmethod + def _get_by_batchref(self, batchref) -> model.Product: + raise NotImplementedError + + + + +class SqlAlchemyRepository(AbstractRepository): + + def __init__(self, session): + super().__init__() + self.session = session + + def _add(self, product): + self.session.add(product) + + def _get(self, sku): + return self.session.query(model.Product).filter_by(sku=sku).first() + + def _get_by_batchref(self, batchref): + return self.session.query(model.Product).join(model.Batch).filter( + orm.batches.c.reference == batchref, + ).first() diff --git a/src/allocation/bootstrap.py b/src/allocation/bootstrap.py new file mode 100644 index 00000000..842c24ce --- /dev/null +++ b/src/allocation/bootstrap.py @@ -0,0 +1,50 @@ +import inspect +from typing import Callable +from allocation.adapters import orm, redis_eventpublisher +from allocation.adapters.notifications import ( + AbstractNotifications, EmailNotifications +) +from allocation.service_layer import handlers, messagebus, unit_of_work + + +def bootstrap( + start_orm: bool = True, + uow: unit_of_work.AbstractUnitOfWork = unit_of_work.SqlAlchemyUnitOfWork(), + notifications: AbstractNotifications = None, + publish: Callable = redis_eventpublisher.publish, +) -> messagebus.MessageBus: + + if notifications is None: + notifications = EmailNotifications() + + if start_orm: + orm.start_mappers() + + dependencies = {'uow': uow, 'notifications': 
notifications, 'publish': publish} + injected_event_handlers = { + event_type: [ + inject_dependencies(handler, dependencies) + for handler in event_handlers + ] + for event_type, event_handlers in handlers.EVENT_HANDLERS.items() + } + injected_command_handlers = { + command_type: inject_dependencies(handler, dependencies) + for command_type, handler in handlers.COMMAND_HANDLERS.items() + } + + return messagebus.MessageBus( + uow=uow, + event_handlers=injected_event_handlers, + command_handlers=injected_command_handlers, + ) + + +def inject_dependencies(handler, dependencies): + params = inspect.signature(handler).parameters + deps = { + name: dependency + for name, dependency in dependencies.items() + if name in params + } + return lambda message: handler(message, **deps) diff --git a/src/allocation/config.py b/src/allocation/config.py new file mode 100644 index 00000000..13d23b4d --- /dev/null +++ b/src/allocation/config.py @@ -0,0 +1,26 @@ +import os + + +def get_postgres_uri(): + host = os.environ.get('DB_HOST', 'localhost') + port = 54321 if host == 'localhost' else 5432 + password = os.environ.get('DB_PASSWORD', 'abc123') + user, db_name = 'allocation', 'allocation' + return f"postgresql://{user}:{password}@{host}:{port}/{db_name}" + + +def get_api_url(): + host = os.environ.get('API_HOST', 'localhost') + port = 5005 if host == 'localhost' else 80 + return f"http://{host}:{port}" + +def get_redis_host_and_port(): + host = os.environ.get('REDIS_HOST', 'localhost') + port = 63791 if host == 'localhost' else 6379 + return dict(host=host, port=port) + +def get_email_host_and_port(): + host = os.environ.get('EMAIL_HOST', 'localhost') + port = 11025 if host == 'localhost' else 1025 + http_port = 18025 if host == 'localhost' else 8025 + return dict(host=host, port=port, http_port=http_port) diff --git a/src/allocation/domain/__init__.py b/src/allocation/domain/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/allocation/domain/commands.py 
b/src/allocation/domain/commands.py new file mode 100644 index 00000000..c4f8a25c --- /dev/null +++ b/src/allocation/domain/commands.py @@ -0,0 +1,25 @@ +# pylint: disable=too-few-public-methods +from datetime import date +from typing import Optional +from dataclasses import dataclass + +class Command: + pass + +@dataclass +class Allocate(Command): + orderid: str + sku: str + qty: int + +@dataclass +class CreateBatch(Command): + ref: str + sku: str + qty: int + eta: Optional[date] = None + +@dataclass +class ChangeBatchQuantity(Command): + ref: str + qty: int diff --git a/src/allocation/domain/events.py b/src/allocation/domain/events.py new file mode 100644 index 00000000..3790fbbe --- /dev/null +++ b/src/allocation/domain/events.py @@ -0,0 +1,22 @@ +# pylint: disable=too-few-public-methods +from dataclasses import dataclass + +class Event: + pass + +@dataclass +class Allocated(Event): + orderid: str + sku: str + qty: int + batchref: str + +@dataclass +class Deallocated(Event): + orderid: str + sku: str + qty: int + +@dataclass +class OutOfStock(Event): + sku: str diff --git a/src/allocation/domain/model.py b/src/allocation/domain/model.py new file mode 100644 index 00000000..a33679c9 --- /dev/null +++ b/src/allocation/domain/model.py @@ -0,0 +1,92 @@ +from __future__ import annotations +from dataclasses import dataclass +from datetime import date +from typing import Optional, List, Set +from . 
import commands, events + + +class Product: + + def __init__(self, sku: str, batches: List[Batch], version_number: int = 0): + self.sku = sku + self.batches = batches + self.version_number = version_number + self.events = [] # type: List[events.Event] + + def allocate(self, line: OrderLine) -> str: + try: + batch = next( + b for b in sorted(self.batches) if b.can_allocate(line) + ) + batch.allocate(line) + self.version_number += 1 + self.events.append(events.Allocated( + orderid=line.orderid, sku=line.sku, qty=line.qty, + batchref=batch.reference, + )) + return batch.reference + except StopIteration: + self.events.append(events.OutOfStock(line.sku)) + return None + + def change_batch_quantity(self, ref: str, qty: int): + batch = next(b for b in self.batches if b.reference == ref) + batch._purchased_quantity = qty + while batch.available_quantity < 0: + line = batch.deallocate_one() + self.events.append( + events.Deallocated(line.orderid, line.sku, line.qty) + ) + +@dataclass(unsafe_hash=True) +class OrderLine: + orderid: str + sku: str + qty: int + + +class Batch: + def __init__( + self, ref: str, sku: str, qty: int, eta: Optional[date] + ): + self.reference = ref + self.sku = sku + self.eta = eta + self._purchased_quantity = qty + self._allocations = set() # type: Set[OrderLine] + + def __repr__(self): + return f'' + + def __eq__(self, other): + if not isinstance(other, Batch): + return False + return other.reference == self.reference + + def __hash__(self): + return hash(self.reference) + + def __gt__(self, other): + if self.eta is None: + return False + if other.eta is None: + return True + return self.eta > other.eta + + def allocate(self, line: OrderLine): + if self.can_allocate(line): + self._allocations.add(line) + + def deallocate_one(self) -> OrderLine: + return self._allocations.pop() + + @property + def allocated_quantity(self) -> int: + return sum(line.qty for line in self._allocations) + + @property + def available_quantity(self) -> int: + return 
self._purchased_quantity - self.allocated_quantity + + def can_allocate(self, line: OrderLine) -> bool: + return self.sku == line.sku and self.available_quantity >= line.qty diff --git a/src/allocation/entrypoints/__init__.py b/src/allocation/entrypoints/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/allocation/entrypoints/flask_app.py b/src/allocation/entrypoints/flask_app.py new file mode 100644 index 00000000..2d3f76ac --- /dev/null +++ b/src/allocation/entrypoints/flask_app.py @@ -0,0 +1,41 @@ +from datetime import datetime +from flask import Flask, jsonify, request +from allocation.domain import commands +from allocation.service_layer.handlers import InvalidSku +from allocation import bootstrap, views + +app = Flask(__name__) +bus = bootstrap.bootstrap() + + +@app.route("/add_batch", methods=['POST']) +def add_batch(): + eta = request.json['eta'] + if eta is not None: + eta = datetime.fromisoformat(eta).date() + cmd = commands.CreateBatch( + request.json['ref'], request.json['sku'], request.json['qty'], eta, + ) + bus.handle(cmd) + return 'OK', 201 + + +@app.route("/allocate", methods=['POST']) +def allocate_endpoint(): + try: + cmd = commands.Allocate( + request.json['orderid'], request.json['sku'], request.json['qty'], + ) + bus.handle(cmd) + except InvalidSku as e: + return jsonify({'message': str(e)}), 400 + + return 'OK', 202 + + +@app.route("/allocations/", methods=['GET']) +def allocations_view_endpoint(orderid): + result = views.allocations(orderid, bus.uow) + if not result: + return 'not found', 404 + return jsonify(result), 200 diff --git a/src/allocation/entrypoints/redis_eventconsumer.py b/src/allocation/entrypoints/redis_eventconsumer.py new file mode 100644 index 00000000..4c15e541 --- /dev/null +++ b/src/allocation/entrypoints/redis_eventconsumer.py @@ -0,0 +1,32 @@ +import json +import logging +import redis + +from allocation import bootstrap, config +from allocation.domain import commands + +logger = 
logging.getLogger(__name__) + +r = redis.Redis(**config.get_redis_host_and_port()) + + + +def main(): + logger.info('Redis pubsub starting') + bus = bootstrap.bootstrap() + pubsub = r.pubsub(ignore_subscribe_messages=True) + pubsub.subscribe('change_batch_quantity') + + for m in pubsub.listen(): + handle_change_batch_quantity(m, bus) + + +def handle_change_batch_quantity(m, bus): + logger.info('handling %s', m) + data = json.loads(m['data']) + cmd = commands.ChangeBatchQuantity(ref=data['batchref'], qty=data['qty']) + bus.handle(cmd) + + +if __name__ == '__main__': + main() diff --git a/src/allocation/service_layer/__init__.py b/src/allocation/service_layer/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/allocation/service_layer/handlers.py b/src/allocation/service_layer/handlers.py new file mode 100644 index 00000000..75995f7b --- /dev/null +++ b/src/allocation/service_layer/handlers.py @@ -0,0 +1,109 @@ +#pylint: disable=unused-argument +from __future__ import annotations +from dataclasses import asdict +from typing import List, Dict, Callable, Type, TYPE_CHECKING +from allocation.domain import commands, events, model +from allocation.domain.model import OrderLine +if TYPE_CHECKING: + from allocation.adapters import notifications + from . 
import unit_of_work + + +class InvalidSku(Exception): + pass + + + +def add_batch( + cmd: commands.CreateBatch, uow: unit_of_work.AbstractUnitOfWork +): + with uow: + product = uow.products.get(sku=cmd.sku) + if product is None: + product = model.Product(cmd.sku, batches=[]) + uow.products.add(product) + product.batches.append(model.Batch( + cmd.ref, cmd.sku, cmd.qty, cmd.eta + )) + uow.commit() + + +def allocate( + cmd: commands.Allocate, uow: unit_of_work.AbstractUnitOfWork +): + line = OrderLine(cmd.orderid, cmd.sku, cmd.qty) + with uow: + product = uow.products.get(sku=line.sku) + if product is None: + raise InvalidSku(f'Invalid sku {line.sku}') + product.allocate(line) + uow.commit() + + +def reallocate( + event: events.Deallocated, uow: unit_of_work.AbstractUnitOfWork +): + allocate(commands.Allocate(**asdict(event)), uow=uow) + + +def change_batch_quantity( + cmd: commands.ChangeBatchQuantity, uow: unit_of_work.AbstractUnitOfWork +): + with uow: + product = uow.products.get_by_batchref(batchref=cmd.ref) + product.change_batch_quantity(ref=cmd.ref, qty=cmd.qty) + uow.commit() + + +#pylint: disable=unused-argument + +def send_out_of_stock_notification( + event: events.OutOfStock, notifications: notifications.AbstractNotifications, +): + notifications.send( + 'stock@made.com', + f'Out of stock for {event.sku}', + ) + + +def publish_allocated_event( + event: events.Allocated, publish: Callable, +): + publish('line_allocated', event) + + +def add_allocation_to_read_model( + event: events.Allocated, uow: unit_of_work.SqlAlchemyUnitOfWork, +): + with uow: + uow.session.execute( + 'INSERT INTO allocations_view (orderid, sku, batchref)' + ' VALUES (:orderid, :sku, :batchref)', + dict(orderid=event.orderid, sku=event.sku, batchref=event.batchref) + ) + uow.commit() + + +def remove_allocation_from_read_model( + event: events.Deallocated, uow: unit_of_work.SqlAlchemyUnitOfWork, +): + with uow: + uow.session.execute( + 'DELETE FROM allocations_view ' + ' WHERE orderid = 
:orderid AND sku = :sku', + dict(orderid=event.orderid, sku=event.sku) + ) + uow.commit() + + +EVENT_HANDLERS = { + events.Allocated: [publish_allocated_event, add_allocation_to_read_model], + events.Deallocated: [remove_allocation_from_read_model, reallocate], + events.OutOfStock: [send_out_of_stock_notification], +} # type: Dict[Type[events.Event], List[Callable]] + +COMMAND_HANDLERS = { + commands.Allocate: allocate, + commands.CreateBatch: add_batch, + commands.ChangeBatchQuantity: change_batch_quantity, +} # type: Dict[Type[commands.Command], Callable] diff --git a/src/allocation/service_layer/messagebus.py b/src/allocation/service_layer/messagebus.py new file mode 100644 index 00000000..e6ccae66 --- /dev/null +++ b/src/allocation/service_layer/messagebus.py @@ -0,0 +1,58 @@ +# pylint: disable=broad-except, attribute-defined-outside-init +from __future__ import annotations +import logging +from typing import Callable, Dict, List, Union, Type, TYPE_CHECKING +from allocation.domain import commands, events + +if TYPE_CHECKING: + from . 
import unit_of_work + +logger = logging.getLogger(__name__) + +Message = Union[commands.Command, events.Event] + + +class MessageBus: + + def __init__( + self, + uow: unit_of_work.AbstractUnitOfWork, + event_handlers: Dict[Type[events.Event], List[Callable]], + command_handlers: Dict[Type[commands.Command], Callable], + ): + self.uow = uow + self.event_handlers = event_handlers + self.command_handlers = command_handlers + + def handle(self, message: Message): + self.queue = [message] + while self.queue: + message = self.queue.pop(0) + if isinstance(message, events.Event): + self.handle_event(message) + elif isinstance(message, commands.Command): + self.handle_command(message) + else: + raise Exception(f'{message} was not an Event or Command') + + + def handle_event(self, event: events.Event): + for handler in self.event_handlers[type(event)]: + try: + logger.debug('handling event %s with handler %s', event, handler) + handler(event) + self.queue.extend(self.uow.collect_new_events()) + except Exception: + logger.exception('Exception handling event %s', event) + continue + + + def handle_command(self, command: commands.Command): + logger.debug('handling command %s', command) + try: + handler = self.command_handlers[type(command)] + handler(command) + self.queue.extend(self.uow.collect_new_events()) + except Exception: + logger.exception('Exception handling command %s', command) + raise diff --git a/src/allocation/service_layer/unit_of_work.py b/src/allocation/service_layer/unit_of_work.py new file mode 100644 index 00000000..db55084f --- /dev/null +++ b/src/allocation/service_layer/unit_of_work.py @@ -0,0 +1,63 @@ +# pylint: disable=attribute-defined-outside-init +from __future__ import annotations +import abc +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.session import Session + + +from allocation import config +from allocation.adapters import repository + + +class AbstractUnitOfWork(abc.ABC): + products: 
repository.AbstractRepository + + def __enter__(self) -> AbstractUnitOfWork: + return self + + def __exit__(self, *args): + self.rollback() + + def commit(self): + self._commit() + + def collect_new_events(self): + for product in self.products.seen: + while product.events: + yield product.events.pop(0) + + @abc.abstractmethod + def _commit(self): + raise NotImplementedError + + @abc.abstractmethod + def rollback(self): + raise NotImplementedError + + + +DEFAULT_SESSION_FACTORY = sessionmaker(bind=create_engine( + config.get_postgres_uri(), + isolation_level="REPEATABLE READ", +)) + +class SqlAlchemyUnitOfWork(AbstractUnitOfWork): + + def __init__(self, session_factory=DEFAULT_SESSION_FACTORY): + self.session_factory = session_factory + + def __enter__(self): + self.session = self.session_factory() # type: Session + self.products = repository.SqlAlchemyRepository(self.session) + return super().__enter__() + + def __exit__(self, *args): + super().__exit__(*args) + self.session.close() + + def _commit(self): + self.session.commit() + + def rollback(self): + self.session.rollback() diff --git a/src/allocation/views.py b/src/allocation/views.py new file mode 100644 index 00000000..4cd07397 --- /dev/null +++ b/src/allocation/views.py @@ -0,0 +1,9 @@ +from allocation.service_layer import unit_of_work + +def allocations(orderid: str, uow: unit_of_work.SqlAlchemyUnitOfWork): + with uow: + results = list(uow.session.execute( + 'SELECT sku, batchref FROM allocations_view WHERE orderid = :orderid', + dict(orderid=orderid) + )) + return [dict(r) for r in results] diff --git a/src/setup.py b/src/setup.py new file mode 100644 index 00000000..be04950f --- /dev/null +++ b/src/setup.py @@ -0,0 +1,7 @@ +from setuptools import setup + +setup( + name='allocation', + version='0.1', + packages=['allocation'], +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 
00000000..14e6dcf9 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,84 @@ +# pylint: disable=redefined-outer-name +import shutil +import subprocess +import time +from pathlib import Path + +import pytest +import redis +import requests +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, clear_mappers +from tenacity import retry, stop_after_delay + +from allocation.adapters.orm import metadata, start_mappers +from allocation import config + +pytest.register_assert_rewrite('tests.e2e.api_client') + +@pytest.fixture +def in_memory_sqlite_db(): + engine = create_engine('sqlite:///:memory:') + metadata.create_all(engine) + return engine + +@pytest.fixture +def sqlite_session_factory(in_memory_sqlite_db): + yield sessionmaker(bind=in_memory_sqlite_db) + +@pytest.fixture +def mappers(): + start_mappers() + yield + clear_mappers() + + + +@retry(stop=stop_after_delay(10)) +def wait_for_postgres_to_come_up(engine): + return engine.connect() + + +@retry(stop=stop_after_delay(10)) +def wait_for_webapp_to_come_up(): + return requests.get(config.get_api_url()) + + +@retry(stop=stop_after_delay(10)) +def wait_for_redis_to_come_up(): + r = redis.Redis(**config.get_redis_host_and_port()) + return r.ping() + + +@pytest.fixture(scope='session') +def postgres_db(): + engine = create_engine(config.get_postgres_uri(), isolation_level='SERIALIZABLE') + wait_for_postgres_to_come_up(engine) + metadata.create_all(engine) + return engine + +@pytest.fixture +def postgres_session_factory(postgres_db): + yield sessionmaker(bind=postgres_db) + +@pytest.fixture +def postgres_session(postgres_session_factory): + return postgres_session_factory() + + +@pytest.fixture +def restart_api(): + (Path(__file__).parent / '../src/allocation/entrypoints/flask_app.py').touch() + time.sleep(0.5) + wait_for_webapp_to_come_up() + +@pytest.fixture +def restart_redis_pubsub(): + wait_for_redis_to_come_up() + if not shutil.which('docker-compose'): + print('skipping restart, assumes 
running in container') + return + subprocess.run( + ['docker-compose', 'restart', '-t', '0', 'redis_pubsub'], + check=True, + ) diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/e2e/api_client.py b/tests/e2e/api_client.py new file mode 100644 index 00000000..82dbd1b0 --- /dev/null +++ b/tests/e2e/api_client.py @@ -0,0 +1,25 @@ +import requests +from allocation import config + + +def post_to_add_batch(ref, sku, qty, eta): + url = config.get_api_url() + r = requests.post( + f'{url}/add_batch', + json={'ref': ref, 'sku': sku, 'qty': qty, 'eta': eta} + ) + assert r.status_code == 201 + + +def post_to_allocate(orderid, sku, qty, expect_success=True): + url = config.get_api_url() + r = requests.post(f'{url}/allocate', json={ + 'orderid': orderid, 'sku': sku, 'qty': qty, + }) + if expect_success: + assert r.status_code == 202 + return r + +def get_allocation(orderid): + url = config.get_api_url() + return requests.get(f'{url}/allocations/{orderid}') diff --git a/tests/e2e/redis_client.py b/tests/e2e/redis_client.py new file mode 100644 index 00000000..d776baea --- /dev/null +++ b/tests/e2e/redis_client.py @@ -0,0 +1,18 @@ +import json +import redis + +from allocation import config + +r = redis.Redis(**config.get_redis_host_and_port()) + + +def subscribe_to(channel): + pubsub = r.pubsub() + pubsub.subscribe(channel) + confirmation = pubsub.get_message(timeout=3) + assert confirmation['type'] == 'subscribe' + return pubsub + + +def publish_message(channel, message): + r.publish(channel, json.dumps(message)) diff --git a/tests/e2e/test_api.py b/tests/e2e/test_api.py new file mode 100644 index 00000000..d5dfc670 --- /dev/null +++ b/tests/e2e/test_api.py @@ -0,0 +1,38 @@ +import pytest +from ..random_refs import random_batchref, random_orderid, random_sku +from . 
import api_client + +@pytest.mark.usefixtures('postgres_db') +@pytest.mark.usefixtures('restart_api') +def test_happy_path_returns_202_and_batch_is_allocated(): + orderid = random_orderid() + sku, othersku = random_sku(), random_sku('other') + earlybatch = random_batchref(1) + laterbatch = random_batchref(2) + otherbatch = random_batchref(3) + api_client.post_to_add_batch(laterbatch, sku, 100, '2011-01-02') + api_client.post_to_add_batch(earlybatch, sku, 100, '2011-01-01') + api_client.post_to_add_batch(otherbatch, othersku, 100, None) + + r = api_client.post_to_allocate(orderid, sku, qty=3) + assert r.status_code == 202 + + r = api_client.get_allocation(orderid) + assert r.ok + assert r.json() == [ + {'sku': sku, 'batchref': earlybatch}, + ] + + +@pytest.mark.usefixtures('postgres_db') +@pytest.mark.usefixtures('restart_api') +def test_unhappy_path_returns_400_and_error_message(): + unknown_sku, orderid = random_sku(), random_orderid() + r = api_client.post_to_allocate( + orderid, unknown_sku, qty=20, expect_success=False, + ) + assert r.status_code == 400 + assert r.json()['message'] == f'Invalid sku {unknown_sku}' + + r = api_client.get_allocation(orderid) + assert r.status_code == 404 diff --git a/tests/e2e/test_external_events.py b/tests/e2e/test_external_events.py new file mode 100644 index 00000000..762731eb --- /dev/null +++ b/tests/e2e/test_external_events.py @@ -0,0 +1,40 @@ +import json +import pytest +from tenacity import Retrying, RetryError, stop_after_delay +from . 
import json
import pytest
from tenacity import Retrying, RetryError, stop_after_delay
from . import api_client, redis_client
from ..random_refs import random_batchref, random_orderid, random_sku


@pytest.mark.usefixtures('postgres_db')
@pytest.mark.usefixtures('restart_api')
@pytest.mark.usefixtures('restart_redis_pubsub')
def test_change_batch_quantity_leading_to_reallocation():
    """Shrinking an allocated batch should trigger reallocation via pub/sub."""
    # start with two batches and an order allocated to one of them
    orderid, sku = random_orderid(), random_sku()
    earlier_batch, later_batch = random_batchref('old'), random_batchref('newer')
    api_client.post_to_add_batch(earlier_batch, sku, qty=10, eta='2011-01-02')
    api_client.post_to_add_batch(later_batch, sku, qty=10, eta='2011-01-03')
    allocate_response = api_client.post_to_allocate(orderid, sku, 10)
    assert allocate_response.ok
    view = api_client.get_allocation(orderid)
    assert view.json()[0]['batchref'] == earlier_batch

    subscription = redis_client.subscribe_to('line_allocated')

    # change quantity on allocated batch so it's less than our order
    redis_client.publish_message('change_batch_quantity', {
        'batchref': earlier_batch, 'qty': 5
    })

    # wait until we see a message saying the order has been reallocated
    messages = []
    for attempt in Retrying(stop=stop_after_delay(3), reraise=True):
        with attempt:
            message = subscription.get_message(timeout=1)
            if message:
                messages.append(message)
            print(messages)
            data = json.loads(messages[-1]['data'])
            assert data['orderid'] == orderid
            assert data['batchref'] == later_batch
# tests/integration/test_email.py — exercises the real email adapter
# end to end against the mailhog test server.
#pylint: disable=redefined-outer-name
import pytest
import requests
from sqlalchemy.orm import clear_mappers
from allocation import bootstrap, config
from allocation.domain import commands
from allocation.adapters import notifications
from allocation.service_layer import unit_of_work
from ..random_refs import random_sku


@pytest.fixture
def bus(sqlite_session_factory):
    # A bus wired with a real sqlite-backed unit of work and the real
    # notifications adapter, so the out-of-stock email actually gets sent.
    bus = bootstrap.bootstrap(
        start_orm=True,
        uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory),
        notifications=notifications.EmailNotifications(),
        publish=lambda *args: None,
    )
    yield bus
    clear_mappers()  # undo start_orm=True so later tests can re-map


def get_email_from_mailhog(sku):
    """Return the first message captured by mailhog that mentions *sku*.

    Raises StopIteration if no such message exists.
    """
    host, port = map(config.get_email_host_and_port().get, ['host', 'http_port'])
    all_emails = requests.get(f'http://{host}:{port}/api/v2/messages').json()
    return next(m for m in all_emails['items'] if sku in str(m))


def test_out_of_stock_email(bus):
    # Allocating 10 against a batch of 9 should produce an out-of-stock email.
    sku = random_sku()
    bus.handle(commands.CreateBatch('batch1', sku, 9, None))
    bus.handle(commands.Allocate('order1', sku, 10))
    email = get_email_from_mailhog(sku)
    assert email['Raw']['From'] == 'allocations@example.com'
    assert email['Raw']['To'] == ['stock@made.com']
    assert f'Out of stock for {sku}' in email['Raw']['Data']


# tests/integration/test_repository.py
import pytest
from allocation.adapters import repository
from allocation.domain import model

pytestmark = pytest.mark.usefixtures('mappers')


def test_get_by_batchref(sqlite_session_factory):
    # get_by_batchref must resolve any batch reference to its owning product.
    session = sqlite_session_factory()
    repo = repository.SqlAlchemyRepository(session)
    b1 = model.Batch(ref='b1', sku='sku1', qty=100, eta=None)
    b2 = model.Batch(ref='b2', sku='sku1', qty=100, eta=None)
    b3 = model.Batch(ref='b3', sku='sku2', qty=100, eta=None)
    p1 = model.Product(sku='sku1', batches=[b1, b2])
    p2 = model.Product(sku='sku2', batches=[b3])
    repo.add(p1)
    repo.add(p2)
    assert repo.get_by_batchref('b2') == p1
    assert repo.get_by_batchref('b3') == p2
# tests/integration/test_uow.py — unit-of-work integration tests, including
# an optimistic-concurrency check that needs a real postgres database.
# pylint: disable=broad-except, too-many-arguments
import threading
import time
import traceback
from typing import List
from unittest.mock import Mock
import pytest
from allocation.domain import model
from allocation.service_layer import unit_of_work
from ..random_refs import random_sku, random_batchref, random_orderid

pytestmark = pytest.mark.usefixtures('mappers')


def insert_batch(session, ref, sku, qty, eta, product_version=1):
    # Seed a product row and one batch row with raw SQL, bypassing the domain
    # model, so tests can control the starting version_number directly.
    session.execute(
        'INSERT INTO products (sku, version_number) VALUES (:sku, :version)',
        dict(sku=sku, version=product_version),
    )
    session.execute(
        'INSERT INTO batches (reference, sku, _purchased_quantity, eta)'
        ' VALUES (:ref, :sku, :qty, :eta)',
        dict(ref=ref, sku=sku, qty=qty, eta=eta)
    )


def get_allocated_batch_ref(session, orderid, sku):
    # Follow order_lines -> allocations -> batches in raw SQL to discover
    # which batch the given order line was allocated to.
    [[orderlineid]] = session.execute(
        'SELECT id FROM order_lines WHERE orderid=:orderid AND sku=:sku',
        dict(orderid=orderid, sku=sku)
    )
    [[batchref]] = session.execute(
        'SELECT b.reference FROM allocations JOIN batches AS b ON batch_id = b.id'
        ' WHERE orderline_id=:orderlineid',
        dict(orderlineid=orderlineid)
    )
    return batchref


def test_uow_can_retrieve_a_batch_and_allocate_to_it(sqlite_session_factory):
    session = sqlite_session_factory()
    insert_batch(session, 'batch1', 'HIPSTER-WORKBENCH', 100, None)
    session.commit()

    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with uow:
        product = uow.products.get(sku='HIPSTER-WORKBENCH')
        line = model.OrderLine('o1', 'HIPSTER-WORKBENCH', 10)
        product.allocate(line)
        uow.commit()

    # verify via an independent raw-SQL query that the allocation persisted
    batchref = get_allocated_batch_ref(session, 'o1', 'HIPSTER-WORKBENCH')
    assert batchref == 'batch1'


def test_rolls_back_uncommitted_work_by_default(sqlite_session_factory):
    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with uow:
        # no uow.commit() — leaving the with-block should discard this insert
        insert_batch(uow.session, 'batch1', 'MEDIUM-PLINTH', 100, None)

    new_session = sqlite_session_factory()
    rows = list(new_session.execute('SELECT * FROM "batches"'))
    assert rows == []


def test_rolls_back_on_error(sqlite_session_factory):
    class MyException(Exception):
        pass

    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with pytest.raises(MyException):
        with uow:
            insert_batch(uow.session, 'batch1', 'LARGE-FORK', 100, None)
            raise MyException()

    new_session = sqlite_session_factory()
    rows = list(new_session.execute('SELECT * FROM "batches"'))
    assert rows == []


def try_to_allocate(orderid, sku, exceptions, session_factory):
    # Worker for the concurrency test: allocate one line, sleeping inside the
    # transaction so two threads are guaranteed to overlap; any exception is
    # collected rather than raised so the main thread can assert on it.
    line = model.OrderLine(orderid, sku, 10)
    try:
        with unit_of_work.SqlAlchemyUnitOfWork(session_factory) as uow:
            product = uow.products.get(sku=sku)
            product.allocate(line)
            time.sleep(0.2)
            uow.commit()
    except Exception as e:  # pylint: disable=broad-except
        print(traceback.format_exc())
        exceptions.append(e)


def test_concurrent_updates_to_version_are_not_allowed(postgres_session_factory):
    sku, batch = random_sku(), random_batchref()
    session = postgres_session_factory()
    insert_batch(session, batch, sku, 100, eta=None, product_version=1)
    session.commit()

    # two overlapping allocations against the same product
    order1, order2 = random_orderid(1), random_orderid(2)
    exceptions = []  # type: List[Exception]
    try_to_allocate_order1 = lambda: try_to_allocate(
        order1, sku, exceptions, postgres_session_factory
    )
    try_to_allocate_order2 = lambda: try_to_allocate(
        order2, sku, exceptions, postgres_session_factory
    )
    thread1 = threading.Thread(target=try_to_allocate_order1)
    thread2 = threading.Thread(target=try_to_allocate_order2)
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()

    # only one transaction may win: the version is bumped exactly once...
    [[version]] = session.execute(
        "SELECT version_number FROM products WHERE sku=:sku",
        dict(sku=sku),
    )
    assert version == 2
    # ...and the loser surfaces postgres's serialization failure
    [exception] = exceptions
    assert 'could not serialize access due to concurrent update' in str(exception)

    orders = list(session.execute(
        "SELECT orderid FROM allocations"
        " JOIN batches ON allocations.batch_id = batches.id"
        " JOIN order_lines ON allocations.orderline_id = order_lines.id"
        " WHERE order_lines.sku=:sku",
        dict(sku=sku),
    ))
    assert len(orders) == 1
    # sanity check: a fresh unit of work can still use the database
    with unit_of_work.SqlAlchemyUnitOfWork(postgres_session_factory) as uow:
        uow.session.execute('select 1')
# tests/random_refs.py — unique test identifiers so runs never collide.
import uuid


def random_suffix():
    """Return six hex characters of randomness."""
    return uuid.uuid4().hex[:6]


def _tagged_ref(prefix, name):
    # shared formatter: '<prefix>-<name>-<suffix>'
    return f'{prefix}-{name}-{random_suffix()}'


def random_sku(name=''):
    """A unique sku, optionally tagged with *name* for readability."""
    return _tagged_ref('sku', name)


def random_batchref(name=''):
    """A unique batch reference, optionally tagged with *name*."""
    return _tagged_ref('batch', name)


def random_orderid(name=''):
    """A unique order id, optionally tagged with *name*."""
    return _tagged_ref('order', name)
small_batch.can_allocate(large_line) is False + +def test_can_allocate_if_available_equal_to_required(): + batch, line = make_batch_and_line("ELEGANT-LAMP", 2, 2) + assert batch.can_allocate(line) + +def test_cannot_allocate_if_skus_do_not_match(): + batch = Batch("batch-001", "UNCOMFORTABLE-CHAIR", 100, eta=None) + different_sku_line = OrderLine("order-123", "EXPENSIVE-TOASTER", 10) + assert batch.can_allocate(different_sku_line) is False + +def test_allocation_is_idempotent(): + batch, line = make_batch_and_line("ANGULAR-DESK", 20, 2) + batch.allocate(line) + batch.allocate(line) + assert batch.available_quantity == 18 diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py new file mode 100644 index 00000000..49124dc5 --- /dev/null +++ b/tests/unit/test_handlers.py @@ -0,0 +1,153 @@ +# pylint: disable=no-self-use +from __future__ import annotations +from collections import defaultdict +from datetime import date +from typing import Dict, List +import pytest +from allocation import bootstrap +from allocation.domain import commands +from allocation.service_layer import handlers +from allocation.adapters import notifications, repository +from allocation.service_layer import unit_of_work + + +class FakeRepository(repository.AbstractRepository): + + def __init__(self, products): + super().__init__() + self._products = set(products) + + def _add(self, product): + self._products.add(product) + + def _get(self, sku): + return next((p for p in self._products if p.sku == sku), None) + + def _get_by_batchref(self, batchref): + return next(( + p for p in self._products for b in p.batches + if b.reference == batchref + ), None) + + +class FakeUnitOfWork(unit_of_work.AbstractUnitOfWork): + + def __init__(self): + self.products = FakeRepository([]) + self.committed = False + + def _commit(self): + self.committed = True + + def rollback(self): + pass + + +class FakeNotifications(notifications.AbstractNotifications): + + def __init__(self): + self.sent = 
defaultdict(list) # type: Dict[str, List[str]] + + def send(self, destination, message): + self.sent[destination].append(message) + + + +def bootstrap_test_app(): + return bootstrap.bootstrap( + start_orm=False, + uow=FakeUnitOfWork(), + notifications=FakeNotifications(), + publish=lambda *args: None, + ) + + +class TestAddBatch: + + def test_for_new_product(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "CRUNCHY-ARMCHAIR", 100, None)) + assert bus.uow.products.get("CRUNCHY-ARMCHAIR") is not None + assert bus.uow.committed + + + def test_for_existing_product(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "GARISH-RUG", 100, None)) + bus.handle(commands.CreateBatch("b2", "GARISH-RUG", 99, None)) + assert "b2" in [b.reference for b in bus.uow.products.get("GARISH-RUG").batches] + + + +class TestAllocate: + + def test_allocates(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("batch1", "COMPLICATED-LAMP", 100, None)) + bus.handle(commands.Allocate("o1", "COMPLICATED-LAMP", 10)) + [batch] = bus.uow.products.get("COMPLICATED-LAMP").batches + assert batch.available_quantity == 90 + + + def test_errors_for_invalid_sku(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "AREALSKU", 100, None)) + + with pytest.raises(handlers.InvalidSku, match="Invalid sku NONEXISTENTSKU"): + bus.handle(commands.Allocate("o1", "NONEXISTENTSKU", 10)) + + def test_commits(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("b1", "OMINOUS-MIRROR", 100, None)) + bus.handle(commands.Allocate("o1", "OMINOUS-MIRROR", 10)) + assert bus.uow.committed + + + def test_sends_email_on_out_of_stock_error(self): + fake_notifs = FakeNotifications() + bus = bootstrap.bootstrap( + start_orm=False, + uow=FakeUnitOfWork(), + notifications=fake_notifs, + publish=lambda *args: None, + ) + bus.handle(commands.CreateBatch("b1", "POPULAR-CURTAINS", 9, None)) + bus.handle(commands.Allocate("o1", 
"POPULAR-CURTAINS", 10)) + assert fake_notifs.sent['stock@made.com'] == [ + f"Out of stock for POPULAR-CURTAINS", + ] + + + +class TestChangeBatchQuantity: + + def test_changes_available_quantity(self): + bus = bootstrap_test_app() + bus.handle(commands.CreateBatch("batch1", "ADORABLE-SETTEE", 100, None)) + [batch] = bus.uow.products.get(sku="ADORABLE-SETTEE").batches + assert batch.available_quantity == 100 + + bus.handle(commands.ChangeBatchQuantity("batch1", 50)) + assert batch.available_quantity == 50 + + + def test_reallocates_if_necessary(self): + bus = bootstrap_test_app() + history = [ + commands.CreateBatch("batch1", "INDIFFERENT-TABLE", 50, None), + commands.CreateBatch("batch2", "INDIFFERENT-TABLE", 50, date.today()), + commands.Allocate("order1", "INDIFFERENT-TABLE", 20), + commands.Allocate("order2", "INDIFFERENT-TABLE", 20), + ] + for msg in history: + bus.handle(msg) + [batch1, batch2] = bus.uow.products.get(sku="INDIFFERENT-TABLE").batches + assert batch1.available_quantity == 10 + assert batch2.available_quantity == 50 + + bus.handle(commands.ChangeBatchQuantity("batch1", 25)) + + # order1 or order2 will be deallocated, so we'll have 25 - 20 + assert batch1.available_quantity == 5 + # and 20 will be reallocated to the next batch + assert batch2.available_quantity == 30 diff --git a/tests/unit/test_product.py b/tests/unit/test_product.py new file mode 100644 index 00000000..c1beb274 --- /dev/null +++ b/tests/unit/test_product.py @@ -0,0 +1,71 @@ +from datetime import date, timedelta +from allocation.domain import events +from allocation.domain.model import Product, OrderLine, Batch + + +today = date.today() +tomorrow = today + timedelta(days=1) +later = tomorrow + timedelta(days=10) + +def test_prefers_warehouse_batches_to_shipments(): + in_stock_batch = Batch("in-stock-batch", "RETRO-CLOCK", 100, eta=None) + shipment_batch = Batch("shipment-batch", "RETRO-CLOCK", 100, eta=tomorrow) + product = Product(sku="RETRO-CLOCK", batches=[in_stock_batch, 
def test_prefers_earlier_batches():
    """allocate() should pick the batch with the soonest eta."""
    earliest = Batch("speedy-batch", "MINIMALIST-SPOON", 100, eta=today)
    medium = Batch("normal-batch", "MINIMALIST-SPOON", 100, eta=tomorrow)
    latest = Batch("slow-batch", "MINIMALIST-SPOON", 100, eta=later)
    # deliberately pass the batches out of order
    product = Product(sku="MINIMALIST-SPOON", batches=[medium, earliest, latest])

    product.allocate(OrderLine("order1", "MINIMALIST-SPOON", 10))

    assert earliest.available_quantity == 90
    assert medium.available_quantity == 100
    assert latest.available_quantity == 100


def test_returns_allocated_batch_ref():
    """allocate() returns the reference of the batch it chose."""
    in_stock_batch = Batch("in-stock-batch-ref", "HIGHBROW-POSTER", 100, eta=None)
    shipment_batch = Batch("shipment-batch-ref", "HIGHBROW-POSTER", 100, eta=tomorrow)
    product = Product(sku="HIGHBROW-POSTER", batches=[in_stock_batch, shipment_batch])

    result = product.allocate(OrderLine("oref", "HIGHBROW-POSTER", 10))

    assert result == in_stock_batch.reference


def test_outputs_allocated_event():
    """A successful allocation records an Allocated event on the product."""
    batch = Batch("batchref", "RETRO-LAMPSHADE", 100, eta=None)
    product = Product(sku="RETRO-LAMPSHADE", batches=[batch])

    product.allocate(OrderLine("oref", "RETRO-LAMPSHADE", 10))

    expected = events.Allocated(
        orderid="oref", sku="RETRO-LAMPSHADE", qty=10, batchref=batch.reference
    )
    assert product.events[-1] == expected


def test_records_out_of_stock_event_if_cannot_allocate():
    """When no batch can satisfy the line, record OutOfStock and return None."""
    batch = Batch('batch1', 'SMALL-FORK', 10, eta=today)
    product = Product(sku="SMALL-FORK", batches=[batch])
    product.allocate(OrderLine('order1', 'SMALL-FORK', 10))  # exhaust the batch

    result = product.allocate(OrderLine('order2', 'SMALL-FORK', 1))

    assert product.events[-1] == events.OutOfStock(sku="SMALL-FORK")
    assert result is None


def test_increments_version_number():
    """Each successful allocation bumps the product's version number by one."""
    product = Product(sku="SCANDI-PEN", batches=[Batch('b1', "SCANDI-PEN", 100, eta=None)])
    product.version_number = 7

    product.allocate(OrderLine('oref', "SCANDI-PEN", 10))

    assert product.version_number == 8