Skip to content

Commit

Permalink
Merge pull request #414 from SUNET/lundberg_bump_requirements
Browse files Browse the repository at this point in the history
bump requirements
  • Loading branch information
helylle authored Sep 25, 2023
2 parents cc13416 + 92e7107 commit 4a50bc9
Show file tree
Hide file tree
Showing 47 changed files with 6,371 additions and 5,791 deletions.
3 changes: 3 additions & 0 deletions requirements/constraints.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
anyio>=3.7.1,<4.0.0 # until fastapi loosens req
pydantic<2
oic==1.5.0 # until pydantic upgrade
1,980 changes: 1,000 additions & 980 deletions requirements/fastapi_requirements.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions requirements/main.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
-r sub_main.in
-c constraints.txt
Babel
Pillow
PyNaCl
Expand Down
1,634 changes: 830 additions & 804 deletions requirements/main.txt

Large diffs are not rendered by default.

1,637 changes: 828 additions & 809 deletions requirements/satosa_scim_requirements.txt

Large diffs are not rendered by default.

32 changes: 16 additions & 16 deletions requirements/sub_main.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.11
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# make update_deps
Expand All @@ -10,29 +10,29 @@ cached-property==1.5.2 \
--hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
--hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0
# via -r sub_main.in
exceptiongroup==1.1.0 \
--hash=sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e \
--hash=sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23
exceptiongroup==1.1.3 \
--hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \
--hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3
# via -r sub_main.in
importlib-metadata==6.0.0 \
--hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
--hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
importlib-metadata==6.8.0 \
--hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
--hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
# via -r sub_main.in
importlib-resources==5.12.0 \
--hash=sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6 \
--hash=sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a
importlib-resources==6.1.0 \
--hash=sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9 \
--hash=sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83
# via -r sub_main.in
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
# via -r sub_main.in
zipp==3.15.0 \
--hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \
--hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556
zipp==3.17.0 \
--hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
--hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0
# via importlib-metadata

# The following packages are considered to be unsafe in a requirements file:
setuptools==67.6.0 \
--hash=sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077 \
--hash=sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2
setuptools==68.2.2 \
--hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \
--hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a
# via -r sub_main.in
1 change: 1 addition & 0 deletions requirements/test_requirements.in
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,4 @@ pytest
pytest-cov
respx
pip-tools
motor-types
2,330 changes: 1,180 additions & 1,150 deletions requirements/test_requirements.txt

Large diffs are not rendered by default.

1,675 changes: 847 additions & 828 deletions requirements/webapp_requirements.txt

Large diffs are not rendered by default.

1,625 changes: 822 additions & 803 deletions requirements/worker_requirements.txt

Large diffs are not rendered by default.

18 changes: 18 additions & 0 deletions src/eduid/common/config/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,11 @@

from datetime import timedelta
from enum import Enum
from pathlib import Path
from re import Pattern
from typing import Any, Mapping, Optional, Sequence, TypeVar, Union

import pkg_resources
from pydantic import BaseModel, Field

from eduid.userdb.credentials import CredentialProofingMethod
Expand Down Expand Up @@ -305,6 +307,22 @@ class VCCSConfigMixin(BaseModel):
vccs_check_password: str


class CaptchaConfigMixin(BaseModel):
    """Configuration mixin for apps that generate and verify image captchas."""

    # Number of characters in the generated captcha code.
    captcha_code_length: int = 6
    # Rendered captcha image size, in pixels.
    captcha_width: int = 160
    captcha_height: int = 60
    # Fonts used to draw captcha characters.
    # Fix: resource_filename() returns a plain str, but the field is declared
    # list[Path] and pydantic v1 does not validate/coerce field defaults —
    # wrap each default in Path so the declared type actually holds.
    # TODO(review): pkg_resources is deprecated; consider importlib.resources.
    captcha_fonts: list[Path] = Field(
        default=[
            Path(pkg_resources.resource_filename("eduid", "static/fonts/ProximaNova-Regular.ttf")),
            Path(pkg_resources.resource_filename("eduid", "static/fonts/ProximaNova-Light.ttf")),
            Path(pkg_resources.resource_filename("eduid", "static/fonts/ProximaNova-Bold.ttf")),
        ]
    )
    # Three candidate font sizes — presumably (min, mid, max) pt sizes used
    # when rendering characters; confirm against the captcha renderer.
    captcha_font_size: tuple[int, int, int] = (42, 50, 56)
    # NOTE(review): presumably the cap on failed captcha attempts — verify consumer.
    captcha_max_bad_attempts: int = 100
    # NOTE(review): looks like a test-environment bypass code — confirm it is
    # overridden/disabled in production configuration.
    captcha_backdoor_code: str = "123456"


class AmConfigMixin(CeleryConfigMixin):
"""Config used by AmRelay"""

Expand Down
28 changes: 16 additions & 12 deletions src/eduid/queue/db/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,27 +36,18 @@
from eduid.queue.db.payload import Payload
from eduid.queue.db.queue_item import QueueItem
from eduid.queue.exceptions import PayloadNotRegistered
from eduid.userdb.db import BaseDB, DatabaseDriver
from eduid.userdb.db import BaseDB
from eduid.userdb.exceptions import MultipleDocumentsReturned

logger = logging.getLogger(__name__)

__author__ = "lundberg"


class QueueDB(BaseDB):
def __init__(
self, db_uri: str, collection: str, db_name: str = "eduid_queue", driver: Optional[DatabaseDriver] = None
):
super().__init__(db_uri, db_name, collection=collection, driver=driver)
class QueuePayloadMixin:
def __init__(self, *args, **kwargs):
self.handlers: dict[str, type[Payload]] = dict()

# Remove messages older than discard_at datetime
indexes = {
"auto-discard": {"key": [("discard_at", 1)], "expireAfterSeconds": 0},
}
self.setup_indexes(indexes)

def register_handler(self, payload: type[Payload]):
payload_type = payload.get_type()
if payload_type in self.handlers:
Expand All @@ -71,6 +62,19 @@ def _load_payload(self, item: QueueItem) -> Payload:
raise PayloadNotRegistered(f"Payload type '{item.payload_type}' not registered with {self}")
return payload_cls.from_dict(item.payload.to_dict())


class QueueDB(BaseDB, QueuePayloadMixin):
"""Synchronous queue database client.

Combines BaseDB (Mongo connection/collection handling) with
QueuePayloadMixin (the payload-type handler registry).
"""

def __init__(self, db_uri: str, collection: str, db_name: str = "eduid_queue"):
super().__init__(db_uri=db_uri, db_name=db_name, collection=collection)

# NOTE(review): handlers is also initialised in QueuePayloadMixin.__init__;
# re-initialising here guards against BaseDB.__init__ not chaining super()
# along the MRO — confirm whether both assignments are needed.
self.handlers: dict[str, type[Payload]] = dict()

# Remove messages older than discard_at datetime
# (TTL index: expireAfterSeconds=0 makes Mongo delete a document once the
# wall clock passes its discard_at value.)
indexes = {
"auto-discard": {"key": [("discard_at", 1)], "expireAfterSeconds": 0},
}
self.setup_indexes(indexes)

def get_item_by_id(self, message_id: Union[str, ObjectId], parse_payload: bool = True) -> Optional[QueueItem]:
if isinstance(message_id, str):
message_id = ObjectId(message_id)
Expand Down
50 changes: 25 additions & 25 deletions src/eduid/queue/db/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,39 +4,33 @@
from typing import Any, Mapping, Optional, Union

from bson import ObjectId
from motor import motor_asyncio
from pymongo.results import UpdateResult

from eduid.queue.db import QueueDB, QueueItem
from eduid.queue.db import Payload, QueueItem
from eduid.queue.db.client import QueuePayloadMixin
from eduid.queue.exceptions import PayloadNotRegistered
from eduid.userdb import MongoDB
from eduid.userdb.db import DatabaseDriver
from eduid.userdb.db.async_db import AsyncBaseDB

__author__ = "lundberg"

logger = logging.getLogger(__name__)


class AsyncQueueDB(QueueDB):
class AsyncQueueDB(AsyncBaseDB, QueuePayloadMixin):
def __init__(self, db_uri: str, collection: str, db_name: str = "eduid_queue"):
super().__init__(db_uri, collection=collection, db_name=db_name)

# Re-initialize database and collection with async versions.
# TODO: Refactor setup_indexes() to work with async driver too, possibly by creating an AsyncMongoDB class.
self._db = MongoDB(db_uri, db_name=db_name, driver=DatabaseDriver.ASYNCIO)
self._coll = self._db.get_collection(collection=collection)
self.handlers: dict[str, type[Payload]] = dict()

@property
def database(self) -> motor_asyncio.AsyncIOMotorDatabase:
return self._db.get_database()

@property
def collection(self) -> motor_asyncio.AsyncIOMotorCollection:
return self._coll

@property
def connection(self) -> motor_asyncio.AsyncIOMotorClient:
return self._db.get_connection()
@classmethod
async def create(cls, db_uri: str, collection: str, db_name: str = "eduid_queue") -> "AsyncQueueDB":
# Remove messages older than discard_at datetime
indexes = {
"auto-discard": {"key": [("discard_at", 1)], "expireAfterSeconds": 0},
}
instance = cls(db_uri=db_uri, collection=collection, db_name=db_name)
await instance.setup_indexes(indexes)
return instance

def parse_queue_item(self, doc: Mapping, parse_payload: bool = True):
item = QueueItem.from_dict(doc)
Expand Down Expand Up @@ -77,17 +71,18 @@ async def grab_item(self, item_id: Union[str, ObjectId], worker_name: str, regra
spec["processed_ts"] = doc["processed_ts"]

# Update item with current worker name and ts
doc["processed_by"] = worker_name
doc["processed_ts"] = datetime.now(tz=timezone.utc)
mutable_doc = dict(doc)
mutable_doc["processed_by"] = worker_name
mutable_doc["processed_ts"] = datetime.now(tz=timezone.utc)

try:
# Try to parse the queue item to only grab items that are registered with the current db
item = self.parse_queue_item(doc, parse_payload=True)
item = self.parse_queue_item(mutable_doc, parse_payload=True)
except PayloadNotRegistered as e:
logger.debug(e)
return None

update_result: UpdateResult = await self.collection.replace_one(spec, doc)
update_result: UpdateResult = await self.collection.replace_one(spec, mutable_doc)
logger.debug(f"result: {update_result.raw_result}")
if not update_result.acknowledged or update_result.modified_count != 1:
logger.debug(f"Grabbing of item failed: {update_result.raw_result}")
Expand Down Expand Up @@ -137,7 +132,7 @@ async def remove_item(self, item_id: Union[str, ObjectId]) -> bool:

async def replace_item(self, old_item: QueueItem, new_item: QueueItem) -> bool:
if old_item.item_id != new_item.item_id:
logger.warning(f"Can not replace items with different item_id")
logger.warning("Can not replace items with different item_id")
logger.debug(f"old_item: {old_item}")
logger.debug(f"new_item: {new_item}")
return False
Expand All @@ -147,3 +142,8 @@ async def replace_item(self, old_item: QueueItem, new_item: QueueItem) -> bool:
logger.debug(f"Saving of item failed: {update_result.raw_result}")
return False
return True

async def save(self, item: QueueItem) -> bool:
    """Insert or replace *item* in the queue collection, keyed on its item_id.

    Returns True when the write was acknowledged by the server.
    """
    query = {"_id": item.item_id}
    result = await self._coll.replace_one(query, item.to_dict(), upsert=True)
    return result.acknowledged
25 changes: 21 additions & 4 deletions src/eduid/queue/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

from eduid.common.misc.timeutil import utc_now
from eduid.queue.db import Payload, QueueDB, QueueItem, SenderInfo
from eduid.queue.db.worker import AsyncQueueDB
from eduid.userdb.db import TUserDbDocument
from eduid.userdb.testing import EduidTemporaryInstance, MongoTemporaryInstance

Expand Down Expand Up @@ -114,7 +115,7 @@ class EduidQueueTestCase(TestCase):
mongo_instance: MongoTemporaryInstanceReplicaSet
mongo_uri: str
mongo_collection: str
db: QueueDB
client_db: QueueDB

@classmethod
def setUpClass(cls) -> None:
Expand All @@ -126,13 +127,13 @@ def setUp(self) -> None:
self._init_db()

def tearDown(self) -> None:
self.db._drop_whole_collection()
self.client_db._drop_whole_collection()

def _init_db(self):
db_init_try = 0
while True:
try:
self.db = QueueDB(db_uri=self.mongo_uri, collection=self.mongo_collection)
self.client_db = QueueDB(db_uri=self.mongo_uri, collection=self.mongo_collection)
break
except pymongo.errors.NotPrimaryError as e:
db_init_try += 1
Expand All @@ -143,14 +144,30 @@ def _init_db(self):


class QueueAsyncioTest(EduidQueueTestCase, IsolatedAsyncioTestCase):
worker_db: AsyncQueueDB

async def asyncSetUp(self) -> None:
"""Per-test async setup: reset the background-task list and connect the worker DB."""
self.tasks: list[Task] = []
await self._init_async_db()

async def asyncTearDown(self) -> None:
    """Cancel any background tasks that have not finished yet."""
    pending = [task for task in self.tasks if not task.done()]
    for task in pending:
        task.cancel()

async def _init_async_db(self):
    """Create the async worker DB, retrying while the replica set elects a primary.

    Retries up to 10 times with a linearly increasing back-off, then re-raises
    the last NotPrimaryError.
    """
    db_init_try = 0
    while True:
        try:
            self.worker_db = await AsyncQueueDB.create(db_uri=self.mongo_uri, collection=self.mongo_collection)
            break
        except pymongo.errors.NotPrimaryError:
            db_init_try += 1
            if db_init_try >= 10:
                # Bare raise preserves the original traceback; checking before
                # sleeping avoids a pointless final back-off wait.
                raise
            # Fix: time.sleep() would block the event loop this coroutine runs
            # on — use the non-blocking asyncio.sleep instead (as the rest of
            # this module already does).
            await asyncio.sleep(db_init_try)

@staticmethod
def create_queue_item(expires_at: datetime, discard_at: datetime, payload: Payload):
sender_info = SenderInfo(hostname="localhost", node_id="test")
Expand All @@ -168,7 +185,7 @@ async def _assert_item_gets_processed(self, queue_item: QueueItem, retry: bool =
fetched: Optional[QueueItem] = None
while utc_now() < end_time:
await asyncio.sleep(0.5) # Allow worker to run
fetched = self.db.get_item_by_id(queue_item.item_id)
fetched = self.client_db.get_item_by_id(queue_item.item_id)
if not fetched:
logger.info(f"Queue item {queue_item.item_id} was processed")
break
Expand Down
9 changes: 5 additions & 4 deletions src/eduid/queue/tests/test_mail_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,11 @@ def setUp(self) -> None:
os.environ["EDUID_CONFIG_YAML"] = "YAML_CONFIG_NOT_USED"

self.config = load_config(typ=QueueWorkerConfig, app_name="test", ns="queue", test_config=self.test_config)
self.db.register_handler(EduidSignupEmail)
self.client_db.register_handler(EduidSignupEmail)

async def asyncSetUp(self) -> None:
await super().asyncSetUp()
self.worker_db.register_handler(EduidSignupEmail)
await asyncio.sleep(0.5) # wait for db
self.worker = MailQueueWorker(config=self.config)
self.tasks = [asyncio.create_task(self.worker.run())]
Expand All @@ -72,7 +73,7 @@ async def test_eduid_signup_mail_from_stream(self):
)
queue_item = self.create_queue_item(expires_at, discard_at, payload)
# Client saves new queue item
self.db.save(queue_item)
self.client_db.save(queue_item)
await self._assert_item_gets_processed(queue_item)

@patch("aiosmtplib.SMTP.sendmail")
Expand All @@ -89,7 +90,7 @@ async def test_eduid_signup_mail_from_stream_unrecoverable_error(self, mock_send
)
queue_item = self.create_queue_item(expires_at, discard_at, payload)
# Client saves new queue item
self.db.save(queue_item)
self.client_db.save(queue_item)
await self._assert_item_gets_processed(queue_item)

@patch("aiosmtplib.SMTP.sendmail")
Expand All @@ -109,5 +110,5 @@ async def test_eduid_signup_mail_from_stream_error_retry(self, mock_sendmail):
)
queue_item = self.create_queue_item(expires_at, discard_at, payload)
# Client saves new queue item
self.db.save(queue_item)
self.client_db.save(queue_item)
await self._assert_item_gets_processed(queue_item, retry=True)
9 changes: 4 additions & 5 deletions src/eduid/queue/tests/test_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import os
from datetime import timedelta
from os import environ
from typing import Optional

from eduid.common.config.parsers import load_config
from eduid.queue.config import QueueWorkerConfig
Expand Down Expand Up @@ -50,11 +49,11 @@ def setUp(self) -> None:
os.environ["EDUID_CONFIG_YAML"] = "YAML_CONFIG_NOT_USED"

self.config = load_config(typ=QueueWorkerConfig, app_name="test", ns="queue", test_config=self.test_config)

self.db.register_handler(TestPayload)
self.client_db.register_handler(TestPayload)

async def asyncSetUp(self) -> None:
await super().asyncSetUp()
self.worker_db.register_handler(TestPayload)
await asyncio.sleep(0.5) # wait for db
self.worker = QueueTestWorker(config=self.config, handle_payloads=[TestPayload])
self.tasks = [asyncio.create_task(self.worker.run())]
Expand All @@ -72,7 +71,7 @@ async def test_worker_item_from_stream(self):
payload = TestPayload(message="New item")
queue_item = self.create_queue_item(expires_at, discard_at, payload)
# Client saves new queue item
self.db.save(queue_item)
self.client_db.save(queue_item)
await self._assert_item_gets_processed(queue_item)

async def test_worker_expired_item(self):
Expand All @@ -87,7 +86,7 @@ async def test_worker_expired_item(self):
payload = TestPayload(message="Expired item")
queue_item = self.create_queue_item(expires_at, discard_at, payload)
# Fake that a client have saved queue item in the past
self.db.save(queue_item)
self.client_db.save(queue_item)
# Start worker after save to fake that the item has expired unhandled in the queue
self.tasks = [asyncio.create_task(self.worker.run())]
await self._assert_item_gets_processed(queue_item)
Loading

0 comments on commit 4a50bc9

Please sign in to comment.