From 69d5caab8706c8819ac9a6095892b9f0e2cde334 Mon Sep 17 00:00:00 2001 From: M Aswin Kishore <60577077+mak626@users.noreply.github.com> Date: Sat, 26 Jul 2025 13:19:11 +0530 Subject: [PATCH] feat: mongoengine async v0.1.0 --- .github/workflows/lint.yml | 33 + .github/workflows/publish-release.yml | 56 ++ .github/workflows/test.yml | 70 ++ .gitignore | 246 ++++++ .pre-commit-config.yaml | 43 + README.md | 231 +++++- examples/__init__.py | 0 examples/usage.py | 111 +++ mongoengine_async_extension/__init__.py | 26 + mongoengine_async_extension/config.py | 10 + mongoengine_async_extension/connection.py | 36 + mongoengine_async_extension/core.py | 960 ++++++++++++++++++++++ mongoengine_async_extension/exceptions.py | 21 + poetry.lock | 609 ++++++++++++++ pyproject.toml | 54 ++ pytest.ini | 3 + tests/__init__.py | 0 tests/conftest.py | 13 + tests/db/__init__.py | 0 tests/db/model.py | 16 + tests/test_operations.py | 340 ++++++++ 21 files changed, 2877 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/publish-release.yml create mode 100644 .github/workflows/test.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 examples/__init__.py create mode 100644 examples/usage.py create mode 100644 mongoengine_async_extension/__init__.py create mode 100644 mongoengine_async_extension/config.py create mode 100644 mongoengine_async_extension/connection.py create mode 100644 mongoengine_async_extension/core.py create mode 100644 mongoengine_async_extension/exceptions.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 pytest.ini create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/db/__init__.py create mode 100644 tests/db/model.py create mode 100644 tests/test_operations.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..2dcc565 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,33 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Lint + +on: [push, pull_request] +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + strategy: + matrix: + python: ["3.12"] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip poetry + poetry install --only dev + - name: ruff check + run: | + poetry run ruff check . + poetry run ruff format --diff . 
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml new file mode 100644 index 0000000..f7e4c9f --- /dev/null +++ b/.github/workflows/publish-release.yml @@ -0,0 +1,56 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +name: Publish to PyPI + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + build: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python: ["3.12", "3.13"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip poetry + poetry install --with dev + - name: Ruff check + run: | + poetry run ruff check . + poetry run ruff format --diff . + - name: Test with pytest + run: | + poetry run pytest + + deploy: + needs: build + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install poetry + - name: Build & Release package + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} + run: | + poetry publish --build diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..6b86d0c --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,70 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Test Package + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + test: + strategy: + matrix: + os: [ubuntu-latest] +# supercharge/mongodb-github-action@1.12.0 works only on linux +# os: [ubuntu-latest, macos-latest, windows-latest] + python: ["3.12", "3.13"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip poetry + poetry install --with dev + - name: Start MongoDB + uses: supercharge/mongodb-github-action@1.12.0 + with: + mongodb-replica-set: local-rs + - name: Ruff check + run: | + poetry run ruff check . + poetry run ruff format --diff . 
+ - name: Test with pytest + run: | + poetry run pytest + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + - name: Install dependencies + run: | + python -m pip install --upgrade poetry + - name: Build package + run: | + poetry version "$(poetry version --short | cut -f1 -d + )+build-$(date +'%Y%m%d%H%M%S')" + poetry build + - name: Upload to GitHub Artifacts + uses: actions/upload-artifact@v4 + with: + name: build-dist + path: dist/ diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..bbc4176 --- /dev/null +++ b/.gitignore @@ -0,0 +1,246 @@ +# Created by https://www.toptal.com/developers/gitignore/api/python,pycharm +# Edit at https://www.toptal.com/developers/gitignore?templates=python,pycharm + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ 
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit tests / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+pytestdebug.log
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+doc/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env.txt/
+venv/
+env.bak/
+venv.bak/
+pythonenv*
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# profiling mock_job
+.prof
+
+.idea/*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..e1d6b71
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,43 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v5.0.0
+    hooks:
+      - id: check-yaml
+        stages: [ pre-commit ]
+      - id: detect-private-key
+        stages: [ pre-commit ]
+      - id: name-tests-test
+        stages: [ pre-push ]
+      - id: check-added-large-files
+        stages: [ pre-commit ]
+      - id: end-of-file-fixer
+        stages: [ pre-commit ]
+      - id: no-commit-to-branch
+        args: [ '--branch', 'master','--branch', 'main', '--branch', 'develop', '--branch', 'staging' ]
+        stages: [ pre-push ]
+      - id: check-merge-conflict
+        stages: [ pre-push ]
+
+  - repo: https://github.com/PyCQA/bandit
+    rev: '1.8.3'
+    hooks:
+      - id: bandit
+        args: [ -ll ]
+        stages: [ pre-commit, pre-push ]
+
+  - repo: local
+    hooks:
+      - id: ruff-format
+        name: ruff format
+        description: "Run 'ruff format' for extremely fast Python formatting"
+        entry: ruff format --force-exclude
+        language: python
+        types_or: [python, pyi, jupyter]
+        args: []
+      - id: ruff-check
+        name: ruff check
+        description: "Run 'ruff check' for extremely fast Python linting"
+        entry: ruff check --force-exclude
+        language: python
+        types_or: [ python, pyi, jupyter ]
+        args: [ ]
diff --git a/README.md b/README.md
index 279dab7..2861ab3 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,230 @@
-# mongoengine-async-extension
\ No newline at end of file
+# mongoengine-async-extension
+
+### Mongoengine Async Extension
+
+A wrapper to use `pymongo-async` via `mongoengine` syntax.
+Transactions are supported out of the box.
+
+[![PyPI version][pypi-image]][pypi-url]
+[![PyPI pyversions][pypi-version-image]][pypi-version-url]
+[![Downloads][pypi-downloads-image]][pypi-downloads-url]
+[![Test Status][tests-image]][tests-url]
+[![Coverage Status][coveralls-image]][coveralls-url]
+
+[pypi-image]: https://badge.fury.io/py/mongoengine-async-extension.svg
+[pypi-url]: https://pypi.org/project/mongoengine-async-extension/
+[pypi-version-image]: https://img.shields.io/pypi/pyversions/mongoengine-async-extension.svg
+[pypi-version-url]: https://pypi.python.org/pypi/mongoengine-async-extension/
+[pypi-downloads-image]: https://pepy.tech/badge/mongoengine-async-extension
+[pypi-downloads-url]: https://pepy.tech/project/mongoengine-async-extension
+[tests-image]: https://github.com/strollby/mongoengine-async-extension/actions/workflows/test.yml/badge.svg?branch=main
+[tests-url]: https://github.com/strollby/mongoengine-async-extension/actions/workflows/test.yml
+[coveralls-image]: https://coveralls.io/repos/github/strollby/mongoengine-async-extension/badge.svg?branch=main
+[coveralls-url]: https://coveralls.io/github/strollby/mongoengine-async-extension?branch=main
+
+------------------------
+
+#### Installation
+
+`pip install mongoengine-async-extension`
+
+------------------------
+
+#### Usage
+
+- Please note this is a wrapper over [mongoengine](https://docs.mongoengine.org/apireference.html).
+- Most of its operations have been migrated to async; the syntax stays close to MongoEngine's.
+
+##### Connecting to MongoDB
+
+```python
+from mongoengine import connect
+from mongoengine_async_extension import async_mongo_connect
+
+connect(db="mongoengine_async_test")  # Synchronous MongoEngine connection
+async_mongo_connect(db="mongoengine_async_test")  # Asynchronous mongoengine-async connection
+```
+
+##### Queryset
+
+```python
+from mongoengine import DateField, Document, ObjectIdField
+from mongoengine_async_extension import QS
+
+class FooModel(Document):
+    meta = {
+        "collection": "foo",
+        "indexes": [
+            {"fields": ["tenant"]},
+            {
+                "name": "default sort index",
+                "fields": ["tenant", "start_date"],
+            },
+        ],
+    }
+
+    start_date = DateField(required=True, description="Start date of fiscal year")
+    end_date = DateField(required=True, description="End date of fiscal year")
+    tenant = ObjectIdField(required=True)
+
+queryset = QS(FooModel)
+"""
+Class QS
+
+Args:
+    model (type[T]): Mongoengine Document class
+    from_son (Callable[[dict], T], optional): from_son callable. Defaults to model._from_son.
+    auto_deference (bool, optional): auto_deference in _from_son. Defaults to False.
+    throw_pymongo_errors (bool, optional): if true throws pymongo errors instead of mongoengine errors.
+        Defaults to False.
+    session (AsyncClientSession, optional): session to use
+    read_preference: The read preference to use.
+    write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`
+    read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`.
+"""
+```
+
+##### Insertion
+
+```python
+from bson import ObjectId
+from faker import Faker
+
+fake = Faker()
+tenant = ObjectId()
+
+# Insert 100 new FooModel documents
+await queryset.insert(
+    docs=[FooModel(start_date=fake.date(), end_date=fake.date(), tenant=tenant) for _ in range(100)],
+    load_bulk=True,
+)
+# [<FooModel: FooModel object>, ...]
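+
+# insert() can also skip re-loading the documents: per its docstring, passing
+# load_bulk=False returns just the newly created ObjectIds (a minimal sketch):
+inserted_ids = await queryset.insert(
+    docs=[FooModel(start_date=fake.date(), end_date=fake.date(), tenant=tenant) for _ in range(5)],
+    load_bulk=False,
+)
+# [ObjectId('...'), ObjectId('...'), ...]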
+```
+
+##### Listing
+
+```python
+# List the first 10 documents, including only 'id' and 'start_date', sorted by 'start_date'
+results: list[FooModel] = await queryset.limit(10).only("id", "start_date").sort("start_date").to_list()
+print([i.to_mongo() for i in results])
+# Output example: [SON([('_id', ObjectId('...')), ('start_date', datetime.datetime(...))]), ...]
+
+# Filter by ID and exclude 'start_date'
+results: list[FooModel] = await queryset.filter(id__in=[results[0].id]).exclude("start_date").to_list()
+print([i.to_mongo() for i in results])
+# Output example: [SON([('_id', ObjectId('...')), ('end_date', datetime.datetime(...)), ('tenant', ObjectId('...'))])]
+```
+
+
+##### Single Document
+
+```python
+# Get a single document by ID using filter()
+one_doc: FooModel = await queryset.filter(id=results[0].id).get()
+print(one_doc)
+# Output: <FooModel: FooModel object>
+
+# Get a single document by ID directly from get()
+one_doc: FooModel = await queryset.get(id=results[0].id)
+print(one_doc)
+# Output: <FooModel: FooModel object>
+```
+
+##### Counting Documents
+
+```python
+# Count documents matching a specific ID
+print(await queryset.filter(id=results[0].id).count())
+# Output: 1
+```
+
+##### Update Operations
+
+```python
+import datetime
+
+# Update all documents in the collection (use with caution!)
+print(await queryset.filter().update(start_date=datetime.datetime.now()))
+# Output example: 201 (update() returns the number of matched documents)
+```
+
+##### Deletion
+
+```python
+# Delete a specific document by ID
+print(await queryset.filter(id=results[0].id).delete())
+# Output: DeleteResult(...) (details about the delete operation)
+```
+
+##### Aggregation
+
+```python
+# A simple aggregation projecting only the '_id' field, via aggregate_cursor()
+print(await (await queryset.aggregate_cursor(pipeline=[{"$project": {"_id": 1}}])).to_list())
+# Output example: [{'_id': ObjectId('...')}, ...]
+
+# The same aggregation using the convenience aggregate() method
+print(await queryset.aggregate(pipeline=[{"$project": {"_id": 1}}]))
+# Output example: [{'_id': ObjectId('...')}, ...]
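+
+# Larger pipelines are passed the same way; for example, a (hypothetical)
+# per-tenant document count using a $group stage:
+print(await queryset.aggregate(pipeline=[{"$group": {"_id": "$tenant", "count": {"$sum": 1}}}]))
+# Output example: [{'_id': ObjectId('...'), 'count': 100}]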
+```
+
+
+##### Document-Level CRUD
+
+###### Create
+
+```python
+model = FooModel(
+    start_date=fake.date(),
+    end_date=fake.date(),
+    tenant=ObjectId(),
+)
+await queryset.doc(model).save()
+print(model.to_mongo())
+# Output: SON([...]) (the saved document's data)
+```
+
+
+###### Modifying / Updating
+
+```python
+doc: FooModel = await queryset.get(id='some_id')
+
+# Modify the 'start_date' of the 'doc' instance.
+# QS updates the original instance in place and also returns it,
+# so either reference can be used.
+updated_doc = await queryset.doc(doc).modify(
+    start_date=datetime.datetime.now().replace(year=2999),
+)
+print(updated_doc)
+# Output: <FooModel: FooModel object> (the modified document instance)
+print(doc)
+# Output: <FooModel: FooModel object> (the modified document instance)
+
+# updated_doc == doc
+```
+
+###### Deletion
+
+```python
+doc: FooModel = await queryset.get(id=ObjectId())
+
+# Delete the 'doc' instance from the database
+print(await queryset.doc(doc).delete())
+# Output: bool (True on success)
+```
+
+##### Built-in Transaction Support
+
+```python
+import datetime
+
+from pymongo import AsyncMongoClient
+from pymongo.asynchronous.client_session import AsyncClientSession
+from mongoengine_async_extension import QS, async_mongo_client
+
+db: AsyncMongoClient = async_mongo_client()
+session: AsyncClientSession = db.start_session()
+
+async with await session.start_transaction():
+    queryset = QS(FooModel, session=session)
+    doc: FooModel = await queryset.get(id=ObjectId())
+    await queryset.doc(doc).modify(
+        start_date=datetime.datetime.now().replace(year=2999),
+    )
+    await queryset.doc(doc).delete()
+```
diff --git a/examples/__init__.py b/examples/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/usage.py b/examples/usage.py
new file mode 100644
index 0000000..d6a160d
--- /dev/null
+++ b/examples/usage.py
@@ -0,0 +1,111 @@
+import asyncio
+import datetime
+
+from faker import Faker
+from bson import ObjectId
+from mongoengine import connect, DateField, Document, ObjectIdField
+from mongoengine_async_extension import async_mongo_connect, QS, async_mongo_client
+
+fake = Faker()
+
+
+async def main():
+    # ------------------- Connection -------------------
+    connect(db="mongoengine_async_test")  # Sync
+    async_mongo_connect(db="mongoengine_async_test")  # Async
+
+    # ------------------- Model Definition -------------------
+    class FooModel(Document):
+        meta = {
+            "collection": "foo",
+            "indexes": [
+                {"fields": ["bar"]},
+                {"name": "default sort index", "fields": ["bar", "start_date"]},
+            ],
+        }
+
+        start_date = DateField(required=True, description="Start date")
+        end_date = DateField(required=True, description="End date")
+        bar = ObjectIdField(required=True)
+
+    # ------------------- Queryset -------------------
+    queryset = QS(FooModel)
+    bar_ref = ObjectId()
+
+    # ----- Insertion -----
+    await queryset.insert(
+        docs=[FooModel(start_date=fake.date(), end_date=fake.date(), bar=bar_ref) for _ in range(10)],
+        load_bulk=True,
+    )
+
+    # ----- Listing -----
+    results = await queryset.limit(10).only("id", "start_date").sort("start_date").to_list()
+    print("List with only id and start_date:", [i.to_mongo() for i in results])
+
+    results = await queryset.filter(id__in=[results[0].id]).exclude("start_date").to_list()
+    print("Excluding start_date:", [i.to_mongo() for i in results])
+
+    # ----- Single Document -----
+    one_doc = await queryset.filter(id=results[0].id).get()
+    print("Single document (filter):", one_doc)
+
+    one_doc = await queryset.get(id=results[0].id)
+    print("Single 
document (get):", one_doc) + + # ----- Count ----- + count = await queryset.filter(id=results[0].id).count() + print("Document count:", count) + + # ----- Update ----- + updated_count = await queryset.filter().update(start_date=datetime.datetime.now()) + print("Updated documents:", updated_count) + + # ----- Deletion ----- + delete_result = await queryset.filter(id=results[0].id).delete() + print("Delete result:", delete_result) + + # ----- Aggregation ----- + agg_result_cursor = await (await queryset.aggregate_cursor(pipeline=[{"$project": {"_id": 1}}])).to_list() + print("Aggregation (cursor):", agg_result_cursor) + + agg_result = await queryset.aggregate(pipeline=[{"$project": {"_id": 1}}]) + print("Aggregation:", agg_result) + + # ----- Document-Level Create ----- + new_doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId()) + await queryset.doc(new_doc).save() + print("Saved model:", new_doc.to_mongo()) + + # ----- Document-Level Modify ----- + doc = await queryset.get(id=new_doc.pk) + updated_doc = await queryset.doc(doc).modify( + start_date=datetime.datetime.now().replace(year=2999), + ) + print("Modified doc:", updated_doc) + + # ----- Document-Level Delete ----- + print("Deleted doc:", await queryset.doc(doc).delete()) + + # ----- Transaction ----- + db = async_mongo_client() + session = db.start_session() + + async with await session.start_transaction(): + transactional_qs = QS(FooModel, session=session) + new_doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId()) + + await queryset.doc(new_doc).save() + + doc = await transactional_qs.get(id=new_doc.pk) + + await transactional_qs.doc(doc).modify( + start_date=datetime.datetime.now().replace(year=2999), + ) + + await transactional_qs.doc(doc).delete() + print("Transaction completed") + + +# ------------------- Entry Point ------------------- +if __name__ == "__main__": + asyncio.run(main()) diff --git a/mongoengine_async_extension/__init__.py b/mongoengine_async_extension/__init__.py new file mode 100644 index 0000000..145963d --- /dev/null +++ b/mongoengine_async_extension/__init__.py @@ -0,0 +1,26 @@ +from importlib.util import find_spec +from .exceptions import ( + MongoengineAsyncException, + MongoengineAsyncDependencyMissingException, + MongoengineAsyncTransactionException, + MongoengineAsyncTransactionCommitException, + MongoengineAsyncTransactionAbortException, +) + +if find_spec("mongoengine") is None: + raise MongoengineAsyncDependencyMissingException("Install strollby-shared-library with --extras mongoengine-async") + +from .connection import async_mongo_db, async_mongo_client, async_mongo_connect +from .core import QS + +__all__ = [ + "async_mongo_connect", + "async_mongo_client", + "async_mongo_db", + "QS", + "MongoengineAsyncException", + "MongoengineAsyncDependencyMissingException", + "MongoengineAsyncTransactionException", + "MongoengineAsyncTransactionCommitException", + "MongoengineAsyncTransactionAbortException", +] diff --git a/mongoengine_async_extension/config.py b/mongoengine_async_extension/config.py new file mode 100644 index 0000000..f5c8b01 --- /dev/null +++ b/mongoengine_async_extension/config.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass + +from pymongo import AsyncMongoClient +from pymongo.asynchronous.database import AsyncDatabase + + +@dataclass +class PyMongoAsyncDBConfig: + database: AsyncDatabase | None = None + async_mongo_client: AsyncMongoClient | None = None diff --git a/mongoengine_async_extension/connection.py 
b/mongoengine_async_extension/connection.py new file mode 100644 index 0000000..6d4ac30 --- /dev/null +++ b/mongoengine_async_extension/connection.py @@ -0,0 +1,36 @@ +from typing import Any + +from pymongo.asynchronous.database import AsyncDatabase +from pymongo import AsyncMongoClient + +from .config import PyMongoAsyncDBConfig + + +__config = PyMongoAsyncDBConfig() + + +def async_mongo_connect(db: str, **kwargs: Any) -> AsyncMongoClient: + """ + Create a pymongo async client + + Args: + db (str): The name of the database + kwargs: Additional arguments to be passed to the pymongo AsyncMongoClient + """ + __config.async_mongo_client = AsyncMongoClient(**kwargs) + __config.database = __config.async_mongo_client[db] + return __config.async_mongo_client + + +def async_mongo_client() -> AsyncMongoClient: + """Get Mongo AsyncMongoClient""" + if not __config.async_mongo_client: + raise ValueError("Call async_mongo_connect(...) first") + return __config.async_mongo_client + + +def async_mongo_db() -> AsyncDatabase: + """Get Mongo AsyncDatabase""" + if __config.database is None: + raise ValueError("Call async_mongo_connect(...) first") + return __config.database diff --git a/mongoengine_async_extension/core.py b/mongoengine_async_extension/core.py new file mode 100644 index 0000000..f91d65f --- /dev/null +++ b/mongoengine_async_extension/core.py @@ -0,0 +1,960 @@ +import copy +import itertools +import re +from collections.abc import Callable +from collections.abc import Mapping, Sequence +from functools import partial +from typing import Any + +import pymongo +from bson import ObjectId +from bson.typings import _DocumentType +from mongoengine import ( + Document, + DoesNotExist, + InvalidDocumentError, + LookUpError, + MultipleObjectsReturned, + NotUniqueError, + OperationError, + Q, + QueryFieldList, +) +from mongoengine.base import get_document +from mongoengine.queryset import transform +from pymongo import WriteConcern +from pymongo.asynchronous.client_session import AsyncClientSession +from pymongo.asynchronous.collection import AsyncCollection, ReturnDocument, _WriteOp +from pymongo.asynchronous.command_cursor import AsyncCommandCursor +from pymongo.asynchronous.cursor import AsyncCursor +from pymongo.collation import Collation +from pymongo.cursor_shared import _Hint +from pymongo.errors import DuplicateKeyError, OperationFailure +from pymongo.operations import _IndexKeyHint +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import _ServerMode +from pymongo.results import BulkWriteResult, DeleteResult, UpdateResult + +from .connection import async_mongo_db + +# ruff: noqa: ANN003, ANN401, PLR0904 +# flake8: noqa: ANN003 + +_registry: dict[type[Document], dict] = {} + + +class QS[T: Document]: + """ + Async QuerySet Implementation + """ + + def __init__( + self, + model: type[T], + from_son: Callable[[dict], T] = None, + auto_deference: bool = False, + throw_pymongo_errors: bool = False, + session: AsyncClientSession | None = None, + read_preference: _ServerMode | None = None, + write_concern: WriteConcern | None = None, + read_concern: ReadConcern | None = None, + ) -> None: + """ + Async Queryset Constructor + + Args: + model (type[T]): Mongoengine Document class + from_son (Callable[[dict], T], optional): from_son callable. Defaults to model._from_son. + auto_deference (bool, optional): auto_deference in _from_son. Defaults to False. + throw_pymongo_errors (bool, optional): if true throws pymongo errors instead of mongoengine errors. + Defaults to False. 
+ session (AsyncClientSession, optional): session to use + read_preference: The read preference to use. + write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern` + read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. + """ + self._create_params = { + "model": model, + "from_son": from_son, + "auto_deference": auto_deference, + "session": session, + "read_preference": read_preference, + "write_concern": write_concern, + "read_concern": read_concern, + "throw_pymongo_errors": throw_pymongo_errors, + } + self._model: type[Document] = model + + if (cache := _registry.get(self._model)) is None: + # To initialise index if not done already + # noinspection PyProtectedMember + model._get_collection() + + self._loaded_fields: QueryFieldList = QueryFieldList() + self._cls_query = {} + + # If inheritance is allowed, only return instances and instances of + # subclasses of the class being used + + # noinspection PyProtectedMember,PyUnresolvedReferences + if self._model._meta.get("allow_inheritance") is True: + # noinspection PyProtectedMember,PyUnresolvedReferences + if len(self._model._subclasses) == 1: + # noinspection PyProtectedMember,PyUnresolvedReferences + self._cls_query = {"_cls": self._model._subclasses[0]} + else: + # noinspection PyProtectedMember, PyUnresolvedReferences + self._cls_query = {"_cls": {"$in": self._model._subclasses}} + self._loaded_fields = QueryFieldList(always_include=["_cls"]) + + self._pymongo_collection = async_mongo_db().get_collection(self._model._meta.get("collection")) + + _registry[self._model] = { + "_cls_query": self._cls_query, + "_loaded_fields": self._loaded_fields, + "_pymongo_collection": self._pymongo_collection, + } + else: + self._cls_query = cache.get("_cls_query") + self._loaded_fields = cache.get("_loaded_fields") + self._pymongo_collection = cache.get("_pymongo_collection") + + self._session: AsyncClientSession = session + self._collection: AsyncCollection = self._pymongo_collection.with_options( + read_preference=read_preference, + read_concern=read_concern, + write_concern=write_concern, + ) + + # noinspection PyProtectedMember + self._from_son = from_son or partial(self._model._from_son, _auto_dereference=auto_deference) + + self._throw_pymongo_errors = throw_pymongo_errors + + # Dynamic Data + self._query: Q = Q() + self._limit: int = 0 + self._skip: int = 0 + self._sort = None + self._comment = None + self._hint = None + self._let = None + + @property + def raw(self) -> AsyncCollection: + """ + Get the raw Async collection + + Note: Session and all other params needs to be passed manually + """ + return self._collection + + @property + def _mongo_query(self) -> dict: + _mongo_query = self._query.to_query(self._model) + if self._cls_query: + if "_cls" in _mongo_query: + _mongo_query = {"$and": [self._cls_query, _mongo_query]} + else: + _mongo_query.update(self._cls_query) + return _mongo_query + + def _build_cursor(self) -> AsyncCursor: + cursor = self._collection.find( + filter=self._mongo_query, + projection=self._loaded_fields.as_dict() if self._loaded_fields else None, + session=self._session, + hint=self._hint, + comment=self._comment, + ) + if self._limit: + cursor = cursor.limit(self._limit) + if self._skip: + cursor = cursor.skip(self._skip) + if self._sort: + cursor = cursor.sort(*self._sort) + + return cursor + + def clone(self) -> "QS[T]": + """Create a copy of the current queryset.""" + new_qs = QS(**self._create_params) + copy_props = ( + "_query", + "_loaded_fields", + "_limit", + "_skip", + 
"_sort", + "_comment", + "_hint", + "_let", + ) + + for prop in copy_props: + val = getattr(self, prop) + setattr(new_qs, prop, copy.copy(val)) + + return new_qs + + def filter(self, **query: Any) -> "QS[T]": + """ + Filter by given query. + + :param query: queries to filter + """ + new_qs = self.clone() + new_qs._query &= Q(**query) + return new_qs + + def limit(self, limit: int) -> "QS[T]": + """ + Limit the number of returned documents to `n`. + + :param limit: the maximum number of objects to return if n is greater than 0. + When 0 is passed, returns all the documents in the cursor + """ + new_qs = self.clone() + new_qs._limit = limit + return new_qs + + def comment(self, comment: str) -> "QS[T]": + """ + Set the command argument if applicable. + + :param comment: A user-provided comment to attach to this command. + """ + new_qs = self.clone() + new_qs._comment = comment + return new_qs + + def hint(self, hint: _IndexKeyHint) -> "QS[T]": + """ + An index to use to support the query predicate specified either by its string name, + or in the same format as passed to create_index() + + (e.g. [('field', ASCENDING)]). + + This option is only supported on MongoDB 4.2 and above. + + :param hint: hint to be used in applicable commands + """ + new_qs = self.clone() + new_qs._hint = hint + return new_qs + + def let(self, let: Mapping[str, Any]) -> "QS[T]": + """ + Map of parameter names and values. + Values must be constant or closed expressions that do not reference document fields. + Parameters can then be accessed as variables in an aggregate expression context (e.g. “$$var”) + + :param let: let to be used in applicable commands + """ + new_qs = self.clone() + new_qs._let = let + return new_qs + + def skip(self, skip: int) -> "QS[T]": + """ + Skip `skip` documents before returning the results. + + :param skip: the number of objects to skip before returning results + """ + new_qs = self.clone() + new_qs._skip = skip + return new_qs + + def sort(self, key_or_list: _Hint, direction: int | str | None = None) -> "QS[T]": + """ + Sorts this cursor's results. + + Pass a field name and a direction, either + :data:`~pymongo.ASCENDING` or :data:`~pymongo.DESCENDING`.:: + + async for doc in collection.find().sort('field', pymongo.ASCENDING): + print(doc) + + To sort by multiple fields, pass a list of (key, direction) pairs. + If just a name is given, :data:`~pymongo.ASCENDING` will be inferred:: + + async for doc in collection.find().sort([ + 'field1', + ('field2', pymongo.DESCENDING)]): + print(doc) + + Text search results can be sorted by relevance:: + + cursor = db.test.find( + {'$text': {'$search': 'some words'}}, + {'score': {'$meta': 'textScore'}}) + + # Sort by 'score' field. + cursor.sort([('score', {'$meta': 'textScore'})]) + + async for doc in cursor: + print(doc) + + For more advanced text search functionality, see MongoDB's + `Atlas Search `_. + + Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has + already been used. Only the last :meth:`sort` applied to this + cursor has any effect. + + :param key_or_list: a single key or a list of (key, direction) + pairs specifying the keys to sort on + :param direction: only used if `key_or_list` is a single + key, if not given :data:`~pymongo.ASCENDING` is assumed + """ + new_qs = self.clone() + new_qs._sort = (key_or_list, direction) + return new_qs + + def only(self, *fields: str) -> "QS[T]": + """ + Load only a subset of this document's fields. :: + + post = BlogPost.objects(...).only('title', 'author.name') + + .. 
note :: + `only()` is chainable and will perform a union :: + So with the following it will fetch both: `title` and `author.name`:: + + post = BlogPost.objects.only('title').only('author.name') + + :param fields: fields to include + """ + fields = dict.fromkeys(fields, QueryFieldList.ONLY) + return self.fields(_only_called=True, **fields) + + def exclude(self, *fields: str) -> "QS[T]": + """Opposite to .only(), exclude some document's fields. :: + + post = BlogPost.objects(...).exclude('comments') + + .. note :: `exclude()` is chainable and will perform a union :: + So with the following it will exclude both: `title` and `author.name`:: + + post = BlogPost.objects.exclude('title').exclude('author.name') + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + + :param fields: fields to exclude + """ + fields = dict.fromkeys(fields, QueryFieldList.EXCLUDE) + return self.fields(**fields) + + def _fields_to_db_fields(self, fields: list[str]) -> list[str]: + """Translate fields' paths to their db equivalents.""" + subclasses = [] + + # noinspection PyUnresolvedReferences,PyProtectedMember + if self._model._meta["allow_inheritance"]: + # noinspection PyUnresolvedReferences,PyProtectedMember + subclasses = [get_document(x) for x in self._model._subclasses][1:] + + db_field_paths: list[str] = [] + for field in fields: + field_parts = field.split(".") + try: + _field = ".".join( + f if isinstance(f, str) else f.db_field for f in self._model._lookup_field(field_parts) + ) + db_field_paths.append(_field) + except LookUpError as err: + found = False + + # If a field path wasn't found on the main document, go + # through its subclasses and see if it exists on any of them. + for subdoc in subclasses: + try: + subfield = ".".join( + f if isinstance(f, str) else f.db_field for f in subdoc._lookup_field(field_parts) + ) + db_field_paths.append(subfield) + found = True + break + except LookUpError: + pass + + if not found: + raise err + + return db_field_paths + + def fields(self, _only_called: bool = False, **kwargs: int) -> "QS[T]": + """ + Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. If called + directly, use a set of kwargs similar to the MongoDB projection + document. For example: + + Include only a subset of fields: + + posts = BlogPost.objects(...).fields(author=1, title=1) + + Exclude a specific field: + + posts = BlogPost.objects(...).fields(comments=0) + + To retrieve a subrange or sublist of array elements, + support exist for both the `slice` and `elemMatch` projection operator: + + posts = BlogPost.objects(...).fields(slice__comments=5) + posts = BlogPost.objects(...).fields(elemMatch__comments="test") + + :param kwargs: A set of keyword arguments identifying what to include, exclude, or slice. + :param _only_called: Internal use only + """ + + new_qs = self.clone() + + # Check for an operator and transform to mongo-style if there is + operators = ["slice", "elemMatch"] + cleaned_fields = [] + for key, value in kwargs.items(): + parts = key.split("__") + new_value = value + if parts[0] in operators: + operator = parts.pop(0) + new_value = {"$" + operator: value} + new_key = ".".join(parts) + cleaned_fields.append((new_key, new_value)) + + # Sort fields by their values, explicitly excluded fields first, then + # explicitly included, and then more complicated operators such as + # $slice. 
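+        # For example, fields(comments=0, title=1, slice__comments=5) is applied
+        # as: the exclusion (0) first, then the inclusion (1), and finally the
+        # complex {'$slice': 5} projection, matching the keys returned below.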
+ def _sort_key(field_tuple: tuple) -> int: + _, value = field_tuple + if isinstance(value, int): + return value # 0 for exclusion, 1 for inclusion + return 2 # so that complex values appear last + + fields = sorted(cleaned_fields, key=_sort_key) + + # Clone the queryset, group all fields by their value, convert + # each of them to db_fields, and set the queryset's _loaded_fields + for value, group in itertools.groupby(fields, lambda x: x[1]): + fields = [field for field, value in group] + fields = new_qs._fields_to_db_fields(fields) + new_qs._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) + + return new_qs + + def _validate_doc_param(self, doc: T | ObjectId) -> None: + """Validates passed doc""" + if not isinstance(doc, self._model | ObjectId): + raise ValueError(f"doc is not instance of {self._model} OR ObjectId") + + if isinstance(doc, self._model) and doc.id is None: + raise ValueError("doc does not have id") + + # ============================ MAIN OPS ============================ + + async def get(self, id: ObjectId | None = None) -> T: + """ + Get a document by id. + + :param id: Either can be passed here or used along with a filter + """ + new_qs = self + if id is not None: + new_qs = self.filter(id=id) + + data = await new_qs.to_list() + + if len(data) == 0: + raise DoesNotExist(f"{self._model._class_name} matching query does not exist.") + + if len(data) > 1: + raise MultipleObjectsReturned("2 or more items returned, instead of 1") + + return data.pop() + + async def insert(self, docs: list[T], load_bulk: bool = True) -> list[T] | list[ObjectId]: + """ + Bulk insert documents + + :param docs: a list of documents to be inserted + :param load_bulk: If True returns the list of document instances + + By default, returns document instances, set ``load_bulk`` to False to + return just ``ObjectIds`` + + Supports: + comment + """ + parsed_docs = [] + + for doc in docs: + if not isinstance(doc, self._model): + raise ValueError(f"Some documents inserted aren't instances of {self._model}") + if doc.pk and not doc._created: + raise ValueError("Some documents have ObjectIds, use doc.update() instead") + + doc.validate() + parsed_docs.append(doc.to_mongo()) + try: + result = await self._collection.insert_many(parsed_docs, session=self._session, comment=self._comment) + except DuplicateKeyError as err: + if self._throw_pymongo_errors: + raise err + raise NotUniqueError(f"Tried to save duplicate unique keys ({err})") from err + except OperationFailure as err: + if self._throw_pymongo_errors: + raise err + if re.match("^E1100[01] duplicate key", str(err)): + # E11000 - duplicate key error index + # E11001 - duplicate key on update + raise NotUniqueError(f"Tried to save duplicate unique keys ({err})") from err + raise OperationError(f"Could not save document ({err})") from err + + if not load_bulk: + return result.inserted_ids + + # Apply inserted_ids to documents + for doc, doc_id in zip(docs, result.inserted_ids): + doc.pk = doc_id + + return docs + + async def update(self, full_result: bool = False, **update: Any) -> UpdateResult | int: + """ + Updates the queryset result with given parameters. 
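+
+        A minimal sketch (as in mongoengine, bare keyword arguments are
+        transformed into a ``$set``; ``tenant_id`` here is hypothetical)::
+
+            matched = await QS(FooModel).filter(tenant=tenant_id).update(
+                start_date=datetime.date.today(),
+            )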
+ + :param full_result: If true, returns UpdateResult, else modified_count + :param update: fields to update + + Supports: + comment + hint + let + """ + update: dict | list + if not update: + raise ValueError("No update parameters, would remove data") + + query = self._mongo_query + if "__raw__" in update and isinstance(update["__raw__"], list): # Case of Update with Aggregation Pipeline + update = [transform.update(self._model, **{"__raw__": u}) for u in update["__raw__"]] + else: + update = transform.update(self._model, **update) + + result = await self._collection.update_many( + query, + update, + session=self._session, + comment=self._comment, + hint=self._hint, + let=self._let, + ) + if full_result: + return result + + return result.matched_count + + async def bulk_write( + self, + requests: Sequence[_WriteOp[_DocumentType]], + ordered: bool = True, + ) -> BulkWriteResult: + """ + Send a batch of write operations to the server. + + :param requests: A list of write operations (see examples above). + :param ordered: If ``True`` (the default) requests will be + performed on the server serially, in the order provided. If an error + occurs all remaining operations are aborted. If ``False`` requests + will be performed on the server in arbitrary order, possibly in + parallel, and all operations will be attempted. + + Supports: + comment + let + """ + return await self._collection.bulk_write( + requests=requests, + ordered=ordered, + session=self._session, + comment=self._comment, + let=self._let, + ) + + async def aggregate_cursor( + self, + pipeline: Sequence[Mapping[str, Any]], + allow_disk_use: bool | None = None, + max_time_ms: int | None = None, + batch_size: int | None = None, + collation: Collation | None = None, + ) -> AsyncCommandCursor[_DocumentType]: + """ + Perform an aggregation using the aggregation framework on this collection. + + Does not perform to_list() + + The :meth:`aggregate` method obeys the :attr:`read_preference` of this + :class:`AsyncCollection`, except when ``$out`` or ``$merge`` are used on + MongoDB <5.0, in which case + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` is used. + + .. note:: This method does not support the 'explain' option. Please + use `PyMongoExplain `_ + instead. An example is included in the :ref:`aggregate-examples` + documentation. + + .. note:: The :attr:`~pymongo.asynchronous.collection.AsyncCollection.write_concern` of + this collection is automatically applied to this operation. + + :param pipeline: a list of aggregation pipeline stages + :param allow_disk_use: Enables writing to temporary files. When set + to True, aggregation stages can write data to the _tmp subdirectory + of the --dbpath directory. The default is False. + :param max_time_ms: The maximum amount of time to allow the operation + to run in milliseconds. + :param batch_size: The maximum number of documents to return per + batch. Ignored if the connected mongod or mongos does not support + returning aggregate results using a cursor. + :param collation: An instance of :class:`~pymongo.collation.Collation`. 
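+
+        A minimal sketch (the returned AsyncCommandCursor also supports
+        ``async for``, so results need not be materialised with ``to_list()``)::
+
+            cursor = await QS(FooModel).aggregate_cursor([{"$project": {"_id": 1}}])
+            async for row in cursor:
+                print(row)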
+
+
+        Supports:
+            comment
+            hint
+            let
+        """
+
+        # The following options are passed through as **kwargs
+        kwargs = {}
+        if self._hint is not None:
+            kwargs["hint"] = self._hint
+        if allow_disk_use is not None:
+            kwargs["allowDiskUse"] = allow_disk_use
+        if max_time_ms is not None:
+            kwargs["maxTimeMS"] = max_time_ms
+        if batch_size is not None:
+            kwargs["batchSize"] = batch_size
+        if collation is not None:
+            kwargs["collation"] = collation
+
+        # Apply the subclass _cls query as the first stage, so that every
+        # later stage only sees documents of this class and its subclasses
+        if self._cls_query:
+            pipeline = list(pipeline)
+            if len(pipeline) > 0 and isinstance(pipeline[0], dict | Mapping) and "$match" in pipeline[0]:
+                # Pipeline already starts with a match: merge the _cls filter into it
+                first_stage = dict(pipeline[0])
+                first_stage["$match"] = {**first_stage["$match"], **self._cls_query}
+                pipeline[0] = first_stage
+            else:
+                pipeline.insert(0, {"$match": dict(self._cls_query)})
+
+        return await self._collection.aggregate(
+            pipeline,
+            session=self._session,
+            comment=self._comment,
+            let=self._let,
+            **kwargs,
+        )
+
+    async def aggregate(
+        self,
+        pipeline: Sequence[Mapping[str, Any]],
+        allow_disk_use: bool | None = None,
+        max_time_ms: int | None = None,
+        batch_size: int | None = None,
+        collation: Collation | None = None,
+    ) -> list[_DocumentType]:
+        """
+        Perform an aggregation using the aggregation framework on this collection.
+
+        Performs to_list() automatically
+
+        The :meth:`aggregate` method obeys the :attr:`read_preference` of this
+        :class:`AsyncCollection`, except when ``$out`` or ``$merge`` are used on
+        MongoDB <5.0, in which case
+        :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` is used.
+
+        .. note:: This method does not support the 'explain' option. Please
+           use `PyMongoExplain `_
+           instead. An example is included in the :ref:`aggregate-examples`
+           documentation.
+
+        .. note:: The :attr:`~pymongo.asynchronous.collection.AsyncCollection.write_concern` of
+           this collection is automatically applied to this operation.
+
+        :param pipeline: a list of aggregation pipeline stages
+        :param allow_disk_use: Enables writing to temporary files. When set
+            to True, aggregation stages can write data to the _tmp subdirectory
+            of the --dbpath directory. The default is False.
+        :param max_time_ms: The maximum amount of time to allow the operation
+            to run in milliseconds.
+        :param batch_size: The maximum number of documents to return per
+            batch. Ignored if the connected mongod or mongos does not support
+            returning aggregate results using a cursor.
+        :param collation: An instance of :class:`~pymongo.collation.Collation`.
+
+
+        Supports:
+            comment
+            hint
+            let
+        """
+        return await (
+            await self.aggregate_cursor(
+                pipeline,
+                allow_disk_use=allow_disk_use,
+                max_time_ms=max_time_ms,
+                batch_size=batch_size,
+                collation=collation,
+            )
+        ).to_list()
+
+    async def delete(self) -> DeleteResult:
+        """
+        Delete one or more documents matching the filter.
+
+        Supports:
+            comment
+            hint
+            let
+        """
+
+        delete_rules = self._model._meta.get("delete_rules") or {}
+
+        if delete_rules:
+            result = [doc.id for doc in await self.to_list()]
+
+            # Check for DENY rules before actually deleting/nullifying any other references
+            for rule_entry, _rule in delete_rules.items():
+                document_cls, field_name = rule_entry
+                if document_cls._meta.get("abstract"):
+                    continue
+
+                # TODO: this check implements the DENY rule, but it is currently enforced for every rule type
+                refs = QS(document_cls).filter(**{field_name + "__in": result}).limit(1)
+                if await refs.count() > 0:
+                    raise OperationError(
+                        f"Could not delete document ({document_cls.__name__}.{field_name} refers to it)"
+                    )
+
+        return await self._collection.delete_many(
+            filter=self._mongo_query,
+            session=self._session,
+            hint=self._hint,
+            comment=self._comment,
+            let=self._let,
+        )
+
+    async def to_list(self, length: int | None = None) -> list[T]:
+        """
+        Convert the query results to a list of documents.
+
+        :param length: read at most `length` documents from the cursor
+        """
+        return [self._from_son(data) for data in await self._build_cursor().to_list(length=length)]
+
+    async def count(self, max_time_ms: int | None = None, collation: Collation | None = None) -> int:
+        """
+        Get count of documents matching the filter.
+
+        :param max_time_ms: The maximum amount of time to allow the operation
+            to run in milliseconds.
+        :param collation: An instance of :class:`~pymongo.collation.Collation`.
+
+        Supports:
+            comment
+            hint
+        """
+        # The following options are passed through as **kwargs.
+        # Note: count_documents() accepts no sort option, so self._sort is ignored here.
+        kwargs = {}
+        if self._hint is not None:
+            kwargs["hint"] = self._hint
+        if self._limit:
+            kwargs["limit"] = self._limit
+        if self._skip:
+            kwargs["skip"] = self._skip
+        if max_time_ms is not None:
+            kwargs["maxTimeMS"] = max_time_ms
+        if collation is not None:
+            kwargs["collation"] = collation
+
+        return await self._collection.count_documents(
+            filter=self._mongo_query,
+            session=self._session,
+            comment=self._comment,
+            **kwargs,
+        )
+
+    # ============================ MONGOENGINE DOC OPS ============================
+
+    def doc(self, doc: T) -> "DocumentOp":
+        """Perform mongo document operations"""
+        return DocumentOp(document=doc, queryset=self)
+
+    async def insert_one(self, doc: T, validate: bool = True, force_insert: bool = False) -> T:
+        """
+        Save a document
+
+        Equivalent to mongoengine create()/save()
+
+        :param doc: document to be inserted
+        :param validate: whether to validate the document
+        :param force_insert: Allows insertion of a document using a custom ID
+
+        Supports:
+            comment
+        """
+        if doc._meta.get("abstract"):
+            raise InvalidDocumentError("Cannot save an abstract document.")
+
+        if validate:
+            doc.validate()
+
+        if doc.id is not None and not force_insert:
+            raise InvalidDocumentError("Cannot insert a document with ID. 
Use modify() instead.") + + try: + result = await self._collection.insert_one( + document=doc.to_mongo(), session=self._session, comment=self._comment + ) + object_id = result.inserted_id + except DuplicateKeyError as err: + raise NotUniqueError(f"Tried to save duplicate unique keys ({err})") from err + except OperationFailure as err: + if re.match("^E1100[01] duplicate key", str(err)): + # E11000 - duplicate key error index + # E11001 - duplicate key on update + raise NotUniqueError(f"Tried to save duplicate unique keys ({err})") from err + raise OperationError(f"Could not save document ({err})") from err + + id_field = doc._meta["id_field"] + doc[id_field] = doc._fields[id_field].to_python(object_id) + doc._clear_changed_fields() + doc._created = False + + return doc + + async def update_one(self, doc: T | ObjectId | None = None, upsert: bool = False, **update) -> T | None: + """ + Updates the given document/ObjectID/queryset with given parameters. + + Equivalent to mongoengine modify() + + :param doc: (optional) Document to be updated/ ObjectID. (default ``queryset``) + :param upsert: insert if document doesn't exist (default ``False``) + :param update: fields to update + + Returns: + Updated document + None if no matching document was found + + Supports: + comment + hint + let + """ + qs = self + if doc is not None: + self._validate_doc_param(doc) + qs = self.filter(id=doc.id if isinstance(doc, Document) else doc) + + if "__raw__" in update and isinstance(update["__raw__"], list): # Case of Update with Aggregation Pipeline + update = [transform.update(self._model, **{"__raw__": u}) for u in update["__raw__"]] + else: + update = transform.update(self._model, **update) + + try: + document = await qs._collection.find_one_and_update( + filter=qs._mongo_query, + update=update, + upsert=upsert, + sort=qs._sort, + return_document=ReturnDocument.AFTER, + projection=(self._loaded_fields.as_dict() if self._loaded_fields else None), + session=self._session, + comment=self._comment, + hint=self._hint, + let=self._let, + ) + except pymongo.errors.DuplicateKeyError as err: + raise NotUniqueError(f"Update failed ({err})") from err + except pymongo.errors.OperationFailure as err: + if str(err) == "multi not coded yet": + message = "update() method requires MongoDB 1.1.3+" + raise OperationError(message) from err + raise OperationError(f"Update failed ({err})") from err + + if not document: + return None + + updated_model: T = self._from_son(document) + if isinstance(doc, self._model): + for field in doc._fields_ordered: + setattr(doc, field, doc._reload(field, updated_model[field])) + + doc._changed_fields = updated_model._changed_fields + doc._created = False + + return doc + + return updated_model + + async def delete_one(self, doc: T | ObjectId) -> bool: + """ + Deletes the given document/ObjectID with given parameters. 
+ + Equivalent to mongoengine delete() + + :param doc: Document to be updated/ ObjectID + + Supports: + comment + hint + let + """ + self._validate_doc_param(doc) + result = await self.filter(id=doc.id if isinstance(doc, Document) else doc).delete() + return bool(result.deleted_count) + + +class DocumentOp[T: Document]: + def __init__(self, document: T, queryset: QS[T]) -> None: + self.document: Document = document + self.queryset = queryset + + async def save(self, validate: bool = True, force_insert: bool = False) -> T: + """ + Save a document + + :param validate: whether to validate the document + :param force_insert: Allows insertion of a document using a custom ID + + Supports: + comment + """ + return await self.queryset.insert_one(doc=self.document, validate=validate, force_insert=force_insert) + + async def modify(self, upsert: bool = False, **update) -> T | None: + """ + Updates the given document/ObjectID with given parameters. + + :param upsert: insert if document doesn't exist (default ``False``) + :param update: fields to update + + Supports: + comment + hint + let + """ + return await self.queryset.update_one(doc=self.document, upsert=upsert, **update) + + async def delete(self) -> bool: + """ + Deletes the given document/ObjectID with given parameters. + + Supports: + comment + hint + let + """ + return await self.queryset.delete_one(doc=self.document) diff --git a/mongoengine_async_extension/exceptions.py b/mongoengine_async_extension/exceptions.py new file mode 100644 index 0000000..0b13261 --- /dev/null +++ b/mongoengine_async_extension/exceptions.py @@ -0,0 +1,21 @@ +class MongoengineAsyncException(Exception): + pass + + +class MongoengineAsyncDependencyMissingException(Exception): + pass + + +# Transaction Exceptions +class MongoengineAsyncTransactionException(MongoengineAsyncException): + pass + + +class MongoengineAsyncTransactionCommitException(MongoengineAsyncException): + def __init__(self, message: str): + super().__init__(message) + + +class MongoengineAsyncTransactionAbortException(MongoengineAsyncException): + def __init__(self, message: str): + super().__init__(message) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..2175b0b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,609 @@ +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. + +[[package]] +name = "bandit" +version = "1.8.6" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "bandit-1.8.6-py3-none-any.whl", hash = "sha256:3348e934d736fcdb68b6aa4030487097e23a501adf3e7827b63658df464dddd0"}, + {file = "bandit-1.8.6.tar.gz", hash = "sha256:dbfe9c25fc6961c2078593de55fd19f2559f9e45b99f1272341f5b95dea4e56b"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0) ; python_version < \"3.11\""] +yaml = ["PyYAML"] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distlib" +version = "0.4.0" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}, + {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "faker" +version = "37.4.2" +description = "Faker is a Python package that generates fake data for you." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "faker-37.4.2-py3-none-any.whl", hash = "sha256:b70ed1af57bfe988cbcd0afd95f4768c51eaf4e1ce8a30962e127ac5c139c93f"}, + {file = "faker-37.4.2.tar.gz", hash = "sha256:8e281bbaea30e5658895b8bea21cc50d27aaf3a43db3f2694409ca5701c56b0a"}, +] + +[package.dependencies] +tzdata = "*" + +[[package]] +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "identify" +version = "2.6.12" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}, + {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mongoengine" +version = "0.29.1" +description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mongoengine-0.29.1-py3-none-any.whl", hash = "sha256:9302ec407dd60f47f62cc07684d9f6cac87f1e93283c54203851788104d33df4"}, + {file = "mongoengine-0.29.1.tar.gz", hash = "sha256:3b43abaf2d5f0b7d39efc2b7d9e78f4d4a5dc7ce92b9889ba81a5a9b8dee3cf3"}, +] + +[package.dependencies] +pymongo = ">=3.4,<5.0" + +[package.extras] +test = ["Pillow (>=7.0.0)", "blinker", "coverage", "pytest", "pytest-cov"] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pbr" +version = "6.1.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +groups = ["dev"] +files = [ + {file = "pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76"}, + {file = "pbr-6.1.1.tar.gz", hash = "sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "4.2.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pymongo" +version = "4.13.2" +description = "PyMongo - the Official MongoDB Python driver" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pymongo-4.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:01065eb1838e3621a30045ab14d1a60ee62e01f65b7cf154e69c5c722ef14d2f"}, + {file = "pymongo-4.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ab0325d436075f5f1901cde95afae811141d162bc42d9a5befb647fda585ae6"}, + {file = "pymongo-4.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdd8041902963c84dc4e27034fa045ac55fabcb2a4ba5b68b880678557573e70"}, + {file = "pymongo-4.13.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b00ab04630aa4af97294e9abdbe0506242396269619c26f5761fd7b2524ef501"}, + {file = "pymongo-4.13.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16440d0da30ba804c6c01ea730405fdbbb476eae760588ea09e6e7d28afc06de"}, + {file = "pymongo-4.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad9a2d1357aed5d6750deb315f62cb6f5b3c4c03ffb650da559cb09cb29e6fe8"}, + {file = "pymongo-4.13.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c793223aef21a8c415c840af1ca36c55a05d6fa3297378da35de3fb6661c0174"}, + {file = "pymongo-4.13.2-cp310-cp310-win32.whl", hash = "sha256:8ef6ae029a3390565a0510c872624514dde350007275ecd8126b09175aa02cca"}, + {file = "pymongo-4.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:66f168f8c5b1e2e3d518507cf9f200f0c86ac79e2b2be9e7b6c8fd1e2f7d7824"}, + {file = "pymongo-4.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7af8c56d0a7fcaf966d5292e951f308fb1f8bac080257349e14742725fd7990d"}, + {file = "pymongo-4.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad24f5864706f052b05069a6bc59ff875026e28709548131448fe1e40fc5d80f"}, + {file = "pymongo-4.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a10069454195d1d2dda98d681b1dbac9a425f4b0fe744aed5230c734021c1cb9"}, + {file = "pymongo-4.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e20862b81e3863bcd72334e3577a3107604553b614a8d25ee1bb2caaea4eb90"}, + {file = "pymongo-4.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b4d5794ca408317c985d7acfb346a60f96f85a7c221d512ff0ecb3cce9d6110"}, + {file = "pymongo-4.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9c8e0420fb4901006ae7893e76108c2a36a343b4f8922466d51c45e9e2ceb717"}, + {file = "pymongo-4.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:239b5f83b83008471d54095e145d4c010f534af99e87cc8877fc6827736451a0"}, + {file = "pymongo-4.13.2-cp311-cp311-win32.whl", hash = "sha256:6bceb524110c32319eb7119422e400dbcafc5b21bcc430d2049a894f69b604e5"}, + {file = "pymongo-4.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab87484c97ae837b0a7bbdaa978fa932fbb6acada3f42c3b2bee99121a594715"}, + {file = "pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6"}, + {file = "pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92"}, + {file = "pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820"}, + {file = "pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b"}, + {file = "pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab"}, + {file = "pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06"}, + {file = "pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94"}, + {file = "pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467"}, + {file = "pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a"}, + {file = "pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124"}, + {file = "pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b"}, + {file = "pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4"}, + {file = "pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba"}, + {file = "pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478"}, + {file = "pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504"}, + {file = "pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b"}, + {file = "pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1"}, + {file = "pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb"}, + {file = "pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700"}, + {file = "pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c"}, + {file = "pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9"}, + {file = "pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5"}, + {file = "pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541"}, + {file = "pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403"}, + {file = "pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa"}, + {file = "pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e"}, + {file = "pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8"}, + {file = "pymongo-4.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a89739a86da31adcef41f6c3ae62b38a8bad156bba71fe5898871746c5af83"}, + {file = "pymongo-4.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de529aebd1ddae2de778d926b3e8e2e42a9b37b5c668396aad8f28af75e606f9"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cc7d4cd7586c1c4f7af2b97447404046c2d8e7ed4c7214ed0e21dbeb17d57d"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:884cb88a9d4c4c9810056b9c71817bd9714bbe58c461f32b65be60c56759823b"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389cb6415ec341c73f81fbf54970ccd0cd5d3fa7c238dcdb072db051d24e2cb4"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49f9968ea7e6a86d4c9bd31d2095f0419efc498ea5e6067e75ade1f9e64aea3d"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae07315bb106719c678477e61077cd28505bb7d3fd0a2341e75a9510118cb785"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4dc60b3f5e1448fd011c729ad5d8735f603b0a08a8773ec8e34a876ccc7de45f"}, + {file = "pymongo-4.13.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:75462d6ce34fb2dd98f8ac3732a7a1a1fbb2e293c4f6e615766731d044ad730e"}, + {file = "pymongo-4.13.2-cp39-cp39-win32.whl", hash = "sha256:b7e04c45f6a7d5a13fe064f42130d29b0730cb83dd387a623563ff3b9bd2f4d1"}, + {file = "pymongo-4.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:0603145c9be5e195ae61ba7a93eb283abafdbd87f6f30e6c2dfc242940fe280c"}, + {file = "pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2"}, +] + +[package.dependencies] +dnspython = ">=1.16.0,<3.0.0" + +[package.extras] +aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"] +docs = ["furo (==2024.8.6)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<9)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<4)", 
"sphinxcontrib-shellcheck (>=1,<2)"] +encryption = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.13.0,<2.0.0)"] +gssapi = ["pykerberos ; os_name != \"nt\"", "winkerberos (>=0.5.0) ; os_name == \"nt\""] +ocsp = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +test = ["pytest (>=8.2)", "pytest-asyncio (>=0.24.0)"] +zstd = ["zstandard"] + +[[package]] +name = "pytest" +version = "8.4.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "1.1.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf"}, + {file = "pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "rich" +version = "14.1.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f"}, + {file = "rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + 
+[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.12.5" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.12.5-py3-none-linux_armv6l.whl", hash = "sha256:1de2c887e9dec6cb31fcb9948299de5b2db38144e66403b9660c9548a67abd92"}, + {file = "ruff-0.12.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d1ab65e7d8152f519e7dea4de892317c9da7a108da1c56b6a3c1d5e7cf4c5e9a"}, + {file = "ruff-0.12.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:962775ed5b27c7aa3fdc0d8f4d4433deae7659ef99ea20f783d666e77338b8cf"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b4cae449597e7195a49eb1cdca89fd9fbb16140c7579899e87f4c85bf82f73"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b13489c3dc50de5e2d40110c0cce371e00186b880842e245186ca862bf9a1ac"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1504fea81461cf4841778b3ef0a078757602a3b3ea4b008feb1308cb3f23e08"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c7da4129016ae26c32dfcbd5b671fe652b5ab7fc40095d80dcff78175e7eddd4"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca972c80f7ebcfd8af75a0f18b17c42d9f1ef203d163669150453f50ca98ab7b"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbf9f25dfb501f4237ae7501d6364b76a01341c6f1b2cd6764fe449124bb2a"}, + {file = "ruff-0.12.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c47dea6ae39421851685141ba9734767f960113d51e83fd7bb9958d5be8763a"}, + {file = "ruff-0.12.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5076aa0e61e30f848846f0265c873c249d4b558105b221be1828f9f79903dc5"}, + {file = "ruff-0.12.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a5a4c7830dadd3d8c39b1cc85386e2c1e62344f20766be6f173c22fb5f72f293"}, + {file = "ruff-0.12.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:46699f73c2b5b137b9dc0fc1a190b43e35b008b398c6066ea1350cce6326adcb"}, + {file = "ruff-0.12.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a655a0a0d396f0f072faafc18ebd59adde8ca85fb848dc1b0d9f024b9c4d3bb"}, + {file = "ruff-0.12.5-py3-none-win32.whl", hash = "sha256:dfeb2627c459b0b78ca2bbdc38dd11cc9a0a88bf91db982058b26ce41714ffa9"}, + {file = "ruff-0.12.5-py3-none-win_amd64.whl", hash = "sha256:ae0d90cf5f49466c954991b9d8b953bd093c32c27608e409ae3564c63c5306a5"}, + {file = "ruff-0.12.5-py3-none-win_arm64.whl", hash = "sha256:48cdbfc633de2c5c37d9f090ba3b352d1576b0015bfc3bc98eaf230275b7e805"}, + {file = "ruff-0.12.5.tar.gz", hash = "sha256:b209db6102b66f13625940b7f8c7d0f18e20039bb7f6101fbdac935c9612057e"}, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = 
["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "stevedore" +version = "5.4.1" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe"}, + {file = "stevedore-5.4.1.tar.gz", hash = "sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b"}, +] + +[package.dependencies] +pbr = ">=2.0.0" + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["dev"] +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[[package]] +name = "virtualenv" +version = "20.32.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56"}, + {file = "virtualenv-20.32.0.tar.gz", hash = "sha256:886bf75cadfdc964674e6e33eb74d787dff31ca314ceace03ca5810620f4ecf0"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == 
\"CPython\""] + +[metadata] +lock-version = "2.1" +python-versions = "^3.12" +content-hash = "c2fc7443d483f6b87e876efa1978fc158a61d06898424370dddacd8a7de842f2" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..24c41ea --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,54 @@ +[tool.poetry] +name = "monogengine-async-extension" +version = "0.1.0" +description = "Pymongo Async Extension for mongoengine" +authors = ["Strollby "] +packages = [{include = "mongoengine_async_extension"}] +requires-poetry = ">=2.0" +readme = "README.md" +license = "MIT" +homepage = "https://github.com/strollby/mongoengine-async-extension" +repository = "https://github.com/strollby/mongoengine-async-extension" +documentation = "https://github.com/strollby/mongoengine-async-extension" +keywords = [ + "mongoengine-async-extension", + "mongoengine", + "async", + "pymongo", + "pymongo-async", +] +classifiers = [ + "Intended Audience :: Developers", + "Framework :: AsyncIO", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Database", +] + +[tool.poetry.dependencies] +python = "^3.12" +mongoengine = "*" + +[tool.poetry.group.dev.dependencies] +bandit = "*" +pre-commit = "*" +ruff = "*" +pytest = "*" +pytest-asyncio = "*" +faker = "*" + +[tool.ruff] +line-length = 120 +target-version = "py312" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..8646397 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +# pytest.ini +[pytest] +asyncio_mode = auto diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..1a971c6 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,13 @@ +import pytest +from mongoengine import connect + +from mongoengine_async_extension import async_mongo_connect + +DB_NAME = "mongoengine_async_test" + + +@pytest.fixture(scope="session", autouse=True) +async def setup_mongodb(): + connect(db=DB_NAME) # Sync + client = async_mongo_connect(db=DB_NAME) # Async + await client.drop_database(DB_NAME) # Clear DB diff --git a/tests/db/__init__.py b/tests/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/db/model.py b/tests/db/model.py new file mode 100644 index 0000000..cfae0e8 --- /dev/null +++ b/tests/db/model.py @@ -0,0 +1,16 @@ +from mongoengine import DateField, Document, ObjectIdField + + +class FooModel(Document): + meta = { + "collection": "foo", + "indexes": [ + {"fields": ["bar"]}, + {"name": "default sort index", "fields": ["bar", "start_date"]}, + {"fields": ["start_date"]}, + ], + } + + start_date = DateField(required=True, description="Start date") + end_date = DateField(required=True, description="End date") + bar = ObjectIdField(required=True) diff --git a/tests/test_operations.py b/tests/test_operations.py new file mode 100644 index 0000000..069ba60 --- /dev/null +++ b/tests/test_operations.py @@ -0,0 +1,340 @@ +import datetime +import re +from typing import Any, AsyncGenerator + +import pymongo +from 
+
+
+@pytest.fixture(scope="session", autouse=True)
+def bar_ref() -> ObjectId:
+    """Reference to a BarModel object"""
+    return ObjectId()
+
+
+@pytest.fixture(scope="session", autouse=True)
+async def inserted_docs(bar_ref: ObjectId) -> AsyncGenerator[list[FooModel], Any]:
+    """
+    Fixture to insert a set of documents before tests and clean them up afterwards.
+
+    Yields:
+        list[FooModel]: A list of the newly inserted FooModel documents.
+    """
+    queryset = QS(FooModel)
+    docs = [FooModel(start_date=fake.date(), end_date=fake.date(), bar=bar_ref) for _ in range(BATCH_SIZE)]
+    new_docs = await queryset.insert(docs=docs, load_bulk=True)
+    yield new_docs
+    result = await queryset.filter(bar=bar_ref).delete()
+    assert result.deleted_count == BATCH_SIZE
+
+
+# ------------------- Tests -------------------
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_listing() -> None:
+    """
+    Tests the basic listing functionality with limit, only, and exclude.
+    """
+    queryset = QS(FooModel)
+    results = await queryset.limit(10).only("id", "start_date").sort("start_date").to_list()
+    assert len(results) == 10
+    assert {"_id", "start_date"} == set(results[0].to_mongo().keys())
+
+    exclude_result = await queryset.filter(id__in=[results[0].id]).exclude("start_date").to_list()
+    assert "start_date" not in exclude_result[0].to_mongo()
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_get_single(inserted_docs: list[FooModel]) -> None:
+    """
+    Tests retrieving a single document using `get()`.
+
+    Args:
+        inserted_docs (list[FooModel]): Documents inserted by the fixture.
+    """
+    queryset = QS(FooModel)
+    doc = inserted_docs[0]
+    one = await queryset.filter(id=doc.id).get()
+    assert one.id == doc.id
+
+    one2 = await queryset.get(id=doc.id)
+    assert one2.id == doc.id
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_count(inserted_docs: list[FooModel], bar_ref: ObjectId) -> None:
+    """
+    Tests counting documents matching a filter.
+
+    Args:
+        inserted_docs (list[FooModel]): Documents inserted by the fixture.
+    """
+    queryset = QS(FooModel)
+    first_doc = inserted_docs[0]
+    count = await queryset.filter(id=first_doc.id).count()
+    assert count == 1
+    count = await queryset.filter(bar=bar_ref).count()
+    assert count == BATCH_SIZE
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_update(bar_ref: ObjectId) -> None:
+    """
+    Tests updating multiple documents in a queryset.
+    """
+    queryset = QS(FooModel)
+    updated_count = await queryset.filter(bar=bar_ref).update(start_date=datetime.datetime.now())
+    assert updated_count == BATCH_SIZE
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_delete() -> None:
+    """
+    Tests deleting documents using a queryset.
+    """
+    queryset = QS(FooModel)
+    doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId())
+    await queryset.doc(doc).save()
+
+    result: DeleteResult = await queryset.filter(id=doc.pk).delete()
+    assert isinstance(result, DeleteResult)
+    assert result.deleted_count > 0
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_aggregation() -> None:
+    """
+    Tests aggregation using both `aggregate_cursor` and `aggregate`.
+ """ + queryset = QS(FooModel) + cursor_result = await (await queryset.aggregate_cursor(pipeline=[{"$project": {"_id": 1}}])).to_list() + agg_result = await queryset.aggregate(pipeline=[{"$project": {"_id": 1}}]) + assert isinstance(cursor_result, list) + assert isinstance(agg_result, list) + assert len(cursor_result) > 0 + assert cursor_result[0] == agg_result[0] + + +@pytest.mark.asyncio(scope="session") +async def test_document_level_create_modify_delete() -> None: + """ + Tests document-level operations: save, modify, and delete. + """ + queryset = QS(FooModel) + new_doc = FooModel(start_date=fake.past_date(), end_date=fake.date(), bar=ObjectId()) + await queryset.doc(new_doc).save() + + doc = await queryset.get(id=new_doc.pk) + updated_doc = await queryset.doc(doc).modify( + start_date=datetime.datetime.now().replace(year=2999), + ) + assert updated_doc.start_date.year == 2999 + + deleted = await queryset.doc(doc).delete() + assert deleted + + +@pytest.mark.asyncio(scope="session") +async def test_transaction() -> None: + """ + Tests basic transaction functionality with document operations within a session. + """ + db = async_mongo_client() + session = db.start_session() + + async with await session.start_transaction(): + transactional_qs = QS(FooModel, session=session) + new_doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId()) + await transactional_qs.doc(new_doc).save() + + doc = await transactional_qs.get(id=new_doc.pk) + await transactional_qs.doc(doc).modify(start_date=datetime.datetime.now().replace(year=2999)) + + deleted = await transactional_qs.doc(doc).delete() + assert deleted + + +@pytest.mark.asyncio(scope="session") +async def test_filter_combinations(inserted_docs: list[FooModel]) -> None: + """ + Tests chaining multiple `filter` calls to refine a query. + + Args: + inserted_docs (list[FooModel]): Documents inserted by the fixture. + """ + queryset = QS(FooModel) + doc_id = inserted_docs[0].id + results = await queryset.filter(id=doc_id).filter(bar=inserted_docs[0].bar).to_list() + assert len(results) == 1 + assert results[0].id == doc_id + + +@pytest.mark.asyncio(scope="session") +async def test_limit_skip_sort_chaining() -> None: + """ + Tests chaining `limit`, `skip`, and `sort` methods. + """ + queryset = QS(FooModel) + results = await queryset.sort("start_date", pymongo.ASCENDING).skip(1).limit(1).to_list() + assert len(results) == 1 + + +@pytest.mark.asyncio(scope="session") +async def test_no_update_parameters() -> None: + """ + Tests that calling `update` without any update parameters raises a ValueError. + """ + queryset = QS(FooModel) + with pytest.raises(ValueError, match="No update parameters, would remove data"): + await queryset.update() + + +@pytest.mark.asyncio(scope="session") +async def test_insert_with_existing_id() -> None: + """ + Tests that `insert_one` raises an InvalidDocumentError when attempting to + insert a document that already has an ID, without `force_insert`. + """ + queryset = QS(FooModel) + doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId()) + await queryset.insert_one(doc) # First insert to give it an ID + with pytest.raises( + InvalidDocumentError, match=re.escape("Cannot insert a document with ID. Use modify() instead.") + ): + await queryset.insert_one(doc) + + +@pytest.mark.asyncio(scope="session") +async def test_update_one_by_objectid(inserted_docs: list[FooModel]) -> None: + """ + Tests `update_one` by passing an ObjectId directly as the `doc` parameter. 
+
+    Args:
+        inserted_docs (list[FooModel]): Documents inserted by the fixture.
+    """
+    queryset = QS(FooModel)
+    doc_id = inserted_docs[1].id
+    new_date = datetime.datetime.now().replace(year=2050)
+    updated_doc: FooModel = await queryset.update_one(doc=doc_id, start_date=new_date)
+    assert updated_doc is not None
+    assert updated_doc.id == doc_id
+    assert updated_doc.start_date.year == 2050
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_update_one_upsert() -> None:
+    """
+    Tests the `upsert` functionality of `update_one`, ensuring a new document
+    is created if no match is found.
+    """
+    queryset = QS(FooModel)
+    non_existent_id = ObjectId()
+    new_date = datetime.datetime.now().replace(year=2060)
+    upserted_doc: FooModel = await queryset.filter(id=non_existent_id).update_one(
+        upsert=True, start_date=new_date, end_date=fake.date(), bar=ObjectId()
+    )
+    assert upserted_doc is not None
+    assert upserted_doc.id == non_existent_id
+    assert upserted_doc.start_date.year == 2060
+    # Clean up the upserted document
+    await queryset.filter(id=non_existent_id).delete()
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_get_does_not_exist() -> None:
+    """
+    Tests that `get` raises a `DoesNotExist` error when no document matches the query.
+    """
+    queryset = QS(FooModel)
+    non_existent_id = ObjectId()
+    with pytest.raises(DoesNotExist, match="FooModel matching query does not exist."):
+        await queryset.get(id=non_existent_id)
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_get_multiple_objects_returned(inserted_docs: list[FooModel]) -> None:
+    """
+    Tests that `get` raises a `MultipleObjectsReturned` error when more than one
+    document matches the query.
+
+    Args:
+        inserted_docs (list[FooModel]): Documents inserted by the fixture.
+    """
+    queryset = QS(FooModel)
+    # Filter to ensure more than one document is returned
+    with pytest.raises(MultipleObjectsReturned, match="2 or more items returned, instead of 1"):
+        await queryset.filter(bar=inserted_docs[0].bar).get()
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_comment_hint_let() -> None:
+    """
+    Tests the application of `comment`, `hint`, and `let` options across
+    various queryset operations.
+    """
+    queryset = QS(FooModel)
+    test_comment = "This is a test comment"
+    # hint given as an index specification (field -> direction)
+    test_hint = {"start_date": pymongo.ASCENDING}
+    test_let = {"foo": 1}  # example let variable
+
+    doc = FooModel(start_date=fake.date(), end_date=fake.date(), bar=ObjectId())
+
+    # Test with insert_one
+    result_insert = await queryset.comment(test_comment).insert_one(doc)
+    assert result_insert.id is not None
+
+    # Test with update
+    updated_count = (
+        await queryset.filter(id=doc.id)
+        .comment(test_comment)
+        .hint(test_hint)
+        .let(test_let)
+        .update(start_date=datetime.datetime.now())
+    )
+    assert updated_count > 0
+
+    # Test with delete
+    delete_result = await queryset.filter(id=doc.id).comment(test_comment).hint(test_hint).let(test_let).delete()
+    assert delete_result.deleted_count > 0
+
+    # Test with aggregate; hint accepts an index specification such as test_hint,
+    # or the name of a declared index passed as a string (e.g. "default sort index").
+    await queryset.comment(test_comment).hint(test_hint).let(test_let).aggregate(pipeline=[{"$match": {"_id": doc.id}}])
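A short aside on the two hint forms exercised above (a sketch; the named index comes from `FooModel.meta["indexes"]` in `tests/db/model.py`):

```python
import pymongo

from mongoengine_async_extension import QS
from tests.db.model import FooModel

# hint by index specification: mapping of field -> direction
by_spec = QS(FooModel).hint({"start_date": pymongo.ASCENDING})

# hint by index name, as declared in FooModel's meta
by_name = QS(FooModel).hint("default sort index")
```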
+
+
+@pytest.mark.asyncio(scope="session")
+async def test_insert_one_with_force_insert() -> None:
+    """
+    Tests `insert_one` with `force_insert=True` to allow inserting a document
+    with a pre-defined custom ID.
+    """
+    queryset = QS(FooModel)
+    custom_id = ObjectId()
+    doc = FooModel(id=custom_id, start_date=fake.date(), end_date=fake.date(), bar=ObjectId())
+
+    # Insert with force_insert=True
+    inserted_doc = await queryset.insert_one(doc, force_insert=True)
+    assert inserted_doc.id == custom_id
+
+    # Verify it exists
+    retrieved_doc = await queryset.get(id=custom_id)
+    assert retrieved_doc.id == custom_id
+
+    # Clean up
+    await queryset.delete_one(doc)
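Taken together, the document-level API from `core.py` reduces to a short round trip (a sketch assuming connections as in `tests/conftest.py`; field values are illustrative):

```python
import datetime

from bson import ObjectId

from mongoengine_async_extension import QS
from tests.db.model import FooModel


async def document_roundtrip() -> bool:
    qs = QS(FooModel)
    today = datetime.date.today()
    doc = FooModel(start_date=today, end_date=today, bar=ObjectId())

    await qs.doc(doc).save()                  # DocumentOp.save -> insert_one
    await qs.doc(doc).modify(end_date=today)  # DocumentOp.modify -> update_one
    return await qs.doc(doc).delete()         # DocumentOp.delete -> delete_one
```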