pax_global_header00006660000000000000000000000064145264143420014517gustar00rootroot0000000000000052 comment=5e40a28bd543af6717b8d50258a9a6316949f81e qasync-0.27.1/000077500000000000000000000000001452641434200131045ustar00rootroot00000000000000qasync-0.27.1/.github/000077500000000000000000000000001452641434200144445ustar00rootroot00000000000000qasync-0.27.1/.github/dependabot.yml000066400000000000000000000004371452641434200173000ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "monthly" # disable version updates for dependencies open-pull-requests-limit: 0 - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" qasync-0.27.1/.github/workflows/000077500000000000000000000000001452641434200165015ustar00rootroot00000000000000qasync-0.27.1/.github/workflows/main.yml000066400000000000000000000063551452641434200201610ustar00rootroot00000000000000name: Tests on: push: branches: - master - develop pull_request: branches: - '**' concurrency: group: tests-${{ github.head_ref || github.ref }} cancel-in-progress: ${{ github.event_name == 'pull_request' }} jobs: tests: name: ${{ matrix.os }} / ${{ matrix.python-version }} runs-on: ${{ matrix.image }} strategy: matrix: os: [Ubuntu, Windows, MacOS] python-version: ["3.8", "3.9", "3.10"] qt-version: ["pyside2", "pyside6", "pyqt5", "pyqt6"] include: - os: Ubuntu image: ubuntu-20.04 - os: Windows image: windows-2022 - os: MacOS image: macos-12 fail-fast: false defaults: run: shell: bash steps: - name: Checkout uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Get full Python version id: full-python-version run: echo version=$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))") >> $GITHUB_OUTPUT - name: Bootstrap poetry run: | curl -sSL https://install.python-poetry.org | python - -y - name: Update Path if: ${{ matrix.os != 'Windows' }} run: echo "$HOME/.local/bin" >> $GITHUB_PATH - name: Update Path for Windows if: ${{ matrix.os == 'Windows' }} run: echo "$APPDATA\Python\Scripts" >> $GITHUB_PATH - name: Enable long paths on Windows if: ${{ matrix.os == 'Windows' }} run: git config --system core.longpaths true - name: Configure poetry run: poetry config virtualenvs.in-project true - name: Setup cache uses: actions/cache@v3 id: cache with: path: .venv key: venv-${{ runner.os }}-${{ matrix.qt-version }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }} - name: Valdate cache if: steps.cache.outputs.cache-hit == 'true' run: | # `timeout` is not available on macOS, so we define a custom function. [ "$(command -v timeout)" ] || function timeout() { perl -e 'alarm shift; exec @ARGV' "$@"; } # Using `timeout` is a safeguard against the Poetry command hanging for some reason. 
timeout 10s poetry run pip --version || rm -rf .venv - name: Check lock file run: poetry check --lock - name: Install dependencies run: poetry install --with github-actions # - name: Run mypy # run: poetry run mypy - name: Install Qt run: poetry run pip install ${{ matrix.qt-version }} - name: Install libxcb dependencies if: ${{ matrix.os == 'Ubuntu' }} env: DEBIAN_FRONTEND: noninteractive run: | sudo apt-get -qq update sudo apt-get -qq install '^libxcb.*-dev' libx11-xcb-dev libglu1-mesa-dev libxrender-dev libxi-dev libxkbcommon-dev libxkbcommon-x11-dev - name: Run pytest uses: coactions/setup-xvfb@v1 env: QT_API: ${{ matrix.qt-version }} with: run: poetry run pytest --cov qasync -v qasync-0.27.1/.github/workflows/release.yml000066400000000000000000000021731452641434200206470ustar00rootroot00000000000000name: Release on: push: tags: - '*.*.*' jobs: release: name: Release runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install Poetry run: | curl -sSL https://install.python-poetry.org | python - -y - name: Update PATH run: echo "$HOME/.local/bin" >> $GITHUB_PATH - name: Build project for distribution run: poetry build - name: Check Version id: check-version run: | [[ "$(poetry version --short)" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] || echo prerelease=true >> $GITHUB_OUTPUT - name: Create Release uses: ncipollo/release-action@v1 with: artifacts: "dist/*" token: ${{ secrets.GITHUB_TOKEN }} draft: false prerelease: steps.check-version.outputs.prerelease == 'true' - name: Publish to PyPI env: POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} run: poetry publish qasync-0.27.1/.gitignore000066400000000000000000000005401452641434200150730ustar00rootroot00000000000000# editors .idea/* .vscode/* .python-version .DS_Store # python .venv __pycache__/ *.py[cod] *$py.class .mypy_cache # packaging *.egg !/tests/**/*.egg /*.egg-info /dist/* build _build .cache *.so # logs pip-log.txt # testing / coverage .coverage .pytest_cache # release /setup.cfg MANIFEST.in # /setup.py /releases/* pip-wheel-metadata poetry.toml qasync-0.27.1/.pre-commit-config.yaml000066400000000000000000000010641452641434200173660ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer exclude: ^.*\.egg-info/ - id: check-merge-conflict - id: check-case-conflict - id: check-toml - id: check-yaml - id: check-ast - id: check-docstring-first - repo: https://github.com/psf/black-pre-commit-mirror rev: 23.9.1 hooks: - id: black - repo: https://github.com/pre-commit/pre-commit rev: v3.4.0 hooks: - id: validate_manifest qasync-0.27.1/.pre-commit-hooks.yaml000066400000000000000000000016161452641434200172470ustar00rootroot00000000000000- id: poetry-check name: poetry-check description: run poetry check to validate config entry: poetry check language: python pass_filenames: false files: ^(.*/)?pyproject\.toml$ - id: poetry-lock name: poetry-lock description: run poetry lock to update lock file entry: poetry lock language: python pass_filenames: false files: ^(.*/)?(poetry\.lock|pyproject\.toml)$ - id: poetry-export name: poetry-export description: run poetry export to sync lock file with requirements.txt entry: poetry export language: python pass_filenames: false files: ^(.*/)?poetry\.lock$ args: ["-f", "requirements.txt", "-o", "requirements.txt"] - id: poetry-install name: poetry-install description: run poetry install to 
install dependencies from the lock file entry: poetry install language: python pass_filenames: false stages: [post-checkout, post-merge] always_run: true qasync-0.27.1/LICENSE000066400000000000000000000025541452641434200141170ustar00rootroot00000000000000Copyright (c) 2019, Sam McCormack Copyright (c) 2018, Gerard Marull-Paretas Copyright (c) 2014-2018, Mark Harviston, Arve Knudsen All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. qasync-0.27.1/Pipfile000066400000000000000000000002721452641434200144200ustar00rootroot00000000000000[[source]] url = "https://pypi.org/simple" name = "pypi" verify_ssl = true [dev-packages] pytest = "*" pytest-raises = "*" [packages] [dev-packages.qasync] editable = true path = "." qasync-0.27.1/README.md000066400000000000000000000057421452641434200143730ustar00rootroot00000000000000# qasync [![Maintenance](https://img.shields.io/maintenance/yes/2023)](https://pypi.org/project/qasync) [![PyPI](https://img.shields.io/pypi/v/qasync)](https://pypi.org/project/qasync) [![PyPI - License](https://img.shields.io/pypi/l/qasync)](/LICENSE) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/qasync)](https://pypi.org/project/qasync) [![PyPI - Download](https://img.shields.io/pypi/dm/qasync)](https://pypi.org/project/qasync) [![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/CabbageDevelopment/qasync/main.yml)](https://github.com/CabbageDevelopment/qasync/actions/workflows/main.yml) ## Introduction `qasync` allows coroutines to be used in PyQt/PySide applications by providing an implementation of the `PEP 3156` event loop. With `qasync`, you can use `asyncio` functionalities directly inside Qt app's event loop, in the main thread. Using async functions for Python tasks can be much easier and cleaner than using `threading.Thread` or `QThread`. If you need some CPU-intensive tasks to be executed in parallel, `qasync` also got that covered, providing `QEventLoop.run_in_executor` which is functionally identical to that of `asyncio`. 
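For example, a blocking call can be pushed onto a worker `QThread` without freezing the GUI. The snippet below is a minimal sketch (it assumes a `QEventLoop` has already been installed as the running asyncio loop, as in the basic example that follows); a complete, runnable version lives in `examples/executor_example.py`:

```python
import asyncio
import time

from qasync import QThreadExecutor


async def do_blocking_work():
    loop = asyncio.get_running_loop()
    with QThreadExecutor(1) as executor:
        # time.sleep stands in for any blocking call; it runs on a QThread,
        # so the Qt event loop keeps processing GUI events while we await it.
        await loop.run_in_executor(executor, time.sleep, 1)
```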
### Basic Example ```python import sys import asyncio from qasync import QEventLoop, QApplication from PySide6.QtWidgets import QWidget, QVBoxLayout class MainWindow(QWidget): def __init__(self): super().__init__() self.setLayout(QVBoxLayout()) self.lbl_status = QLabel("Idle", self) self.layout().addWidget(self.lbl_status) @asyncClose async def closeEvent(self, event): pass @asyncSlot() async def onMyEvent(self): pass if __name__ == "__main__": app = QApplication(sys.argv) event_loop = QEventLoop(app) asyncio.set_event_loop(event_loop) app_close_event = asyncio.Event() app.aboutToQuit.connect(app_close_event.set) main_window = MainWindow() main_window.show() with event_loop: event_loop.run_until_complete(app_close_event.wait()) ``` More detailed examples can be found [here](https://github.com/CabbageDevelopment/qasync/tree/master/examples). ### The Future of `qasync` `qasync` is a fork of [asyncqt](https://github.com/gmarull/asyncqt), which is a fork of [quamash](https://github.com/harvimt/quamash). `qasync` was created because those are no longer maintained. May it live longer than its predecessors. **`qasync` will continue to be maintained, and will still be accepting pull requests.** ## Requirements - Python >= 3.8 - PyQt5/PyQt6 or PySide2/PySide6 `qasync` is tested on Ubuntu, Windows and MacOS. If you need Python 3.6 or 3.7 support, use the [v0.25.0](https://github.com/CabbageDevelopment/qasync/releases/tag/v0.25.0) tag/release. ## Installation To install `qasync`, use `pip`: ``` pip install qasync ``` ## License You may use, modify and redistribute this software under the terms of the [BSD License](http://opensource.org/licenses/BSD-2-Clause). See [LICENSE](/LICENSE). qasync-0.27.1/examples/000077500000000000000000000000001452641434200147225ustar00rootroot00000000000000qasync-0.27.1/examples/aiohttp_fetch.py000066400000000000000000000041011452641434200201110ustar00rootroot00000000000000import asyncio import sys import aiohttp # from PyQt6.QtWidgets import ( from PySide6.QtWidgets import ( QApplication, QLabel, QLineEdit, QPushButton, QTextEdit, QVBoxLayout, QWidget, ) from qasync import QEventLoop, asyncClose, asyncSlot class MainWindow(QWidget): """Main window.""" _DEF_URL: str = "https://jsonplaceholder.typicode.com/todos/1" """Default URL.""" def __init__(self): super().__init__() self.setLayout(QVBoxLayout()) self.lbl_status = QLabel("Idle", self) self.layout().addWidget(self.lbl_status) self.edit_url = QLineEdit(self._DEF_URL, self) self.layout().addWidget(self.edit_url) self.edit_response = QTextEdit("", self) self.layout().addWidget(self.edit_response) self.btn_fetch = QPushButton("Fetch", self) self.btn_fetch.clicked.connect(self.on_btn_fetch_clicked) self.layout().addWidget(self.btn_fetch) self.session: aiohttp.ClientSession @asyncClose async def closeEvent(self, event): # noqa:N802 await self.session.close() async def boot(self): self.session = aiohttp.ClientSession() @asyncSlot() async def on_btn_fetch_clicked(self): self.btn_fetch.setEnabled(False) self.lbl_status.setText("Fetching...") try: async with self.session.get(self.edit_url.text()) as r: self.edit_response.setText(await r.text()) except Exception as exc: self.lbl_status.setText("Error: {}".format(exc)) else: self.lbl_status.setText("Finished!") finally: self.btn_fetch.setEnabled(True) if __name__ == "__main__": app = QApplication(sys.argv) event_loop = QEventLoop(app) asyncio.set_event_loop(event_loop) app_close_event = asyncio.Event() app.aboutToQuit.connect(app_close_event.set) main_window = MainWindow() 
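    # Show the window, schedule MainWindow.boot() on the Qt-integrated loop
    # (boot() creates the aiohttp session), then run until the application quits.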
main_window.show() event_loop.create_task(main_window.boot()) event_loop.run_until_complete(app_close_event.wait()) event_loop.close() qasync-0.27.1/examples/executor_example.py000066400000000000000000000016561452641434200206550ustar00rootroot00000000000000import functools import asyncio import time import sys # from PyQt6.QtWidgets import from PySide6.QtWidgets import QApplication, QProgressBar from qasync import QEventLoop, QThreadExecutor async def master(): progress = QProgressBar() progress.setRange(0, 99) progress.show() await first_50(progress) loop = asyncio.get_running_loop() with QThreadExecutor(1) as exec: await loop.run_in_executor(exec, functools.partial(last_50, progress), loop) async def first_50(progress): for i in range(50): progress.setValue(i) await asyncio.sleep(0.1) def last_50(progress, loop): for i in range(50, 100): loop.call_soon_threadsafe(progress.setValue, i) time.sleep(0.1) if __name__ == "__main__": app = QApplication(sys.argv) event_loop = QEventLoop(app) asyncio.set_event_loop(event_loop) event_loop.run_until_complete(master()) event_loop.close() qasync-0.27.1/examples/qml_httpx/000077500000000000000000000000001452641434200167425ustar00rootroot00000000000000qasync-0.27.1/examples/qml_httpx/app.py000066400000000000000000000022771452641434200201040ustar00rootroot00000000000000import sys import asyncio from pathlib import Path from qasync import QEventLoop, QApplication from PySide6.QtCore import QUrl from PySide6.QtQml import QQmlApplicationEngine, qmlRegisterType from service import ExampleService QML_PATH = Path(__file__).parent.absolute().joinpath("qml") if __name__ == "__main__": app = QApplication(sys.argv) engine = QQmlApplicationEngine() engine.addImportPath(QML_PATH) app.aboutToQuit.connect(engine.deleteLater) engine.quit.connect(app.quit) # register our service, making it usable directly in QML qmlRegisterType(ExampleService, "qasync", 1, 0, ExampleService.__name__) # alternatively, instantiate the service and inject it into the QML engine # service = ExampleService() # engine.rootContext().setContextProperty("service", service) event_loop = QEventLoop(app) asyncio.set_event_loop(event_loop) app_close_event = asyncio.Event() app.aboutToQuit.connect(app_close_event.set) engine.quit.connect(app_close_event.set) qml_entry = QUrl.fromLocalFile(str(QML_PATH.joinpath("Main.qml"))) engine.load(qml_entry) with event_loop: event_loop.run_until_complete(app_close_event.wait()) qasync-0.27.1/examples/qml_httpx/qml/000077500000000000000000000000001452641434200175335ustar00rootroot00000000000000qasync-0.27.1/examples/qml_httpx/qml/Main.qml000066400000000000000000000004641452641434200211360ustar00rootroot00000000000000import QtQuick 2.15 import QtQuick.Controls 2.15 import QtQuick.Layouts 1.15 import QtQuick.Window 2.15 ApplicationWindow { id: root title: "qasync" visible: true width: 420 height: 240 Loader { id: mainLoader anchors.fill: parent source: "Page.qml" } } qasync-0.27.1/examples/qml_httpx/qml/Page.qml000066400000000000000000000026621452641434200211300ustar00rootroot00000000000000import QtQuick 2.15 import QtQuick.Controls 2.15 import QtQuick.Controls.Material 2.15 import QtQuick.Layouts 1.15 Item { ExampleService { id: service // handle value changes inside the service object onValueChanged: { // use value } } Connections { target: service // handle value changes with an external Connection function onValueChanged(value) { // use value } } ColumnLayout { anchors { fill: parent margins: 10 } RowLayout { Layout.fillWidth: true Button { id: button 
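                // Disabled while the service reports a request in flight;
                // clicking triggers the async fetch slot (see onClicked below).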
Layout.preferredWidth: 100 enabled: !service.isLoading text: { return service.isLoading ? qsTr("Loading...") : qsTr("Fetch") } onClicked: function() { service.fetch(url.text) } } TextField { id: url Layout.fillWidth: true enabled: !service.isLoading text: qsTr("https://jsonplaceholder.typicode.com/todos/1") } } TextEdit { id: text Layout.fillHeight: true Layout.fillWidth: true // react to value changes from other widgets text: service.value } } } qasync-0.27.1/examples/qml_httpx/service.py000066400000000000000000000022371452641434200207600ustar00rootroot00000000000000import httpx from qasync import asyncSlot from PySide6.QtCore import QObject, Signal, Property, Slot class ExampleService(QObject): valueChanged = Signal(str, arguments=["value"]) loadingChanged = Signal(bool, arguments=["loading"]) def __init__(self, parent=None): QObject.__init__(self, parent) self._value = None self._loading = False def _set_value(self, value): if self._value != value: self._value = value self.valueChanged.emit(value) def _set_loading(self, value): if self._loading != value: self._loading = value self.loadingChanged.emit(value) @Property(str, notify=valueChanged) def value(self) -> str: return self._value @Property(bool, notify=loadingChanged) def isLoading(self) -> bool: return self._loading @asyncSlot(str) async def fetch(self, endpoint: str): if not endpoint: return self._set_loading(True) async with httpx.AsyncClient() as client: resp = await client.get(endpoint) self._set_value(resp.text) self._set_loading(False) qasync-0.27.1/poetry.lock000066400000000000000000001067041452641434200153100ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, {file = 
"coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, {file = 
"coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "distlib" version = "0.3.7" description = "Distribution utilities" optional = false python-versions = "*" files = [ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] [[package]] name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.7" files = [ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, ] [package.extras] testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, ] [package.extras] docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] typing = ["typing-extensions (>=4.7.1)"] [[package]] name = "identify" version = "2.5.30" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, ] [package.extras] license = ["ukkonen"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "mypy" version = "1.5.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, ] [package.dependencies] setuptools = "*" [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" files = [ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.7" files = [ {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] [package.dependencies] cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" [[package]] name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-github-actions-annotate-failures" version = "0.1.8" description = "pytest plugin to annotate failed tests with a workflow command for GitHub Actions" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ {file = "pytest-github-actions-annotate-failures-0.1.8.tar.gz", hash = "sha256:2d6e6cb5f8d0aae4a27a20cc4e20fabd3199a121c57f44bc48fe28e372e0be23"}, {file = "pytest_github_actions_annotate_failures-0.1.8-py2.py3-none-any.whl", hash = "sha256:6a882ff21672fa79deae8d917eb965a6bde2b25191e7632e1adfc23ffac008ab"}, ] [package.dependencies] pytest = ">=4.0.0" [[package]] name = "pytest-xdist" version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.7" files = [ {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, ] [package.dependencies] execnet = ">=1.1" psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""} pytest = ">=6.2.0" [package.extras] psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] [[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs 
(>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<4" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [metadata] lock-version = "2.0" python-versions = "^3.8" content-hash = "2190cfcc5d86f1314fe84f2427426f996de3724b617a61d96849bde109403581" qasync-0.27.1/pyproject.toml000066400000000000000000000027671452641434200160340ustar00rootroot00000000000000[tool.poetry] name = "qasync" version = "0.27.1" description = "Python library for using asyncio in Qt-based applications" authors = [ "Arve Knudsen ", "Gerard Marull-Paretas ", "Mark Harviston ", "Sam McCormack", ] maintainers = ["Alex March "] license = "BSD-2-Clause" readme = "README.md" homepage = "https://github.com/CabbageDevelopment/qasync" repository = "https://github.com/CabbageDevelopment/qasync" keywords = ["Qt", "asncio"] classifiers = [ "Development Status :: 4 - Beta", "Environment :: X11 Applications :: Qt", "Intended Audience :: Developers", "Operating System :: MacOS", "Operating System :: Microsoft", "Operating System :: POSIX", "Topic :: Software Development :: Libraries :: Python Modules", ] [tool.poetry.dependencies] python = "^3.8" [tool.poetry.group.dev.dependencies] pre-commit = "^2.21" [tool.poetry.group.test.dependencies] pytest = "^7.4" pytest-cov = "^4.1" pytest-xdist = { version = "^3.3", extras = ["psutil"] } [tool.poetry.group.typing.dependencies] mypy = ">=1.0" # only used in github actions [tool.poetry.group.github-actions] optional = true [tool.poetry.group.github-actions.dependencies] pytest-github-actions-annotate-failures = "^0.1.7" [build-system] requires = ["poetry-core>=1.5.0"] build-backend = "poetry.core.masonry.api" [tool.pytest] addopts = "-n auto" testpaths = ["tests"] 
[tool.pytest.ini_options] markers = ["raises"] qasync-0.27.1/qasync/000077500000000000000000000000001452641434200144025ustar00rootroot00000000000000qasync-0.27.1/qasync/__init__.py000066400000000000000000000644011452641434200165200ustar00rootroot00000000000000""" Implementation of the PEP 3156 Event-Loop with Qt. Copyright (c) 2018 Gerard Marull-Paretas Copyright (c) 2014 Mark Harviston Copyright (c) 2014 Arve Knudsen BSD License """ __author__ = ( "Sam McCormack", "Gerard Marull-Paretas , " "Mark Harviston , " "Arve Knudsen ", ) __all__ = ["QEventLoop", "QThreadExecutor", "asyncSlot", "asyncClose"] import asyncio import contextlib import functools import importlib import inspect import itertools import logging import os import sys import time from concurrent.futures import Future from queue import Queue logger = logging.getLogger(__name__) QtModule = None # If QT_API env variable is given, use that or fail trying qtapi_env = os.getenv("QT_API", "").strip().lower() if qtapi_env: env_to_mod_map = { "pyqt5": "PyQt5", "pyqt6": "PyQt6", "pyqt": "PyQt4", "pyqt4": "PyQt4", "pyside6": "PySide6", "pyside2": "PySide2", "pyside": "PySide", } if qtapi_env in env_to_mod_map: QtModuleName = env_to_mod_map[qtapi_env] else: raise ImportError( "QT_API environment variable set ({}) but not one of [{}].".format( qtapi_env, ", ".join(env_to_mod_map.keys()) ) ) logger.info("Forcing use of {} as Qt Implementation".format(QtModuleName)) QtModule = importlib.import_module(QtModuleName) # If a Qt lib is already imported, use that if not QtModule: for QtModuleName in ("PyQt5", "PyQt6", "PySide2", "PySide6"): if QtModuleName in sys.modules: QtModule = sys.modules[QtModuleName] break # Try importing qt libs if not QtModule: for QtModuleName in ("PyQt5", "PyQt6", "PySide2", "PySide6"): try: QtModule = importlib.import_module(QtModuleName) except ImportError: continue else: break if not QtModule: raise ImportError("No Qt implementations found") QtCore = importlib.import_module(QtModuleName + ".QtCore", package=QtModuleName) QtGui = importlib.import_module(QtModuleName + ".QtGui", package=QtModuleName) if QtModuleName == "PyQt5": from PyQt5 import QtWidgets from PyQt5.QtCore import pyqtSlot as Slot QApplication = QtWidgets.QApplication elif QtModuleName == "PyQt6": from PyQt6 import QtWidgets from PyQt6.QtCore import pyqtSlot as Slot QApplication = QtWidgets.QApplication elif QtModuleName == "PySide2": from PySide2 import QtWidgets from PySide2.QtCore import Slot QApplication = QtWidgets.QApplication elif QtModuleName == "PySide6": from PySide6 import QtWidgets from PySide6.QtCore import Slot QApplication = QtWidgets.QApplication from ._common import with_logger # noqa @with_logger class _QThreadWorker(QtCore.QThread): """ Read jobs from the queue and then execute them. For use by the QThreadExecutor """ def __init__(self, queue, num, stackSize=None): self.__queue = queue self.__stop = False self.__num = num super().__init__() if stackSize is not None: self.setStackSize(stackSize) def run(self): queue = self.__queue while True: command = queue.get() if command is None: # Stopping... 
break future, callback, args, kwargs = command self._logger.debug( "#%s got callback %s with args %s and kwargs %s from queue", self.__num, callback, args, kwargs, ) if future.set_running_or_notify_cancel(): self._logger.debug("Invoking callback") try: r = callback(*args, **kwargs) except Exception as err: self._logger.debug("Setting Future exception: %s", err) future.set_exception(err) else: self._logger.debug("Setting Future result: %s", r) future.set_result(r) else: self._logger.debug("Future was canceled") self._logger.debug("Thread #%s stopped", self.__num) def wait(self): self._logger.debug("Waiting for thread #%s to stop...", self.__num) super().wait() @with_logger class QThreadExecutor: """ ThreadExecutor that produces QThreads. Same API as `concurrent.futures.Executor` >>> from qasync import QThreadExecutor >>> with QThreadExecutor(5) as executor: ... f = executor.submit(lambda x: 2 + x, 2) ... r = f.result() ... assert r == 4 """ def __init__(self, max_workers=10, stack_size=None): super().__init__() self.__max_workers = max_workers self.__queue = Queue() if stack_size is None: # Match cpython/Python/thread_pthread.h if sys.platform.startswith("darwin"): stack_size = 16 * 2**20 elif sys.platform.startswith("freebsd"): stack_size = 4 * 2**20 elif sys.platform.startswith("aix"): stack_size = 2 * 2**20 self.__workers = [ _QThreadWorker(self.__queue, i + 1, stack_size) for i in range(max_workers) ] self.__been_shutdown = False for w in self.__workers: w.start() def submit(self, callback, *args, **kwargs): if self.__been_shutdown: raise RuntimeError("QThreadExecutor has been shutdown") future = Future() self._logger.debug( "Submitting callback %s with args %s and kwargs %s to thread worker queue", callback, args, kwargs, ) self.__queue.put((future, callback, args, kwargs)) return future def map(self, func, *iterables, timeout=None): raise NotImplementedError("use as_completed on the event loop") def shutdown(self, wait=True): if self.__been_shutdown: raise RuntimeError("QThreadExecutor has been shutdown") self.__been_shutdown = True self._logger.debug("Shutting down") for i in range(len(self.__workers)): # Signal workers to stop self.__queue.put(None) if wait: for w in self.__workers: w.wait() def __enter__(self, *args): if self.__been_shutdown: raise RuntimeError("QThreadExecutor has been shutdown") return self def __exit__(self, *args): self.shutdown() def _make_signaller(qtimpl_qtcore, *args): class Signaller(qtimpl_qtcore.QObject): try: signal = qtimpl_qtcore.Signal(*args) except AttributeError: signal = qtimpl_qtcore.pyqtSignal(*args) return Signaller() @with_logger class _SimpleTimer(QtCore.QObject): def __init__(self): super().__init__() self.__callbacks = {} self._stopped = False self.__debug_enabled = False def add_callback(self, handle, delay=0): timerid = self.startTimer(int(max(0, delay) * 1000)) self.__log_debug("Registering timer id %s", timerid) assert timerid not in self.__callbacks self.__callbacks[timerid] = handle return handle def timerEvent(self, event): # noqa: N802 timerid = event.timerId() self.__log_debug("Timer event on id %s", timerid) if self._stopped: self.__log_debug("Timer stopped, killing %s", timerid) self.killTimer(timerid) del self.__callbacks[timerid] else: try: handle = self.__callbacks[timerid] except KeyError as e: self.__log_debug(e) pass else: if handle._cancelled: self.__log_debug("Handle %s cancelled", handle) else: self.__log_debug("Calling handle %s", handle) handle._run() finally: del self.__callbacks[timerid] handle = None 
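# Each scheduled handle owns its own timer id; killing it below (after the first
# timerEvent for that id) turns Qt's repeating timer events into the one-shot
# behaviour expected from call_later()/call_soon().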
self.killTimer(timerid) def stop(self): self.__log_debug("Stopping timers") self._stopped = True def set_debug(self, enabled): self.__debug_enabled = enabled def __log_debug(self, *args, **kwargs): if self.__debug_enabled: self._logger.debug(*args, **kwargs) def _fileno(fd): if isinstance(fd, int): return fd try: return int(fd.fileno()) except (AttributeError, TypeError, ValueError): raise ValueError(f"Invalid file object: {fd!r}") from None @with_logger class _QEventLoop: """ Implementation of asyncio event loop that uses the Qt Event loop. >>> import asyncio >>> >>> app = getfixture('application') >>> >>> async def xplusy(x, y): ... await asyncio.sleep(.1) ... assert x + y == 4 ... await asyncio.sleep(.1) >>> >>> loop = QEventLoop(app) >>> asyncio.set_event_loop(loop) >>> with loop: ... loop.run_until_complete(xplusy(2, 2)) If the event loop shall be used with an existing and already running QApplication it must be specified in the constructor via already_running=True In this case the user is responsible for loop cleanup with stop() and close() The set_running_loop parameter is there for backwards compatibility and does nothing. """ def __init__(self, app=None, set_running_loop=False, already_running=False): self.__app = app or QApplication.instance() assert self.__app is not None, "No QApplication has been instantiated" self.__is_running = False self.__debug_enabled = False self.__default_executor = None self.__exception_handler = None self._read_notifiers = {} self._write_notifiers = {} self._timer = _SimpleTimer() self.__call_soon_signaller = signaller = _make_signaller(QtCore, object, tuple) self.__call_soon_signal = signaller.signal signaller.signal.connect(lambda callback, args: self.call_soon(callback, *args)) assert self.__app is not None super().__init__() # We have to set __is_running to True after calling # super().__init__() because of a bug in BaseEventLoop. 
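# The branch below attaches to a QApplication whose exec loop is already running.
# Illustrative sketch only (the coroutine name is made up):
#     loop = QEventLoop(app, already_running=True)
#     asyncio.ensure_future(main_coro())
#     ...  # later, the caller itself must invoke loop.stop() and loop.close()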
if already_running: self.__is_running = True # it must be ensured that all pre- and # postprocessing for the eventloop is done self._before_run_forever() self.__app.aboutToQuit.connect(self._after_run_forever) # for asyncio to recognize the already running loop asyncio.events._set_running_loop(self) def run_forever(self): """Run eventloop forever.""" if self.__is_running: raise RuntimeError("Event loop already running") self.__is_running = True self._before_run_forever() try: self.__log_debug("Starting Qt event loop") asyncio.events._set_running_loop(self) rslt = -1 if hasattr(self.__app, "exec"): rslt = self.__app.exec() else: rslt = self.__app.exec_() self.__log_debug("Qt event loop ended with result %s", rslt) return rslt finally: asyncio.events._set_running_loop(None) self._after_run_forever() self.__is_running = False def run_until_complete(self, future): """Run until Future is complete.""" if self.__is_running: raise RuntimeError("Event loop already running") self.__log_debug("Running %s until complete", future) future = asyncio.ensure_future(future, loop=self) def stop(*args): self.stop() # noqa future.add_done_callback(stop) try: self.run_forever() finally: future.remove_done_callback(stop) self.__app.processEvents() # run loop one last time to process all the events if not future.done(): raise RuntimeError("Event loop stopped before Future completed.") self.__log_debug("Future %s finished running", future) return future.result() def stop(self): """Stop event loop.""" if not self.__is_running: self.__log_debug("Already stopped") return self.__log_debug("Stopping event loop...") self.__is_running = False self.__app.exit() self.__log_debug("Stopped event loop") def is_running(self): """Return True if the event loop is running, False otherwise.""" return self.__is_running def close(self): """ Release all resources used by the event loop. The loop cannot be restarted after it has been closed. 
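Closing shuts down the default executor (if one was created), stops the
internal timer and disables any remaining socket notifiers.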
""" if self.is_running(): raise RuntimeError("Cannot close a running event loop") if self.is_closed(): return self.__log_debug("Closing event loop...") if self.__default_executor is not None: self.__default_executor.shutdown() super().close() self._timer.stop() self.__app = None for notifier in itertools.chain( self._read_notifiers.values(), self._write_notifiers.values() ): notifier.setEnabled(False) self._read_notifiers = None self._write_notifiers = None def call_later(self, delay, callback, *args, context=None): """Register callback to be invoked after a certain delay.""" if asyncio.iscoroutinefunction(callback): raise TypeError("coroutines cannot be used with call_later") if not callable(callback): raise TypeError( "callback must be callable: {}".format(type(callback).__name__) ) self.__log_debug( "Registering callback %s to be invoked with arguments %s after %s second(s)", callback, args, delay, ) if sys.version_info >= (3, 7): return self._add_callback( asyncio.Handle(callback, args, self, context=context), delay ) return self._add_callback(asyncio.Handle(callback, args, self), delay) def _add_callback(self, handle, delay=0): return self._timer.add_callback(handle, delay) def call_soon(self, callback, *args, context=None): """Register a callback to be run on the next iteration of the event loop.""" return self.call_later(0, callback, *args, context=context) def call_at(self, when, callback, *args, context=None): """Register callback to be invoked at a certain time.""" return self.call_later(when - self.time(), callback, *args, context=context) def time(self): """Get time according to event loop's clock.""" return time.monotonic() def _add_reader(self, fd, callback, *args): """Register a callback for when a file descriptor is ready for reading.""" self._check_closed() try: existing = self._read_notifiers[fd] except KeyError: pass else: # this is necessary to avoid race condition-like issues existing.setEnabled(False) existing.activated["int"].disconnect() # will get overwritten by the assignment below anyways notifier = QtCore.QSocketNotifier(_fileno(fd), QtCore.QSocketNotifier.Type.Read) notifier.setEnabled(True) self.__log_debug("Adding reader callback for file descriptor %s", fd) notifier.activated["int"].connect( lambda: self.__on_notifier_ready( self._read_notifiers, notifier, fd, callback, args ) # noqa: C812 ) self._read_notifiers[fd] = notifier def _remove_reader(self, fd): """Remove reader callback.""" if self.is_closed(): return self.__log_debug("Removing reader callback for file descriptor %s", fd) try: notifier = self._read_notifiers.pop(fd) except KeyError: return False else: notifier.setEnabled(False) return True def _add_writer(self, fd, callback, *args): """Register a callback for when a file descriptor is ready for writing.""" self._check_closed() try: existing = self._write_notifiers[fd] except KeyError: pass else: # this is necessary to avoid race condition-like issues existing.setEnabled(False) existing.activated["int"].disconnect() # will get overwritten by the assignment below anyways notifier = QtCore.QSocketNotifier( _fileno(fd), QtCore.QSocketNotifier.Type.Write, ) notifier.setEnabled(True) self.__log_debug("Adding writer callback for file descriptor %s", fd) notifier.activated["int"].connect( lambda: self.__on_notifier_ready( self._write_notifiers, notifier, fd, callback, args ) # noqa: C812 ) self._write_notifiers[fd] = notifier def _remove_writer(self, fd): """Remove writer callback.""" if self.is_closed(): return self.__log_debug("Removing writer callback 
for file descriptor %s", fd) try: notifier = self._write_notifiers.pop(fd) except KeyError: return False else: notifier.setEnabled(False) return True def __notifier_cb_wrapper(self, notifiers, notifier, fd, callback, args): # This wrapper gets called with a certain delay. We cannot know # for sure that the notifier is still the current notifier for # the fd. if notifiers.get(fd, None) is not notifier: return try: callback(*args) finally: # The notifier might have been overriden by the # callback. We must not re-enable it in that case. if notifiers.get(fd, None) is notifier: notifier.setEnabled(True) else: notifier.activated["int"].disconnect() def __on_notifier_ready(self, notifiers, notifier, fd, callback, args): if fd not in notifiers: self._logger.warning( "Socket notifier for fd %s is ready, even though it should " "be disabled, not calling %s and disabling", fd, callback, ) notifier.setEnabled(False) return # It can be necessary to disable QSocketNotifier when e.g. checking # ZeroMQ sockets for events assert notifier.isEnabled() self.__log_debug("Socket notifier for fd %s is ready", fd) notifier.setEnabled(False) self.call_soon( self.__notifier_cb_wrapper, notifiers, notifier, fd, callback, args ) # Methods for interacting with threads. def call_soon_threadsafe(self, callback, *args, context=None): """Thread-safe version of call_soon.""" self.__call_soon_signal.emit(callback, args) def run_in_executor(self, executor, callback, *args): """Run callback in executor. If no executor is provided, the default executor will be used, which defers execution to a background thread. """ self.__log_debug("Running callback %s with args %s in executor", callback, args) if isinstance(callback, asyncio.Handle): assert not args assert not isinstance(callback, asyncio.TimerHandle) if callback._cancelled: f = asyncio.Future() f.set_result(None) return f callback, args = callback.callback, callback.args if executor is None: self.__log_debug("Using default executor") executor = self.__default_executor if executor is None: self.__log_debug("Creating default executor") executor = self.__default_executor = QThreadExecutor() return asyncio.wrap_future(executor.submit(callback, *args)) def set_default_executor(self, executor): self.__default_executor = executor # Error handlers. def set_exception_handler(self, handler): self.__exception_handler = handler def default_exception_handler(self, context): """Handle exceptions. This is the default exception handler. This is called when an exception occurs and no exception handler is set, and can be called by a custom exception handler that wants to defer to the default behavior. context parameter has the same meaning as in `call_exception_handler()`. """ self.__log_debug("Default exception handler executing") message = context.get("message") if not message: message = "Unhandled exception in event loop" try: exception = context["exception"] except KeyError: exc_info = False else: exc_info = (type(exception), exception, exception.__traceback__) log_lines = [message] for key in [k for k in sorted(context) if k not in {"message", "exception"}]: log_lines.append("{}: {!r}".format(key, context[key])) self.__log_error("\n".join(log_lines), exc_info=exc_info) def call_exception_handler(self, context): if self.__exception_handler is None: try: self.default_exception_handler(context) except Exception: # Second protection layer for unexpected errors # in the default implementation, as well as for subclassed # event loops with overloaded "default_exception_handler". 
self.__log_error( "Exception in default exception handler", exc_info=True ) return try: self.__exception_handler(self, context) except Exception as exc: # Exception in the user set custom exception handler. try: # Let's try the default handler. self.default_exception_handler( { "message": "Unhandled error in custom exception handler", "exception": exc, "context": context, } ) except Exception: # Guard 'default_exception_handler' in case it's # overloaded. self.__log_error( "Exception in default exception handler while handling an unexpected error " "in custom exception handler", exc_info=True, ) # Debug flag management. def get_debug(self): return self.__debug_enabled def set_debug(self, enabled): super().set_debug(enabled) self.__debug_enabled = enabled self._timer.set_debug(enabled) def __enter__(self): return self def __exit__(self, *args): self.stop() self.close() def __log_debug(self, *args, **kwargs): if self.__debug_enabled: self._logger.debug(*args, **kwargs) @classmethod def __log_error(cls, *args, **kwds): # In some cases, the error method itself fails, don't have a lot of options in that case try: cls._logger.error(*args, **kwds) except: # noqa E722 sys.stderr.write("{!r}, {!r}\n".format(args, kwds)) from ._unix import _SelectorEventLoop # noqa QSelectorEventLoop = type("QSelectorEventLoop", (_QEventLoop, _SelectorEventLoop), {}) if os.name == "nt": from ._windows import _ProactorEventLoop QIOCPEventLoop = type("QIOCPEventLoop", (_QEventLoop, _ProactorEventLoop), {}) QEventLoop = QIOCPEventLoop else: QEventLoop = QSelectorEventLoop class _Cancellable: def __init__(self, timer, loop): self.__timer = timer self.__loop = loop def cancel(self): self.__timer.stop() def asyncClose(fn): """Allow to run async code before application is closed.""" @functools.wraps(fn) def wrapper(*args, **kwargs): f = asyncio.ensure_future(fn(*args, **kwargs)) while not f.done(): QApplication.instance().processEvents() return wrapper def asyncSlot(*args, **kwargs): """Make a Qt async slot run on asyncio loop.""" def _error_handler(task): try: task.result() except Exception: sys.excepthook(*sys.exc_info()) def outer_decorator(fn): @Slot(*args, **kwargs) @functools.wraps(fn) def wrapper(*args, **kwargs): # Qt ignores trailing args from a signal but python does # not so inspect the slot signature and if it's not # callable try removing args until it is. task = None while len(args): try: inspect.signature(fn).bind(*args, **kwargs) except TypeError: if len(args): # Only convert args to a list if we need to pop() args = list(args) args.pop() continue else: task = asyncio.ensure_future(fn(*args, **kwargs)) task.add_done_callback(_error_handler) break if task is None: raise TypeError( "asyncSlot was not callable from Signal. Potential signature mismatch." 
) return task return wrapper return outer_decorator class QEventLoopPolicyMixin: def new_event_loop(self): return QEventLoop(QApplication.instance() or QApplication(sys.argv)) class DefaultQEventLoopPolicy( QEventLoopPolicyMixin, asyncio.DefaultEventLoopPolicy, ): pass @contextlib.contextmanager def _set_event_loop_policy(policy): old_policy = asyncio.get_event_loop_policy() asyncio.set_event_loop_policy(policy) try: yield finally: asyncio.set_event_loop_policy(old_policy) def run(*args, **kwargs): with _set_event_loop_policy(DefaultQEventLoopPolicy()): return asyncio.run(*args, **kwargs) qasync-0.27.1/qasync/_common.py000066400000000000000000000011321452641434200164000ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License """Mostly irrelevant, but useful utilities common to UNIX and Windows.""" import logging def with_logger(cls): """Class decorator to add a logger to a class.""" attr_name = "_logger" cls_name = cls.__qualname__ module = cls.__module__ if module is not None: cls_name = module + "." + cls_name else: raise AssertionError setattr(cls, attr_name, logging.getLogger(cls_name)) return cls qasync-0.27.1/qasync/_unix.py000066400000000000000000000142201452641434200160750ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License """UNIX specific Quamash functionality.""" import asyncio import selectors import collections from . import QtCore, with_logger, _fileno EVENT_READ = 1 << 0 EVENT_WRITE = 1 << 1 class _SelectorMapping(collections.abc.Mapping): """Mapping of file objects to selector keys.""" def __init__(self, selector): self._selector = selector def __len__(self): return len(self._selector._fd_to_key) def __getitem__(self, fileobj): try: fd = self._selector._fileobj_lookup(fileobj) return self._selector._fd_to_key[fd] except KeyError: raise KeyError("{!r} is not registered".format(fileobj)) from None def __iter__(self): return iter(self._selector._fd_to_key) @with_logger class _Selector(selectors.BaseSelector): def __init__(self, parent): # this maps file descriptors to keys self._fd_to_key = {} # read-only mapping returned by get_map() self.__map = _SelectorMapping(self) self.__read_notifiers = {} self.__write_notifiers = {} self.__parent = parent def select(self, *args, **kwargs): """Implement abstract method even though we don't need it.""" raise NotImplementedError def _fileobj_lookup(self, fileobj): """Return a file descriptor from a file object. This wraps _fileno() to do an exhaustive search in case the object is invalid but we still have it in our map. This is used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping. """ try: return _fileno(fileobj) except ValueError: # Do an exhaustive search. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. 
raise def register(self, fileobj, events, data=None): if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): raise ValueError("Invalid events: {!r}".format(events)) key = selectors.SelectorKey( fileobj, self._fileobj_lookup(fileobj), events, data ) if key.fd in self._fd_to_key: raise KeyError("{!r} (FD {}) is already registered".format(fileobj, key.fd)) self._fd_to_key[key.fd] = key if events & EVENT_READ: notifier = QtCore.QSocketNotifier(key.fd, QtCore.QSocketNotifier.Read) notifier.activated["int"].connect(self.__on_read_activated) self.__read_notifiers[key.fd] = notifier if events & EVENT_WRITE: notifier = QtCore.QSocketNotifier(key.fd, QtCore.QSocketNotifier.Write) notifier.activated["int"].connect(self.__on_write_activated) self.__write_notifiers[key.fd] = notifier return key def __on_read_activated(self, fd): self._logger.debug("File %s ready to read", fd) key = self._key_from_fd(fd) if key: self.__parent._process_event(key, EVENT_READ & key.events) def __on_write_activated(self, fd): self._logger.debug("File %s ready to write", fd) key = self._key_from_fd(fd) if key: self.__parent._process_event(key, EVENT_WRITE & key.events) def unregister(self, fileobj): def drop_notifier(notifiers): try: notifier = notifiers.pop(key.fd) except KeyError: pass else: notifier.activated["int"].disconnect() try: key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) except KeyError: raise KeyError("{!r} is not registered".format(fileobj)) from None drop_notifier(self.__read_notifiers) drop_notifier(self.__write_notifiers) return key def modify(self, fileobj, events, data=None): try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError("{!r} is not registered".format(fileobj)) from None if events != key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data != key.data: # Use a shortcut to update the data. key = key._replace(data=data) self._fd_to_key[key.fd] = key return key def close(self): self._logger.debug("Closing") self._fd_to_key.clear() self.__read_notifiers.clear() self.__write_notifiers.clear() def get_map(self): return self.__map def _key_from_fd(self, fd): """ Return the key associated to a given file descriptor. 
Parameters: fd -- file descriptor Returns: corresponding key, or None if not found """ try: return self._fd_to_key[fd] except KeyError: return None class _SelectorEventLoop(asyncio.SelectorEventLoop): def __init__(self): self._signal_safe_callbacks = [] selector = _Selector(self) asyncio.SelectorEventLoop.__init__(self, selector) def _before_run_forever(self): pass def _after_run_forever(self): pass def _process_event(self, key, mask): """Selector has delivered us an event.""" self._logger.debug("Processing event with key %s and mask %s", key, mask) fileobj, (reader, writer) = key.fileobj, key.data if mask & selectors.EVENT_READ and reader is not None: if reader._cancelled: self.remove_reader(fileobj) else: self._logger.debug("Invoking reader callback: %s", reader) reader._run() if mask & selectors.EVENT_WRITE and writer is not None: if writer._cancelled: self.remove_writer(fileobj) else: self._logger.debug("Invoking writer callback: %s", writer) writer._run() qasync-0.27.1/qasync/_windows.py000066400000000000000000000162021452641434200166060ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License """Windows specific Quamash functionality.""" import asyncio import sys try: import _winapi from asyncio import windows_events import _overlapped except ImportError: # noqa pass # w/o guarding this import py.test can't gather doctests on platforms w/o _winapi import math from . import QtCore, _make_signaller from ._common import with_logger UINT32_MAX = 0xFFFFFFFF class _ProactorEventLoop(asyncio.ProactorEventLoop): """Proactor based event loop.""" def __init__(self): super().__init__(_IocpProactor()) self.__event_signaller = _make_signaller(QtCore, list) self.__event_signal = self.__event_signaller.signal self.__event_signal.connect(self._process_events) self.__event_poller = _EventPoller(self.__event_signal) def _process_events(self, events): """Process events from proactor.""" for f, callback, transferred, key, ov in events: try: self._logger.debug("Invoking event callback %s", callback) value = callback(transferred, key, ov) except OSError as e: self._logger.debug("Event callback failed", exc_info=sys.exc_info()) if not f.done(): f.set_exception(e) else: if not f.cancelled(): f.set_result(value) def _before_run_forever(self): self.__event_poller.start(self._proactor) def _after_run_forever(self): self.__event_poller.stop() @with_logger class _IocpProactor(windows_events.IocpProactor): def __init__(self): self.__events = [] super(_IocpProactor, self).__init__() self._lock = QtCore.QMutex() def select(self, timeout=None): """Override in order to handle events in a threadsafe manner.""" if not self.__events: self._poll(timeout) tmp = self.__events self.__events = [] return tmp def close(self): self._logger.debug("Closing") super(_IocpProactor, self).close() # Wrap all I/O submission methods to acquire the internal lock first; listed # in the order they appear in the base class source code. 
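# The proactor is polled from a dedicated QThread (_EventWorker) while the Qt
# thread keeps submitting new I/O, so each wrapper below takes the QMutex to
# serialise submissions against the completion handling in _poll().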
def recv(self, conn, nbytes, flags=0): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).recv(conn, nbytes, flags) def recv_into(self, conn, buf, flags=0): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).recv_into(conn, buf, flags) def recvfrom(self, conn, nbytes, flags=0): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).recvfrom(conn, nbytes, flags) def recvfrom_into(self, conn, buf, flags=0): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).recvfrom_into(conn, buf, flags) def sendto(self, conn, buf, flags=0, addr=None): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).sendto(conn, buf, flags, addr) def send(self, conn, buf, flags=0): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).send(conn, buf, flags) def accept(self, listener): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).accept(listener) def connect(self, conn, address): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).connect(conn, address) def sendfile(self, sock, file, offset, count): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).sendfile(sock, file, offset, count) def accept_pipe(self, pipe): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self).accept_pipe(pipe) # connect_pipe() does not actually use the delayed completion machinery. # This takes care of wait_for_handle() too. def _wait_for_handle(self, handle, timeout, _is_cancel): with QtCore.QMutexLocker(self._lock): return super(_IocpProactor, self)._wait_for_handle( handle, timeout, _is_cancel ) def _poll(self, timeout=None): """Override in order to handle events in a threadsafe manner.""" if timeout is None: ms = UINT32_MAX # wait for eternity elif timeout < 0: raise ValueError("negative timeout") else: # GetQueuedCompletionStatus() has a resolution of 1 millisecond, # round away from zero to wait *at least* timeout seconds. ms = math.ceil(timeout * 1e3) if ms >= UINT32_MAX: raise ValueError("timeout too big") while True: status = _overlapped.GetQueuedCompletionStatus(self._iocp, ms) if status is None: break ms = 0 with QtCore.QMutexLocker(self._lock): err, transferred, key, address = status try: f, ov, obj, callback = self._cache.pop(address) except KeyError: # key is either zero, or it is used to return a pipe # handle which should be closed to avoid a leak. 
if key not in (0, _overlapped.INVALID_HANDLE_VALUE): _winapi.CloseHandle(key) continue if obj in self._stopped_serving: f.cancel() # Futures might already be resolved or cancelled elif not f.done(): self.__events.append((f, callback, transferred, key, ov)) # Remove unregistered futures for ov in self._unregistered: self._cache.pop(ov.address, None) self._unregistered.clear() @with_logger class _EventWorker(QtCore.QThread): def __init__(self, proactor, parent): super().__init__() self.__stop = False self.__proactor = proactor self.__sig_events = parent.sig_events self.__semaphore = QtCore.QSemaphore() def start(self): super().start() self.__semaphore.acquire() def stop(self): self.__stop = True # Wait for thread to end self.wait() def run(self): self._logger.debug("Thread started") self.__semaphore.release() while not self.__stop: events = self.__proactor.select(0.01) if events: self._logger.debug("Got events from poll: %s", events) self.__sig_events.emit(events) self._logger.debug("Exiting thread") @with_logger class _EventPoller: """Polling of events in separate thread.""" def __init__(self, sig_events): self.sig_events = sig_events def start(self, proactor): self._logger.debug("Starting (proactor: %s)...", proactor) self.__worker = _EventWorker(proactor, self) self.__worker.start() def stop(self): self._logger.debug("Stopping worker thread...") self.__worker.stop() qasync-0.27.1/tests/000077500000000000000000000000001452641434200142465ustar00rootroot00000000000000qasync-0.27.1/tests/conftest.py000066400000000000000000000010531452641434200164440ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License import os import logging from pytest import fixture logging.basicConfig( level=logging.DEBUG, format="%(levelname)s\t%(filename)s:%(lineno)s %(message)s" ) if os.name == "nt": collect_ignore = ["qasync/_unix.py"] else: collect_ignore = ["qasync/_windows.py"] @fixture(scope="session") def application(): from qasync import QApplication return QApplication([]) qasync-0.27.1/tests/test_qeventloop.py000066400000000000000000000551621452641434200200640ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License import asyncio import logging import sys import os import ctypes import multiprocessing from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor import socket import subprocess import qasync import pytest @pytest.fixture def loop(request, application): lp = qasync.QEventLoop(application) asyncio.set_event_loop(lp) additional_exceptions = [] def fin(): sys.excepthook = orig_excepthook try: lp.close() finally: asyncio.set_event_loop(None) for exc in additional_exceptions: if ( os.name == "nt" and isinstance(exc["exception"], WindowsError) and exc["exception"].winerror == 6 ): # ignore Invalid Handle Errors continue raise exc["exception"] def except_handler(loop, ctx): additional_exceptions.append(ctx) def excepthook(type, *args): lp.stop() orig_excepthook(type, *args) orig_excepthook = sys.excepthook sys.excepthook = excepthook lp.set_exception_handler(except_handler) request.addfinalizer(fin) return lp @pytest.fixture( params=[None, qasync.QThreadExecutor, ThreadPoolExecutor, ProcessPoolExecutor], ) def executor(request): exc_cls = request.param if exc_cls is None: return None exc = exc_cls(1) # FIXME? fixed number of workers? 
request.addfinalizer(exc.shutdown) return exc ExceptionTester = type( "ExceptionTester", (Exception,), {} ) # to make flake8 not complain class TestCanRunTasksInExecutor: """ Test Cases Concerning running jobs in Executors. This needs to be a class because pickle can't serialize closures, but can serialize bound methods. multiprocessing can only handle pickleable functions. """ def test_can_run_tasks_in_executor(self, loop, executor): """Verify that tasks can be run in an executor.""" logging.debug("Loop: {!r}".format(loop)) logging.debug("Executor: {!r}".format(executor)) manager = multiprocessing.Manager() was_invoked = manager.Value(ctypes.c_int, 0) logging.debug("running until complete") loop.run_until_complete(self.blocking_task(loop, executor, was_invoked)) logging.debug("ran") assert was_invoked.value == 1 def test_can_handle_exception_in_executor(self, loop, executor): with pytest.raises(ExceptionTester) as excinfo: loop.run_until_complete( asyncio.wait_for( loop.run_in_executor(executor, self.blocking_failure), timeout=3.0, ) ) assert str(excinfo.value) == "Testing" def blocking_failure(self): logging.debug("raising") try: raise ExceptionTester("Testing") finally: logging.debug("raised!") def blocking_func(self, was_invoked): logging.debug("start blocking_func()") was_invoked.value = 1 logging.debug("end blocking_func()") async def blocking_task(self, loop, executor, was_invoked): logging.debug("start blocking task()") fut = loop.run_in_executor(executor, self.blocking_func, was_invoked) await asyncio.wait_for(fut, timeout=5.0) logging.debug("start blocking task()") def test_can_execute_subprocess(loop): """Verify that a subprocess can be executed.""" async def mycoro(): process = await asyncio.create_subprocess_exec( sys.executable or "python", "-c", "import sys; sys.exit(5)" ) await process.wait() assert process.returncode == 5 loop.run_until_complete(asyncio.wait_for(mycoro(), timeout=3)) def test_can_read_subprocess(loop): """Verify that a subprocess's data can be read from stdout.""" async def mycoro(): process = await asyncio.create_subprocess_exec( sys.executable or "python", "-c", 'print("Hello async world!")', stdout=subprocess.PIPE, ) if process.stdout is None: raise Exception("Output from the process is none") received_stdout = await process.stdout.readexactly(len(b"Hello async world!\n")) await process.wait() assert process.returncode == 0 assert received_stdout.strip() == b"Hello async world!" loop.run_until_complete(asyncio.wait_for(mycoro(), timeout=3)) def test_can_communicate_subprocess(loop): """Verify that a subprocess's data can be passed in/out via stdin/stdout.""" async def mycoro(): process = await asyncio.create_subprocess_exec( sys.executable or "python", "-c", "print(input())", stdout=subprocess.PIPE, stdin=subprocess.PIPE, ) received_stdout, received_stderr = await process.communicate( b"Hello async world!\n" ) await process.wait() assert process.returncode == 0 assert received_stdout.strip() == b"Hello async world!" 
loop.run_until_complete(asyncio.wait_for(mycoro(), timeout=3)) def test_can_terminate_subprocess(loop): """Verify that a subprocess can be terminated.""" # Start a never-ending process async def mycoro(): process = await asyncio.create_subprocess_exec( sys.executable or "python", "-c", "import time\nwhile True: time.sleep(1)" ) process.terminate() await process.wait() assert process.returncode != 0 loop.run_until_complete(mycoro()) @pytest.mark.raises(ExceptionTester) def test_loop_callback_exceptions_bubble_up(loop): """Verify that test exceptions raised in event loop callbacks bubble up.""" def raise_test_exception(): raise ExceptionTester("Test Message") loop.call_soon(raise_test_exception) loop.run_until_complete(asyncio.sleep(0.1)) def test_loop_running(loop): """Verify that loop.is_running returns True when running.""" async def is_running(): nonlocal loop assert loop.is_running() loop.run_until_complete(is_running()) def test_loop_not_running(loop): """Verify that loop.is_running returns False when not running.""" assert not loop.is_running() def test_get_running_loop_fails_after_completion(loop): """Verify that after loop stops, asyncio._get_running_loop() correctly returns None.""" async def is_running_loop(): nonlocal loop assert asyncio._get_running_loop() == loop loop.run_until_complete(is_running_loop()) assert asyncio._get_running_loop() is None def test_loop_can_run_twice(loop): """Verify that loop is correctly reset as asyncio._get_running_loop() when restarted.""" async def is_running_loop(): nonlocal loop assert asyncio._get_running_loop() == loop loop.run_until_complete(is_running_loop()) loop.run_until_complete(is_running_loop()) def test_can_function_as_context_manager(application): """Verify that a QEventLoop can function as its own context manager.""" with qasync.QEventLoop(application) as loop: assert isinstance(loop, qasync.QEventLoop) loop.call_soon(loop.stop) loop.run_forever() def test_future_not_done_on_loop_shutdown(loop): """Verify RuntimError occurs when loop stopped before Future completed with run_until_complete.""" loop.call_later(0.1, loop.stop) fut = asyncio.Future() with pytest.raises(RuntimeError): loop.run_until_complete(fut) def test_call_later_must_not_coroutine(loop): """Verify TypeError occurs call_later is given a coroutine.""" async def mycoro(): pass with pytest.raises(TypeError): loop.call_soon(mycoro) def test_call_later_must_be_callable(loop): """Verify TypeError occurs call_later is not given a callable.""" not_callable = object() with pytest.raises(TypeError): loop.call_soon(not_callable) def test_call_at(loop): """Verify that loop.call_at works as expected.""" def mycallback(): nonlocal was_invoked was_invoked = True was_invoked = False loop.call_at(loop.time() + 0.05, mycallback) loop.run_until_complete(asyncio.sleep(0.1)) assert was_invoked def test_get_set_debug(loop): """Verify get_debug and set_debug work as expected.""" loop.set_debug(True) assert loop.get_debug() loop.set_debug(False) assert not loop.get_debug() @pytest.fixture def sock_pair(request): """Create socket pair. If socket.socketpair isn't available, we emulate it. """ def fin(): if client_sock is not None: client_sock.close() if srv_sock is not None: srv_sock.close() client_sock = srv_sock = None request.addfinalizer(fin) # See if socketpair() is available. 
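# socket.socketpair() only gained Windows support in Python 3.5, so on platforms
# where it is missing we fall back to the non-blocking loopback
# listen/connect/accept sequence below.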
have_socketpair = hasattr(socket, "socketpair") if have_socketpair: client_sock, srv_sock = socket.socketpair() return client_sock, srv_sock # Create a non-blocking temporary server socket temp_srv_sock = socket.socket() temp_srv_sock.setblocking(False) temp_srv_sock.bind(("", 0)) port = temp_srv_sock.getsockname()[1] temp_srv_sock.listen(1) # Create non-blocking client socket client_sock = socket.socket() client_sock.setblocking(False) try: client_sock.connect(("localhost", port)) except socket.error as err: # Error 10035 (operation would block) is not an error, as we're doing this with a # non-blocking socket. if err.errno != 10035: raise # Use select to wait for connect() to succeed. import select timeout = 1 readable = select.select([temp_srv_sock], [], [], timeout)[0] if temp_srv_sock not in readable: raise Exception("Client socket not connected in {} second(s)".format(timeout)) srv_sock, _ = temp_srv_sock.accept() return client_sock, srv_sock def test_can_add_reader(loop, sock_pair): """Verify that we can add a reader callback to an event loop.""" def can_read(): if fut.done(): return data = srv_sock.recv(1) if len(data) != 1: return nonlocal got_msg got_msg = data # Indicate that we're done fut.set_result(None) srv_sock.close() def write(): client_sock.send(ref_msg) client_sock.close() ref_msg = b"a" client_sock, srv_sock = sock_pair loop.call_soon(write) exp_num_notifiers = len(loop._read_notifiers) + 1 got_msg = None fut = asyncio.Future() loop._add_reader(srv_sock.fileno(), can_read) assert len(loop._read_notifiers) == exp_num_notifiers, "Notifier should be added" loop.run_until_complete(asyncio.wait_for(fut, timeout=1.0)) assert got_msg == ref_msg def test_can_remove_reader(loop, sock_pair): """Verify that we can remove a reader callback from an event loop.""" def can_read(): data = srv_sock.recv(1) if len(data) != 1: return nonlocal got_msg got_msg = data client_sock, srv_sock = sock_pair got_msg = None loop._add_reader(srv_sock.fileno(), can_read) exp_num_notifiers = len(loop._read_notifiers) - 1 loop._remove_reader(srv_sock.fileno()) assert len(loop._read_notifiers) == exp_num_notifiers, "Notifier should be removed" client_sock.send(b"a") client_sock.close() # Run for a short while to see if we get a read notification loop.call_later(0.1, loop.stop) loop.run_forever() assert got_msg is None, "Should not have received a read notification" def test_remove_reader_after_closing(loop, sock_pair): """Verify that we can remove a reader callback from an event loop.""" client_sock, srv_sock = sock_pair loop._add_reader(srv_sock.fileno(), lambda: None) loop.close() loop._remove_reader(srv_sock.fileno()) def test_remove_writer_after_closing(loop, sock_pair): """Verify that we can remove a reader callback from an event loop.""" client_sock, srv_sock = sock_pair loop._add_writer(client_sock.fileno(), lambda: None) loop.close() loop._remove_writer(client_sock.fileno()) def test_add_reader_after_closing(loop, sock_pair): """Verify that we can remove a reader callback from an event loop.""" client_sock, srv_sock = sock_pair loop.close() with pytest.raises(RuntimeError): loop._add_reader(srv_sock.fileno(), lambda: None) def test_add_writer_after_closing(loop, sock_pair): """Verify that we can remove a reader callback from an event loop.""" client_sock, srv_sock = sock_pair loop.close() with pytest.raises(RuntimeError): loop._add_writer(client_sock.fileno(), lambda: None) def test_can_add_writer(loop, sock_pair): """Verify that we can add a writer callback to an event loop.""" def can_write(): 
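# A freshly connected socket is normally writable straight away, so this callback
# is expected to fire on the first pass through the event loop.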
if not fut.done(): # Indicate that we're done fut.set_result(None) client_sock.close() client_sock, _ = sock_pair fut = asyncio.Future() loop._add_writer(client_sock.fileno(), can_write) assert len(loop._write_notifiers) == 1, "Notifier should be added" loop.run_until_complete(asyncio.wait_for(fut, timeout=1.0)) def test_can_remove_writer(loop, sock_pair): """Verify that we can remove a writer callback from an event loop.""" client_sock, _ = sock_pair loop._add_writer(client_sock.fileno(), lambda: None) loop._remove_writer(client_sock.fileno()) assert not loop._write_notifiers, "Notifier should be removed" def test_add_reader_should_disable_qsocket_notifier_on_callback(loop, sock_pair): """Verify that add_reader disables QSocketNotifier during callback.""" def can_read(): nonlocal num_calls num_calls += 1 if num_calls == 2: # Since we get called again, the QSocketNotifier should've been re-enabled before # this call (although disabled during) assert not notifier.isEnabled() srv_sock.recv(1) fut.set_result(None) srv_sock.close() return assert not notifier.isEnabled() def write(): client_sock.send(b"a") client_sock.close() num_calls = 0 client_sock, srv_sock = sock_pair loop.call_soon(write) fut = asyncio.Future() loop._add_reader(srv_sock.fileno(), can_read) notifier = loop._read_notifiers[srv_sock.fileno()] loop.run_until_complete(asyncio.wait_for(fut, timeout=1.0)) def test_add_writer_should_disable_qsocket_notifier_on_callback(loop, sock_pair): """Verify that add_writer disables QSocketNotifier during callback.""" def can_write(): nonlocal num_calls num_calls += 1 if num_calls == 2: # Since we get called again, the QSocketNotifier should've been re-enabled before # this call (although disabled during) assert not notifier.isEnabled() fut.set_result(None) client_sock.close() return assert not notifier.isEnabled() num_calls = 0 client_sock, _ = sock_pair fut = asyncio.Future() loop._add_writer(client_sock.fileno(), can_write) notifier = loop._write_notifiers[client_sock.fileno()] loop.run_until_complete(asyncio.wait_for(fut, timeout=1.0)) def test_reader_writer_echo(loop, sock_pair): """Verify readers and writers can send data to each other.""" c_sock, s_sock = sock_pair async def mycoro(): c_reader, c_writer = await asyncio.open_connection(sock=c_sock) s_reader, s_writer = await asyncio.open_connection(sock=s_sock) data = b"Echo... Echo... Echo..." 
s_writer.write(data) await s_writer.drain() read_data = await c_reader.readexactly(len(data)) assert data == read_data s_writer.close() loop.run_until_complete(asyncio.wait_for(mycoro(), timeout=1.0)) def test_regression_bug13(loop, sock_pair): """Verify that a simple handshake between client and server works as expected.""" c_sock, s_sock = sock_pair client_done, server_done = asyncio.Future(), asyncio.Future() async def server_coro(): s_reader, s_writer = await asyncio.open_connection(sock=s_sock) s_writer.write(b"1") await s_writer.drain() assert (await s_reader.readexactly(1)) == b"2" s_writer.write(b"3") await s_writer.drain() server_done.set_result(True) result1 = None result3 = None async def client_coro(): def cb1(): nonlocal result1 assert result1 is None loop._remove_reader(c_sock.fileno()) result1 = c_sock.recv(1) loop._add_writer(c_sock.fileno(), cb2) def cb2(): nonlocal result3 assert result3 is None c_sock.send(b"2") loop._remove_writer(c_sock.fileno()) loop._add_reader(c_sock.fileno(), cb3) def cb3(): nonlocal result3 assert result3 is None result3 = c_sock.recv(1) client_done.set_result(True) loop._add_reader(c_sock.fileno(), cb1) _client_task = asyncio.ensure_future(client_coro()) _server_task = asyncio.ensure_future(server_coro()) both_done = asyncio.gather(client_done, server_done) loop.run_until_complete(asyncio.wait_for(both_done, timeout=1.0)) assert result1 == b"1" assert result3 == b"3" def test_add_reader_replace(loop, sock_pair): c_sock, s_sock = sock_pair callback_invoked = asyncio.Future() called1 = False called2 = False def any_callback(): if not callback_invoked.done(): callback_invoked.set_result(True) loop._remove_reader(c_sock.fileno()) def callback1(): # the "bad" callback: if this gets invoked, something went wrong nonlocal called1 called1 = True any_callback() def callback2(): # the "good" callback: this is the one which should get called nonlocal called2 called2 = True any_callback() async def server_coro(): s_reader, s_writer = await asyncio.open_connection(sock=s_sock) s_writer.write(b"foo") await s_writer.drain() async def client_coro(): loop._add_reader(c_sock.fileno(), callback1) loop._add_reader(c_sock.fileno(), callback2) await callback_invoked loop._remove_reader(c_sock.fileno()) assert (await loop.sock_recv(c_sock, 3)) == b"foo" client_done = asyncio.ensure_future(client_coro()) server_done = asyncio.ensure_future(server_coro()) both_done = asyncio.wait( [server_done, client_done], return_when=asyncio.FIRST_EXCEPTION ) loop.run_until_complete(asyncio.wait_for(both_done, timeout=0.1)) assert not called1 assert called2 def test_add_writer_replace(loop, sock_pair): c_sock, s_sock = sock_pair callback_invoked = asyncio.Future() called1 = False called2 = False def any_callback(): if not callback_invoked.done(): callback_invoked.set_result(True) loop._remove_writer(c_sock.fileno()) def callback1(): # the "bad" callback: if this gets invoked, something went wrong nonlocal called1 called1 = True any_callback() def callback2(): # the "good" callback: this is the one which should get called nonlocal called2 called2 = True any_callback() async def client_coro(): loop._add_writer(c_sock.fileno(), callback1) loop._add_writer(c_sock.fileno(), callback2) await callback_invoked loop._remove_writer(c_sock.fileno()) loop.run_until_complete(asyncio.wait_for(client_coro(), timeout=0.1)) assert not called1 assert called2 def test_remove_reader_idempotence(loop, sock_pair): fd = sock_pair[0].fileno() def cb(): pass removed0 = loop._remove_reader(fd) 
loop._add_reader(fd, cb) removed1 = loop._remove_reader(fd) removed2 = loop._remove_reader(fd) assert not removed0 assert removed1 assert not removed2 def test_remove_writer_idempotence(loop, sock_pair): fd = sock_pair[0].fileno() def cb(): pass removed0 = loop._remove_writer(fd) loop._add_writer(fd, cb) removed1 = loop._remove_writer(fd) removed2 = loop._remove_writer(fd) assert not removed0 assert removed1 assert not removed2 def test_scheduling(loop, sock_pair): s1, s2 = sock_pair fd = s1.fileno() cb_called = asyncio.Future() def writer_cb(fut): if fut.done(): cb_called.set_exception(ValueError("writer_cb called twice")) fut.set_result(None) def fut_cb(fut): loop._remove_writer(fd) cb_called.set_result(None) fut = asyncio.Future() fut.add_done_callback(fut_cb) loop._add_writer(fd, writer_cb, fut) loop.run_until_complete(cb_called) @pytest.mark.xfail( "sys.version_info < (3,4)", reason="Doesn't work on python older than 3.4", ) def test_exception_handler(loop): handler_called = False coro_run = False loop.set_debug(False) async def future_except(): nonlocal coro_run coro_run = True loop.stop() raise ExceptionTester() def exct_handler(loop, data): nonlocal handler_called handler_called = True loop.set_exception_handler(exct_handler) asyncio.ensure_future(future_except()) loop.run_forever() assert coro_run assert handler_called def test_exception_handler_simple(loop): handler_called = False def exct_handler(loop, data): nonlocal handler_called handler_called = True loop.set_exception_handler(exct_handler) fut1 = asyncio.Future() fut1.set_exception(ExceptionTester()) asyncio.ensure_future(fut1) del fut1 loop.call_later(0.1, loop.stop) loop.run_forever() assert handler_called def test_not_running_immediately_after_stopped(loop): async def mycoro(): assert loop.is_running() await asyncio.sleep(0) loop.stop() assert not loop.is_running() assert not loop.is_running() loop.run_until_complete(mycoro()) assert not loop.is_running() def teardown_module(module): """ Remove handlers from all loggers See: https://github.com/pytest-dev/pytest/issues/5502 """ loggers = [logging.getLogger()] + list(logging.Logger.manager.loggerDict.values()) for logger in loggers: handlers = getattr(logger, "handlers", []) for handler in handlers: if isinstance(logger, logging.Logger): logger.removeHandler(handler) qasync-0.27.1/tests/test_qthreadexec.py000066400000000000000000000023541452641434200201600ustar00rootroot00000000000000# © 2018 Gerard Marull-Paretas # © 2014 Mark Harviston # © 2014 Arve Knudsen # BSD License import pytest import qasync @pytest.fixture def executor(request): exe = qasync.QThreadExecutor(5) request.addfinalizer(exe.shutdown) return exe @pytest.fixture def shutdown_executor(): exe = qasync.QThreadExecutor(5) exe.shutdown() return exe def test_shutdown_after_shutdown(shutdown_executor): with pytest.raises(RuntimeError): shutdown_executor.shutdown() def test_ctx_after_shutdown(shutdown_executor): with pytest.raises(RuntimeError): with shutdown_executor: pass def test_submit_after_shutdown(shutdown_executor): with pytest.raises(RuntimeError): shutdown_executor.submit(None) def test_stack_recursion_limit(executor): # Test that worker threads have sufficient stack size for the default # sys.getrecursionlimit. If not this should fail with SIGSEGV or SIGBUS # (or event SIGILL?) def rec(a, *args, **kwargs): rec(a, *args, **kwargs) fs = [executor.submit(rec, 1) for _ in range(10)] for f in fs: with pytest.raises(RecursionError): f.result()
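# ---------------------------------------------------------------------------
# Illustrative usage sketch (not a file from this repository): a minimal program
# driving asyncio from a Qt application using only names exported by the package
# above (QApplication, QEventLoop); the coroutine name main_coro is made up.
import asyncio

from qasync import QApplication, QEventLoop


async def main_coro():
    # Any asyncio code runs here while the Qt event loop keeps processing events.
    await asyncio.sleep(0.1)


if __name__ == "__main__":
    app = QApplication([])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    with loop:
        loop.run_until_complete(main_coro())
# Alternatively, qasync.run(main_coro()) wraps the same setup behind
# DefaultQEventLoopPolicy, creating the QApplication on demand.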