zlmdb-22.6.1 source archive (git commit 32ed7f7396fdcee5fee10f57188d43387e055b16)

==> zlmdb-22.6.1/.editorconfig <==

# http://editorconfig.org

root = true

[*]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf

[*.bat]
indent_style = tab
end_of_line = crlf

[LICENSE]
insert_final_newline = false

[Makefile]
indent_style = tab

==> zlmdb-22.6.1/.github/ISSUE_TEMPLATE.md <==

* ZLMDB version:
* Python version:
* Operating System:

### Description

Describe what you were trying to get done.
Tell us what happened, what went wrong, and what you expected to happen.

### What I Did

```
Paste the command(s) you ran and the output.
If there was a crash, please include the traceback here.
```

==> zlmdb-22.6.1/.github/workflows/deploy.yml <==

name: deploy

on:
  # Trigger this workflow when the "main" workflow has completed successfully
  # https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows#workflow_run
  workflow_run:
    workflows:
      - main
    branches:
      - master
    types:
      - completed

jobs:
  deploy:
    if: github.ref == 'refs/heads/master'

    # https://github.blog/changelog/2020-12-15-github-actions-environments-environment-protection-rules-and-environment-secrets-beta/
    # https://docs.github.com/en/free-pro-team@latest/actions/reference/environments
    environment: deploy_aws

    env:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      WAMP_PRIVATE_KEY: ${{ secrets.WAMP_PRIVATE_KEY }}

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
      # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#adding-a-system-path
      - name: Set environment
        run: |
          echo "${HOME}/.local/bin" >> $GITHUB_PATH
          echo BUILD_DATE=`date -u +"%Y-%m-%d"` >> $GITHUB_ENV
          echo ZLMDB_VCS_REF=`git rev-parse --short ${GITHUB_SHA}` >> $GITHUB_ENV
          echo ZLMDB_BUILD_ID=$(date --utc +%Y%m%d)-$(git rev-parse --short ${GITHUB_SHA}) >> $GITHUB_ENV
          echo ZLMDB_VERSION=$(grep -E '^(__version__)' ./zlmdb/_version.py | cut -d ' ' -f3 | sed -e 's|[u"'\'']||g') >> $GITHUB_ENV

      # - name: Set environment - 2
      #   run: |
      #     echo ZLMDB_VCS_REF=`git --git-dir="./.git" rev-list -n 1 v${ZLMDB_VERSION} --abbrev-commit` >> $GITHUB_ENV

      - name: Install OS package dependencies
        run: |
          sudo apt update
          sudo apt install build-essential libssl-dev libffi-dev libunwind-dev \
            libreadline-dev zlib1g-dev libbz2-dev libsqlite3-dev libncurses5-dev \
            libsnappy-dev

      - name: Set up Python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'
          architecture: 'x64'

      - name: Install Python package dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Deploy (build, package and upload)
        run: |
          ./deploy.sh
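The "Set environment" step above scrapes the package version out of zlmdb/_version.py with a grep | cut | sed pipeline, which only works if that file keeps a single plain one-line assignment. zlmdb/_version.py itself is not part of this listing, so the following minimal sketch of its assumed layout is an illustration, not the verbatim file:

```python
# zlmdb/_version.py -- assumed layout (the file is not included in this listing).
#
# deploy.yml extracts the version with:
#
#   grep -E '^(__version__)' ./zlmdb/_version.py | cut -d ' ' -f3 | sed -e 's|[u"'\'']||g'
#
# i.e. it expects "__version__ = '<version>'" on a single line: grep finds the
# assignment, cut takes the third whitespace-separated field, and sed strips the
# quotes (and a legacy u'' prefix). The version below matches the archive name.
__version__ = '22.6.1'
```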
==> zlmdb-22.6.1/.github/workflows/main.yml <==

# GitHub actions for zLMDB CI/CD
# https://github.com/crossbario/zlmdb/actions
#
# See also:
#
#   * https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
#   * https://github.com/actions/starter-workflows/blob/main/ci/python-package.yml
#
name: main

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install OS package dependencies
        run: |
          sudo apt update
          sudo apt install libenchant-dev libbz2-dev libsnappy-dev libunwind-dev

      - name: Set up Python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'
          architecture: 'x64'

      - name: Install Python package dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Run Flake8
        run: tox -c tox.ini -e flake8

      - name: Run Yapf
        run: tox -c tox.ini -e yapf

      - name: Run MyPy
        run: tox -c tox.ini -e mypy

  test:
    env:
      CB_FULLTESTS: 1

    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest]
        # os: [ubuntu-latest, macos-latest, windows-latest]

        # https://github.com/actions/setup-python#specifying-a-pypy-version
        python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7', 'pypy-3.8']

    # https://github.blog/changelog/2020-04-15-github-actions-new-workflow-features/
    # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepscontinue-on-error
    continue-on-error: false

    steps:
      # Checkout sources
      - uses: actions/checkout@v2

      # Install OS packages, as we install Python packages from source:
      #
      #   libenchant-dev: needed for pyenchant, needed for sphinx-spellcheck
      #   libbz2-dev, libsnappy-dev: needed for compression
      #   libunwind-dev: needed for vmprof
      - name: Install OS package dependencies
        run: |
          sudo apt update
          sudo apt install build-essential libssl-dev libffi-dev libunwind-dev \
            libreadline-dev zlib1g-dev libbz2-dev libsqlite3-dev libncurses5-dev \
            libsnappy-dev

      # Use this Python
      # https://github.com/actions/setup-python/blob/main/README.md
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Python package dependencies
        run: |
          python -m pip install -U pip
          pip install -U -r requirements-dev.txt

      - name: Install zLMDB
        run: |
          pip install .
      - name: Run unit tests (PyTest)
        run: |
          tox -c tox.ini

  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install OS package dependencies
        run: |
          sudo apt update
          sudo apt install libenchant-dev libbz2-dev libsnappy-dev libunwind-dev

      - name: Set up Python 3.x
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'
          architecture: 'x64'

      - name: Install Python package dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Run Sphinx
        run: tox -c tox.ini -e sphinx

==> zlmdb-22.6.1/.gitignore <==

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

.idea
.test*
testdb
__pycache__/*
*.pyc

==> zlmdb-22.6.1/.gitmodules <==

[submodule "flatbuffers"]
	path = deps/flatbuffers
	url = https://github.com/google/flatbuffers.git

==> zlmdb-22.6.1/LICENSE <==

MIT License

Copyright (c), Crossbar.io Technologies GmbH

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==> zlmdb-22.6.1/MANIFEST.in <==

include CONTRIBUTING.rst
include HISTORY.rst
include LICENSE
include README.rst

recursive-include tests *
recursive-exclude * __pycache__
recursive-exclude * *.py[co]

recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
recursive-include zlmdb/flatbuffers *.fbs *.bfbs

include requirements.txt
include requirements-dev.txt

==> zlmdb-22.6.1/Makefile <==

.PHONY: clean clean_docs clean_test clean_pyc clean_build docs help
.DEFAULT_GOAL := help

define BROWSER_PYSCRIPT
import os, webbrowser, sys
try:
	from urllib import pathname2url
except:
	from urllib.request import pathname2url

webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
endef
export BROWSER_PYSCRIPT

define PRINT_HELP_PYSCRIPT
import re, sys

for line in sys.stdin:
	match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
	if match:
		target, help = match.groups()
		print("%-20s %s" % (target, help))
endef
export PRINT_HELP_PYSCRIPT

BROWSER := python -c "$$BROWSER_PYSCRIPT"

help:
	@python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)

clean: clean_build clean_pyc clean_test clean_docs ## remove all build, test, coverage, Python artifacts and docs

clean_docs:
	rm -fr docs/_build

clean_build: ## remove build artifacts
	rm -fr build/
	rm -fr dist/
	rm -fr .eggs/
	find . -name '*.egg-info' -exec rm -fr {} +
	find . -name '*.egg' -exec rm -f {} +

clean_pyc: ## remove Python file artifacts
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -fr {} +

clean_test: ## remove test and coverage artifacts
	rm -fr .tox/
	rm -f .coverage
	rm -f .coverage.*
	rm -fr htmlcov/
	rm -fr .pytest_cache
	-rm -rf .test*
	-rm -rf .mypy_cache

lint: ## check style with flake8
	flake8 zlmdb tests

test_single:
	clear && pytest -v -s zlmdb/tests/test_basic.py

test_pmaps:
	clear && pytest -v -s zlmdb/tests/test_pmaps.py

test_indexes:
	clear && pytest -v -s zlmdb/tests/test_pmap_indexes.py

test_select:
	clear && pytest -v -s zlmdb/tests/test_select.py

#
# test ZLMDB high level API
#
test_zdb: test_zdb_etcd test_zdb_df test_zdb_dyn

test_zdb_etcd:
	python tests/zdb/test_zdb_etcd.py

test_zdb_df:
	python tests/zdb/test_zdb_df.py

test_zdb_dyn:
	python tests/zdb/test_zdb_dyn.py

test_zdb_fbs:
	python tests/zdb/test_zdb_fbs.py

test_quick:
	pytest

test:
	tox -e py36,flake8,coverage,mypy,yapf,sphinx

test_all:
	tox

coverage: ## check code coverage quickly with the default Python
	#coverage run --source zlmdb -m pytest
	coverage run --source zlmdb --omit="zlmdb/flatbuffer/reflection/*,zlmdb/flatbuffer/demo/*,zlmdb/tests/*" -m pytest -v -s zlmdb
	coverage report -m
	coverage html
	$(BROWSER) htmlcov/index.html

docs: ## generate Sphinx HTML documentation, including API docs
	sphinx-build -b html ./docs ./docs/_build
	$(BROWSER) docs/_build/index.html

dist: clean ## builds source and wheel package
	python setup.py sdist bdist_wheel
	ls -la dist
	unzip -l dist/zlmdb-*-py2.py3-none-any.whl

# publish to PyPI
publish: dist
	twine upload dist/*

install:
	-pip uninstall -y pytest_asyncio # remove the broken shit
	-pip uninstall -y pytest_cov # remove the broken shit
	pip install -e .
	pip install -r requirements-dev.txt

yapf:
	yapf --version
	yapf -rd --style=yapf.ini --exclude="zlmdb/flatbuffers/*" --exclude="zlmdb/tests/MNodeLog.py" zlmdb

# auto-format code - WARNING: this may change files, in-place!
autoformat:
	yapf -ri --style=yapf.ini --exclude="zlmdb/flatbuffers/*" zlmdb

FLATC=/usr/local/bin/flatc

# git submodule update --init --recursive
# git submodule update --remote --merge
# git submodule foreach git pull

update_flatbuffers:
	rm -rf ./flatbuffers
	cp -R deps/flatbuffers/python/flatbuffers .

generate_flatbuffers_reflection:
	$(FLATC) --python -o zlmdb/flatbuffers/ deps/flatbuffers/reflection/reflection.fbs

==> zlmdb-22.6.1/README.rst <==

Introduction to zLMDB
=====================

.. image:: https://img.shields.io/pypi/v/zlmdb.svg
   :target: https://pypi.python.org/pypi/zlmdb
   :alt: PyPI

.. image:: https://github.com/crossbario/zlmdb/workflows/main/badge.svg
   :target: https://github.com/crossbario/zlmdb/actions?query=workflow%3Amain
   :alt: Build

.. image:: https://readthedocs.org/projects/zlmdb/badge/?version=latest
   :target: https://zlmdb.readthedocs.io/en/latest/?badge=latest
   :alt: Documentation

.. image:: https://github.com/crossbario/zlmdb/workflows/deploy/badge.svg
   :target: https://github.com/crossbario/zlmdb/actions?query=workflow%3Adeploy
   :alt: Deploy

Object-relational in-memory database layer based on LMDB:

* High-performance (see below)
* Supports multiple serializers (JSON, CBOR, Pickle, Flatbuffers)
* Supports export/import from/to Apache Arrow
* Supports native Numpy arrays and Pandas data frames
* Automatic indexes
* Free software (MIT license)
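To make the README's "database layer based on LMDB" concrete before the deployment script below: zlmdb builds its object-relational layer (persistent maps, pluggable serializers, automatic indexes) on top of the raw LMDB key-value store. Here is a minimal sketch of that underlying store, using only the plain py-lmdb package — this is deliberately *not* zlmdb's own higher-level API, and the path and keys are made up for the example:

```python
import lmdb

# Open (creating if absent) an LMDB environment; path and map size are examples.
env = lmdb.open('/tmp/zlmdb-demo', map_size=2 ** 30)   # up to 1 GiB of data

# LMDB is transactional: every read or write happens inside a transaction,
# and both keys and values are plain byte strings.
with env.begin(write=True) as txn:
    txn.put(b'user:1', b'{"name": "alice"}')

with env.begin() as txn:
    print(txn.get(b'user:1'))   # b'{"name": "alice"}'

env.close()
```

Everything in the feature list above — typed persistent maps, JSON/CBOR/Pickle/Flatbuffers value serialization, and automatically maintained indexes — is the layer zlmdb adds on top of this byte-oriented store.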
==> zlmdb-22.6.1/deploy.sh <==

#!/bin/bash

set +o verbose -o errexit

# AWS_DEFAULT_REGION    : must be set in CI build context!
# AWS_S3_BUCKET_NAME    : must be set in CI build context!
# AWS_ACCESS_KEY_ID     : must be set in CI build context!
# AWS_SECRET_ACCESS_KEY : must be set in CI build context!
# WAMP_PRIVATE_KEY      : must be set in CI build context!

echo 'AWS env vars (should be 4):'
env | grep AWS_ | wc -l

echo 'WAMP_PRIVATE_KEY env var (should be 1):'
env | grep WAMP_PRIVATE_KEY | wc -l

# set up awscli package
echo 'installing aws tools ..'
pip install awscli wheel
which aws
aws --version

# build python source dist and wheels
echo 'building package ..'
python setup.py sdist bdist_wheel --universal
ls -la ./dist

# upload to S3: https://s3.eu-central-1.amazonaws.com/crossbarbuilder/wheels/
echo 'uploading package ..'

# aws s3 cp --recursive ./dist s3://${AWS_S3_BUCKET_NAME}/wheels
aws s3 rm s3://${AWS_S3_BUCKET_NAME}/wheels/zlmdb-${ZLMDB_VERSION}-py2.py3-none-any.whl
aws s3 rm s3://${AWS_S3_BUCKET_NAME}/wheels/zlmdb-latest-py2.py3-none-any.whl
aws s3 cp --acl public-read ./dist/zlmdb-${ZLMDB_VERSION}-py2.py3-none-any.whl s3://${AWS_S3_BUCKET_NAME}/wheels/zlmdb-${ZLMDB_VERSION}-py2.py3-none-any.whl
aws s3 cp --acl public-read ./dist/zlmdb-${ZLMDB_VERSION}-py2.py3-none-any.whl s3://${AWS_S3_BUCKET_NAME}/wheels/zlmdb-latest-py2.py3-none-any.whl

#aws s3api copy-object --acl public-read \
#    --copy-source wheels/zlmdb-${ZLMDB_VERSION}-py2.py3-none-any.whl --bucket ${AWS_S3_BUCKET_NAME} \
#    --key wheels/zlmdb-latest-py2.py3-none-any.whl

aws s3 ls ${AWS_S3_BUCKET_NAME}/wheels/zlmdb-

# tell crossbar-builder about this new wheel push
# get 'wamp' command, always with latest autobahn master
pip install -q -I https://github.com/crossbario/autobahn-python/archive/master.zip#egg=autobahn[twisted,serialization,encryption]

# use 'wamp' to notify crossbar-builder
wamp --max-failures 3 \
     --authid wheel_pusher \
     --url ws://office2dmz.crossbario.com:8008/ \
     --realm webhook call builder.wheel_pushed \
     --keyword name zlmdb \
     --keyword publish true

echo ''
echo 'package uploaded to:'
echo ''
echo '      https://crossbarbuilder.s3.eu-central-1.amazonaws.com/wheels/zlmdb-'${ZLMDB_VERSION}'-py2.py3-none-any.whl'
echo '      https://crossbarbuilder.s3.eu-central-1.amazonaws.com/wheels/zlmdb-latest-py2.py3-none-any.whl'
echo ''

==> zlmdb-22.6.1/deps/ <==

(directory; contains the "flatbuffers" git submodule checked out from
https://github.com/google/flatbuffers.git — see .gitmodules above)

==> zlmdb-22.6.1/docs/Makefile <==

# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = python -msphinx
SPHINXPROJ    = zlmdb
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

==> zlmdb-22.6.1/docs/_static/ <==

(binary assets, omitted from this listing: performance_test1.ods,
performance_test2.png, zlmdb_dbformat.ods, zlmdb_dbformat.pdf)
mA3c䛛kuMHth:WL7;)+쪎CSq\+\ aehu3VVgnCZ_ԇ0!&DY j #.0K"0V#0fFFɰkxQG|bzxk{bxt=<~8a- f endstream endobj 16 0 obj 14570 endobj 17 0 obj <> endobj 18 0 obj <> stream x]͎0y /QkH(Ģ?*JsOJ]>w|{~οű=ٜ6c)\!bfzzmS_cb?\ӏ1iaMյ9󹙾4אC |<`Dǖ*؅Դ!6%d뢨z0t͕KNgSԦhQT:(`Gށ=كKWR+~Wo)oxKރw ޓۂ\zӿB_o_,K [;xZK!G]굋PLR38KQߣ.`,E/ B{u =S\;_wpW[ mΥ ѦxGߴScӞGCq*A5 endstream endobj 19 0 obj <> endobj 20 0 obj <> endobj 21 0 obj <> endobj 1 0 obj <>/Contents 2 0 R>> endobj 4 0 obj <> endobj 22 0 obj <> endobj 23 0 obj < /Producer /CreationDate(D:20181002005834+02'00')>> endobj xref 0 24 0000000000 65535 f 0000043303 00000 n 0000000019 00000 n 0000002331 00000 n 0000043446 00000 n 0000002352 00000 n 0000014089 00000 n 0000014111 00000 n 0000014312 00000 n 0000014836 00000 n 0000015209 00000 n 0000026452 00000 n 0000026475 00000 n 0000026683 00000 n 0000027119 00000 n 0000027416 00000 n 0000042073 00000 n 0000042096 00000 n 0000042292 00000 n 0000042821 00000 n 0000043196 00000 n 0000043248 00000 n 0000043545 00000 n 0000043629 00000 n trailer < <40651D1E822384DFE15DD2DAEBEDAEAE> ] /DocChecksum /39BFFC342DC56738310965FAF2C6AC92 >> startxref 43796 %%EOF zlmdb-22.6.1/docs/conf.py000077500000000000000000000126161426100523600151510ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# # import os # import sys # sys.path.insert(0, os.path.abspath('.')) import sys import os import time try: import sphinx_rtd_theme except ImportError: sphinx_rtd_theme = None try: from sphinxcontrib import spelling except ImportError: spelling = None sys.path.insert(0, os.path.abspath('..')) # monkey-patch txaio so that we can "use" both twisted *and* asyncio, # at least at import time -- this is so the autodoc stuff can # successfully import autobahn.twisted.* as well as autobahn.asyncio.* # (usually, you can only import one or the other in a single Python # interpreter) import txaio def use_tx(): "monkey-patched for doc-building" from txaio import tx txaio._use_framework(tx) def use_aio(): "monkey-patched for doc-building" from txaio import aio txaio._use_framework(aio) txaio.use_twisted = use_tx txaio.use_asyncio = use_aio # -- Project information ----------------------------------------------------- project = 'zLMDB' author = 'Crossbar.io Project' language = 'en' this_year = '{0}'.format(time.strftime('%Y')) if this_year != '2018': copyright = '2018-{0}, Crossbar.io Technologies GmbH'.format(this_year) else: copyright = '2018, Crossbar.io Technologies GmbH' base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) with open(os.path.join(base_dir, "zlmdb", "_version.py")) as f: # defines __version__ exec(f.read()) version = release = __version__ # noqa # -- General configuration --------------------------------------------------- # https://sphinx-autoapi.readthedocs.io/ # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html autoclass_content = 'both' autodoc_member_order = 'bysource' autoapi_keep_files = True autoapi_type = 'python' autoapi_dirs = [base_dir, os.path.join(base_dir, 'zlmdb')] # 'members', # 'undoc-members', # 'private-members', # 'show-inheritance', # 'show-inheritance-diagram', # 'show-module-summary', # 'special-members', # 'imported-members', autoapi_options = ['members', 'show-inheritance'] # Check if we are building on readthedocs RTD_BUILD = os.environ.get('READTHEDOCS', None) == 'True' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinx.ext.todo', 'sphinx.ext.doctest', 'sphinx.ext.inheritance_diagram', # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html 'sphinx.ext.autodoc', 'sphinx.ext.autodoc.typehints', 'sphinxcontrib.spelling', # https://sphinx-autoapi.readthedocs.io # 'autoapi.extension', # Usage: .. thumbnail:: picture.png # Installation: pip install sphinxcontrib-images # Source: https://github.com/sphinx-contrib/images # 'sphinxcontrib.images', ] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. language = 'en' # extensions not available on RTD if spelling is not None: extensions.append('sphinxcontrib.spelling') spelling_lang = 'en_US' spelling_show_suggestions = False spelling_word_list_filename = 'spelling_wordlist.txt' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'contents' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
# This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # if sphinx_rtd_theme: html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] else: html_theme = "default" pygments_style = 'sphinx' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] intersphinx_mapping = { 'py3': ('https://docs.python.org/3', None), 'python': ('https://docs.python.org/3', None), 'rtd': ('https://docs.readthedocs.io/en/latest/', None), 'txaio': ('https://txaio.readthedocs.io/en/latest/', None), 'autobahn': ('https://autobahn.readthedocs.io/en/latest/', None), 'zlmdb': ('https://zlmdb.readthedocs.io/en/latest/', None), 'numpy': ('http://docs.scipy.org/doc/numpy', None), 'scipy': ('http://docs.scipy.org/doc/scipy/reference', None), 'matplotlib': ('http://matplotlib.org/stable', None), } zlmdb-22.6.1/docs/contents.rst000066400000000000000000000002141426100523600162250ustar00rootroot00000000000000:tocdepth: 0 :orphan: .. _site_contents: Site Contents ============= .. toctree:: :maxdepth: 3 index performance reference zlmdb-22.6.1/docs/gen.py000066400000000000000000000013041426100523600147620ustar00rootroot00000000000000 def print_tables(tables, prefix='zlmdb', inherit=False): tables = sorted(tables) for table in tables: print('* :class:`{}.{}`'.format(prefix, table)) print('') print('------') print('') tmpl = """.. autoclass:: {}.{} :members: """ if inherit: tmpl += " :show-inheritance:" for table in tables: print(tmpl.format(prefix, table)) from zlmdb import __all__ as a1 print_tables([x for x in a1 if x.startswith('Map')], inherit=True) from zlmdb._types import __dict__ as a2 print_tables([x for x in a2 if x.endswith('KeysMixin')], prefix='zlmdb._types') print_tables([x for x in a2 if x.endswith('ValuesMixin')], prefix='zlmdb._types') zlmdb-22.6.1/docs/index.rst000077700000000000000000000000001426100523600173772../README.rstustar00rootroot00000000000000zlmdb-22.6.1/docs/performance.rst000066400000000000000000000246661426100523600167120ustar00rootroot00000000000000Performance ----------- Read performance (with Flatbuffers serializer for object storage): .. image:: _static/performance_test2.png :width: 605px Write performance with different serializers: .. image:: _static/performance_test1.png :width: 780px * [zlmdb/tests/test_serialization.py](https://github.com/crossbario/zlmdb/blob/master/zlmdb/tests/test_serialization.py) * [zlmdb/_pmap.py:_FlatBuffersValuesMixin](https://github.com/crossbario/zlmdb/blob/master/zlmdb/_pmap.py#L625) Test system ........... The test was run on an Intel NUC with Ubuntu Bionic: .. code-block:: console (cpy370_1) oberstet@crossbar1:~/scm/crossbario/zlmdb$ uname -a Linux crossbar1 4.15.0-34-generic #37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018 x86_64 x86_64 x86_64 GNU/Linux (cpy370_1) oberstet@crossbar1:~/scm/crossbario/zlmdb$ lsb_release -a No LSB modules are available. 
Distributor ID: Ubuntu Description: Ubuntu 18.04.1 LTS Release: 18.04 Codename: bionic (cpy370_1) oberstet@crossbar1:~/scm/crossbario/zlmdb$ lscpu Architektur: x86_64 CPU Operationsmodus: 32-bit, 64-bit Byte-Reihenfolge: Little Endian CPU(s): 8 Liste der Online-CPU(s): 0-7 Thread(s) pro Kern: 2 Kern(e) pro Socket: 4 Sockel: 1 NUMA-Knoten: 1 Anbieterkennung: GenuineIntel Prozessorfamilie: 6 Modell: 94 Modellname: Intel(R) Core(TM) i7-6770HQ CPU @ 2.60GHz Stepping: 3 CPU MHz: 900.102 Maximale Taktfrequenz der CPU: 3500,0000 Minimale Taktfrequenz der CPU: 800,0000 BogoMIPS: 5184.00 Virtualisierung: VT-x L1d Cache: 32K L1i Cache: 32K L2 Cache: 256K L3 Cache: 6144K NUMA-Knoten0 CPU(s): 0-7 Markierungen: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc cpuid aperfmperf tsc_known_freq pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch cpuid_fault epb invpcid_single pti ssbd ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm mpx rdseed adx smap clflushopt intel_pt xsaveopt xsavec xgetbv1 xsaves dtherm ida arat pln pts hwp hwp_notify hwp_act_window hwp_epp flush_l1d Results ....... Fill & Read performance results for PyPy 3 (v6.0.0): .. code-block:: console zlmdb/tests/test_flatbuffers.py::test_pmap_flatbuffers_count Using temporary directory /tmp/tmpg38791il for database Transaction ended: puts=10000 / dels=0 rows in 821 ms, 12166 rows/sec Transaction ended: puts=10000 / dels=0 rows in 211 ms, 47390 rows/sec Transaction ended: puts=10000 / dels=0 rows in 236 ms, 42372 rows/sec Transaction ended: puts=10000 / dels=0 rows in 216 ms, 46112 rows/sec Transaction ended: puts=10000 / dels=0 rows in 263 ms, 37881 rows/sec Transaction ended: 1000000 rows read in 1349 ms, 740900 rows/sec Transaction ended: 1000000 rows read in 1225 ms, 816188 rows/sec Transaction ended: 1000000 rows read in 1230 ms, 812895 rows/sec Transaction ended: 1000000 rows read in 1228 ms, 814307 rows/sec Transaction ended: 1000000 rows read in 1228 ms, 814307 rows/sec PASSED and Write performance with different serializers: .. 
code-block:: console zlmdb/tests/test_serialization.py::test_json_serialization_speed running on: 3.5.3 (fdd60ed87e94, Apr 24 2018, 06:10:04) [PyPy 6.0.0 with GCC 6.2.0 20160901] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 19384.7 objects/sec 8.5 MB 30204.7 objects/sec 17.0 MB 30075.6 objects/sec 25.4 MB 30390.1 objects/sec 33.9 MB 27105.8 objects/sec 42.4 MB 29900.0 objects/sec 50.9 MB 30598.2 objects/sec 59.3 MB 30044.7 objects/sec 67.8 MB 30140.4 objects/sec 76.3 MB 28741.3 objects/sec 84.8 MB 30598.2 objects/sec max, 84.8 MB bytes total, 847 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_cbor_serialization_speed running on: 3.5.3 (fdd60ed87e94, Apr 24 2018, 06:10:04) [PyPy 6.0.0 with GCC 6.2.0 20160901] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 24692.3 objects/sec 5.8 MB 32789.0 objects/sec 11.6 MB 34056.9 objects/sec 17.3 MB 32679.4 objects/sec 23.1 MB 33207.5 objects/sec 28.9 MB 33553.0 objects/sec 34.7 MB 27443.7 objects/sec 40.4 MB 31347.2 objects/sec 46.2 MB 33560.1 objects/sec 52.0 MB 33203.0 objects/sec 57.8 MB 34056.9 objects/sec max, 57.8 MB bytes total, 577 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_pickle_serialization_speed running on: 3.5.3 (fdd60ed87e94, Apr 24 2018, 06:10:04) [PyPy 6.0.0 with GCC 6.2.0 20160901] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 16280.2 objects/sec 8.5 MB 16985.4 objects/sec 17.0 MB 17206.1 objects/sec 25.5 MB 17056.9 objects/sec 34.0 MB 17406.6 objects/sec 42.4 MB 17474.5 objects/sec 50.9 MB 17509.5 objects/sec 59.4 MB 17450.8 objects/sec 67.9 MB 18063.3 objects/sec 76.4 MB 17343.1 objects/sec 84.9 MB 18063.3 objects/sec max, 84.9 MB bytes total, 848 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_flatbuffer_serialization_speed running on: 3.5.3 (fdd60ed87e94, Apr 24 2018, 06:10:04) [PyPy 6.0.0 with GCC 6.2.0 20160901] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 58094.0 objects/sec 1.6 MB 52665.7 objects/sec 3.2 MB 63701.7 objects/sec 4.8 MB 61753.9 objects/sec 6.4 MB 63488.8 objects/sec 8.0 MB 64583.2 objects/sec 9.6 MB 62175.9 objects/sec 11.2 MB 64443.8 objects/sec 12.8 MB 63375.5 objects/sec 14.4 MB 61808.2 objects/sec 16.0 MB 64583.2 objects/sec max, 16.0 MB bytes total, 159 Bytes bytes/obj PASSED Fill & Read performance results for CPython 3 (v3.7.0): .. 
code-block:: console zlmdb/tests/test_flatbuffers.py::test_pmap_flatbuffers_count Using temporary directory /tmp/tmpkxt44ayp for database Transaction ended: puts=10000 / dels=0 rows in 1747 ms, 5721 rows/sec Transaction ended: puts=10000 / dels=0 rows in 1716 ms, 5826 rows/sec Transaction ended: puts=10000 / dels=0 rows in 1752 ms, 5705 rows/sec Transaction ended: puts=10000 / dels=0 rows in 1742 ms, 5740 rows/sec Transaction ended: puts=10000 / dels=0 rows in 1756 ms, 5692 rows/sec Transaction ended: 1000000 rows read in 12931 ms, 77328 rows/sec Transaction ended: 1000000 rows read in 12926 ms, 77361 rows/sec Transaction ended: 1000000 rows read in 12956 ms, 77179 rows/sec Transaction ended: 1000000 rows read in 12977 ms, 77056 rows/sec Transaction ended: 1000000 rows read in 12860 ms, 77758 rows/sec PASSED and Write performance with different serializers: .. code-block:: console zlmdb/tests/test_serialization.py::test_json_serialization_speed running on: 3.7.0 (default, Sep 11 2018, 09:56:32) [GCC 7.3.0] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 18612.4 objects/sec 8.5 MB 17952.2 objects/sec 17.0 MB 18716.1 objects/sec 25.4 MB 18239.6 objects/sec 33.9 MB 18900.9 objects/sec 42.4 MB 18328.9 objects/sec 50.9 MB 18454.4 objects/sec 59.3 MB 18544.6 objects/sec 67.8 MB 18553.5 objects/sec 76.3 MB 18304.3 objects/sec 84.8 MB 18900.9 objects/sec max, 84.8 MB bytes total, 847 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_cbor_serialization_speed running on: 3.7.0 (default, Sep 11 2018, 09:56:32) [GCC 7.3.0] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 9066.4 objects/sec 5.8 MB 9125.0 objects/sec 11.6 MB 9063.7 objects/sec 17.3 MB 9108.3 objects/sec 23.1 MB 8998.3 objects/sec 28.9 MB 8938.6 objects/sec 34.7 MB 9088.6 objects/sec 40.4 MB 9063.0 objects/sec 46.2 MB 9127.8 objects/sec 52.0 MB 9129.6 objects/sec 57.8 MB 9129.6 objects/sec max, 57.8 MB bytes total, 577 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_pickle_serialization_speed running on: 3.7.0 (default, Sep 11 2018, 09:56:32) [GCC 7.3.0] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 21894.9 objects/sec 5.8 MB 21725.4 objects/sec 11.6 MB 21793.6 objects/sec 17.4 MB 21755.0 objects/sec 23.2 MB 21873.5 objects/sec 28.9 MB 21651.3 objects/sec 34.7 MB 21620.2 objects/sec 40.5 MB 21810.5 objects/sec 46.3 MB 21956.2 objects/sec 52.1 MB 21133.8 objects/sec 57.9 MB 21956.2 objects/sec max, 57.9 MB bytes total, 578 Bytes bytes/obj PASSED zlmdb/tests/test_serialization.py::test_flatbuffer_serialization_speed running on: 3.7.0 (default, Sep 11 2018, 09:56:32) [GCC 7.3.0] uname_result(system='Linux', node='crossbar1', release='4.15.0-34-generic', version='#37-Ubuntu SMP Mon Aug 27 15:21:48 UTC 2018', machine='x86_64', processor='x86_64') 6127.6 objects/sec 1.6 MB 6176.0 objects/sec 3.2 MB 6171.0 objects/sec 4.8 MB 6194.4 objects/sec 6.4 MB 6191.5 objects/sec 8.0 MB 6225.2 objects/sec 9.6 MB 6144.9 objects/sec 11.2 MB 6175.1 objects/sec 12.8 MB 6118.0 objects/sec 14.4 MB 6119.6 objects/sec 16.0 MB 6225.2 objects/sec max, 16.0 MB bytes total, 159 Bytes bytes/obj PASSED 
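For orientation, the fill and read loops timed above follow the pattern
sketched below. This is a minimal sketch only, not the actual benchmark
code: the table type, slot number and database path (``MapStringString``,
slot ``1``, ``/tmp/mydb``) are illustrative stand-ins, while the real test
uses FlatBuffers-typed tables (see ``zlmdb/tests/test_flatbuffers.py``).

.. code-block:: python

    import zlmdb

    # illustrative table: string keys -> string values, stored in slot 1
    tab = zlmdb.MapStringString(1)

    db = zlmdb.Database('/tmp/mydb')

    # fill pass: all puts of one pass run inside a single write transaction,
    # which is what the per-transaction puts/sec rates above measure
    with db.begin(write=True) as txn:
        for i in range(10000):
            tab[txn, 'key-{}'.format(i)] = 'value-{}'.format(i)

    # read pass: one read-only transaction iterating the whole table
    with db.begin() as txn:
        cnt = tab.count(txn)

Batching many puts into one transaction is what makes the fill rates above
possible, since LMDB commits once per transaction.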
zlmdb-22.6.1/docs/reference.rst000066400000000000000000000414331426100523600163360ustar00rootroot00000000000000Reference ========= .. contents:: :local: ------------- Schema ------ .. autoclass:: zlmdb.Schema :members: Database -------- .. autoclass:: zlmdb.Database :members: Transaction ----------- * :class:`zlmdb.Transaction` * :class:`zlmdb.TransactionStats` ------- .. autoclass:: zlmdb.Transaction :members: .. autoclass:: zlmdb.TransactionStats :members: PersistentMap ------------- * :class:`zlmdb._pmap.PersistentMap` * :class:`zlmdb._pmap.PersistentMapIterator` ------- .. autoclass:: zlmdb._pmap.PersistentMap :members: .. autoclass:: zlmdb._pmap.PersistentMapIterator :members: Typed PersistentMap ------------------- * :class:`zlmdb.MapBytes16FlatBuffers` * :class:`zlmdb.MapBytes16TimestampUuid` * :class:`zlmdb.MapBytes16TimestampUuidFlatBuffers` * :class:`zlmdb.MapBytes20Bytes16` * :class:`zlmdb.MapBytes20Bytes20` * :class:`zlmdb.MapBytes20Bytes20FlatBuffers` * :class:`zlmdb.MapBytes20Bytes20Timestamp` * :class:`zlmdb.MapBytes20FlatBuffers` * :class:`zlmdb.MapBytes20StringFlatBuffers` * :class:`zlmdb.MapBytes20TimestampBytes20` * :class:`zlmdb.MapBytes20TimestampUuid` * :class:`zlmdb.MapBytes20Uuid` * :class:`zlmdb.MapBytes32Bytes32` * :class:`zlmdb.MapBytes32Bytes32FlatBuffers` * :class:`zlmdb.MapBytes32FlatBuffers` * :class:`zlmdb.MapBytes32StringFlatBuffers` * :class:`zlmdb.MapBytes32Timestamp` * :class:`zlmdb.MapBytes32Uuid` * :class:`zlmdb.MapBytes32UuidFlatBuffers` * :class:`zlmdb.MapOid3FlatBuffers` * :class:`zlmdb.MapOidCbor` * :class:`zlmdb.MapOidFlatBuffers` * :class:`zlmdb.MapOidJson` * :class:`zlmdb.MapOidOid` * :class:`zlmdb.MapOidOidFlatBuffers` * :class:`zlmdb.MapOidOidOid` * :class:`zlmdb.MapOidOidSet` * :class:`zlmdb.MapOidPickle` * :class:`zlmdb.MapOidString` * :class:`zlmdb.MapOidStringOid` * :class:`zlmdb.MapOidTimestampFlatBuffers` * :class:`zlmdb.MapOidTimestampOid` * :class:`zlmdb.MapOidTimestampStringOid` * :class:`zlmdb.MapOidUuid` * :class:`zlmdb.MapSlotUuidUuid` * :class:`zlmdb.MapStringCbor` * :class:`zlmdb.MapStringFlatBuffers` * :class:`zlmdb.MapStringJson` * :class:`zlmdb.MapStringOid` * :class:`zlmdb.MapStringOidOid` * :class:`zlmdb.MapStringPickle` * :class:`zlmdb.MapStringString` * :class:`zlmdb.MapStringStringStringUuid` * :class:`zlmdb.MapStringStringUuid` * :class:`zlmdb.MapStringTimestampCbor` * :class:`zlmdb.MapStringUuid` * :class:`zlmdb.MapTimestampBytes32FlatBuffers` * :class:`zlmdb.MapTimestampFlatBuffers` * :class:`zlmdb.MapTimestampStringCbor` * :class:`zlmdb.MapTimestampStringFlatBuffers` * :class:`zlmdb.MapTimestampUuidCbor` * :class:`zlmdb.MapTimestampUuidFlatBuffers` * :class:`zlmdb.MapTimestampUuidStringFlatBuffers` * :class:`zlmdb.MapUint16UuidTimestampFlatBuffers` * :class:`zlmdb.MapUuidBytes20Bytes20Uint8UuidFlatBuffers` * :class:`zlmdb.MapUuidBytes20Uint8FlatBuffers` * :class:`zlmdb.MapUuidBytes20Uint8UuidFlatBuffers` * :class:`zlmdb.MapUuidBytes32FlatBuffers` * :class:`zlmdb.MapUuidCbor` * :class:`zlmdb.MapUuidFlatBuffers` * :class:`zlmdb.MapUuidJson` * :class:`zlmdb.MapUuidOid` * :class:`zlmdb.MapUuidPickle` * :class:`zlmdb.MapUuidString` * :class:`zlmdb.MapUuidStringFlatBuffers` * :class:`zlmdb.MapUuidStringOid` * :class:`zlmdb.MapUuidStringUuid` * :class:`zlmdb.MapUuidTimestampBytes32` * :class:`zlmdb.MapUuidTimestampCbor` * :class:`zlmdb.MapUuidTimestampFlatBuffers` * :class:`zlmdb.MapUuidTimestampUuid` * :class:`zlmdb.MapUuidTimestampUuidFlatBuffers` * :class:`zlmdb.MapUuidUuid` * :class:`zlmdb.MapUuidUuidCbor` * 
:class:`zlmdb.MapUuidUuidFlatBuffers` * :class:`zlmdb.MapUuidUuidSet` * :class:`zlmdb.MapUuidUuidStringFlatBuffers` * :class:`zlmdb.MapUuidUuidStringUuid` * :class:`zlmdb.MapUuidUuidUuid` * :class:`zlmdb.MapUuidUuidUuidStringUuid` * :class:`zlmdb.MapUuidUuidUuidUuid` * :class:`zlmdb.MapUuidUuidUuidUuidUuid` ------ .. autoclass:: zlmdb.MapBytes16FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes16TimestampUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes16TimestampUuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20Bytes16 :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20Bytes20 :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20Bytes20FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20Bytes20Timestamp :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20StringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20TimestampBytes20 :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20TimestampUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes20Uuid :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32Bytes32 :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32Bytes32FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32StringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32Timestamp :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32Uuid :members: :show-inheritance: .. autoclass:: zlmdb.MapBytes32UuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapOid3FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapOidCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapOidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapOidJson :members: :show-inheritance: .. autoclass:: zlmdb.MapOidOid :members: :show-inheritance: .. autoclass:: zlmdb.MapOidOidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapOidOidOid :members: :show-inheritance: .. autoclass:: zlmdb.MapOidOidSet :members: :show-inheritance: .. autoclass:: zlmdb.MapOidPickle :members: :show-inheritance: .. autoclass:: zlmdb.MapOidString :members: :show-inheritance: .. autoclass:: zlmdb.MapOidStringOid :members: :show-inheritance: .. autoclass:: zlmdb.MapOidTimestampFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapOidTimestampOid :members: :show-inheritance: .. autoclass:: zlmdb.MapOidTimestampStringOid :members: :show-inheritance: .. autoclass:: zlmdb.MapOidUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapSlotUuidUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapStringCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapStringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapStringJson :members: :show-inheritance: .. autoclass:: zlmdb.MapStringOid :members: :show-inheritance: .. autoclass:: zlmdb.MapStringOidOid :members: :show-inheritance: .. autoclass:: zlmdb.MapStringPickle :members: :show-inheritance: .. autoclass:: zlmdb.MapStringString :members: :show-inheritance: .. autoclass:: zlmdb.MapStringStringStringUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapStringStringUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapStringTimestampCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapStringUuid :members: :show-inheritance: .. 
autoclass:: zlmdb.MapTimestampBytes32FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampStringCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampStringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampUuidCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampUuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapTimestampUuidStringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUint16UuidTimestampFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidBytes20Bytes20Uint8UuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidBytes20Uint8FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidBytes20Uint8UuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidBytes32FlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidJson :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidOid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidPickle :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidString :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidStringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidStringOid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidStringUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidTimestampBytes32 :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidTimestampCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidTimestampFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidTimestampUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidTimestampUuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidCbor :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidSet :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidStringFlatBuffers :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidStringUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidUuidStringUuid :members: :show-inheritance: .. autoclass:: zlmdb.MapUuidUuidUuidUuid :members: :show-inheritance: .. 
autoclass:: zlmdb.MapUuidUuidUuidUuidUuid :members: :show-inheritance: Key Types --------- * :class:`zlmdb._types._Bytes16KeysMixin` * :class:`zlmdb._types._Bytes16TimestampKeysMixin` * :class:`zlmdb._types._Bytes16TimestampUuidKeysMixin` * :class:`zlmdb._types._Bytes20Bytes20KeysMixin` * :class:`zlmdb._types._Bytes20KeysMixin` * :class:`zlmdb._types._Bytes20StringKeysMixin` * :class:`zlmdb._types._Bytes20TimestampKeysMixin` * :class:`zlmdb._types._Bytes32Bytes32KeysMixin` * :class:`zlmdb._types._Bytes32KeysMixin` * :class:`zlmdb._types._Bytes32StringKeysMixin` * :class:`zlmdb._types._Bytes32UuidKeysMixin` * :class:`zlmdb._types._Oid3KeysMixin` * :class:`zlmdb._types._OidKeysMixin` * :class:`zlmdb._types._OidOidKeysMixin` * :class:`zlmdb._types._OidStringKeysMixin` * :class:`zlmdb._types._OidTimestampKeysMixin` * :class:`zlmdb._types._OidTimestampStringKeysMixin` * :class:`zlmdb._types._SlotUuidKeysMixin` * :class:`zlmdb._types._StringKeysMixin` * :class:`zlmdb._types._StringOidKeysMixin` * :class:`zlmdb._types._StringStringKeysMixin` * :class:`zlmdb._types._StringStringStringKeysMixin` * :class:`zlmdb._types._StringTimestampKeysMixin` * :class:`zlmdb._types._TimestampBytes32KeysMixin` * :class:`zlmdb._types._TimestampKeysMixin` * :class:`zlmdb._types._TimestampStringKeysMixin` * :class:`zlmdb._types._TimestampUuidKeysMixin` * :class:`zlmdb._types._TimestampUuidStringKeysMixin` * :class:`zlmdb._types._Uint16UuidTimestampKeysMixin` * :class:`zlmdb._types._UuidBytes20Bytes20Uint8UuidKeysMixin` * :class:`zlmdb._types._UuidBytes20Uint8KeysMixin` * :class:`zlmdb._types._UuidBytes20Uint8UuidKeysMixin` * :class:`zlmdb._types._UuidBytes32KeysMixin` * :class:`zlmdb._types._UuidKeysMixin` * :class:`zlmdb._types._UuidStringKeysMixin` * :class:`zlmdb._types._UuidTimestampKeysMixin` * :class:`zlmdb._types._UuidTimestampUuidKeysMixin` * :class:`zlmdb._types._UuidUuidKeysMixin` * :class:`zlmdb._types._UuidUuidStringKeysMixin` * :class:`zlmdb._types._UuidUuidUuidKeysMixin` * :class:`zlmdb._types._UuidUuidUuidStringKeysMixin` * :class:`zlmdb._types._UuidUuidUuidUuidKeysMixin` ------ .. autoclass:: zlmdb._types._Bytes16KeysMixin :members: .. autoclass:: zlmdb._types._Bytes16TimestampKeysMixin :members: .. autoclass:: zlmdb._types._Bytes16TimestampUuidKeysMixin :members: .. autoclass:: zlmdb._types._Bytes20Bytes20KeysMixin :members: .. autoclass:: zlmdb._types._Bytes20KeysMixin :members: .. autoclass:: zlmdb._types._Bytes20StringKeysMixin :members: .. autoclass:: zlmdb._types._Bytes20TimestampKeysMixin :members: .. autoclass:: zlmdb._types._Bytes32Bytes32KeysMixin :members: .. autoclass:: zlmdb._types._Bytes32KeysMixin :members: .. autoclass:: zlmdb._types._Bytes32StringKeysMixin :members: .. autoclass:: zlmdb._types._Bytes32UuidKeysMixin :members: .. autoclass:: zlmdb._types._Oid3KeysMixin :members: .. autoclass:: zlmdb._types._OidKeysMixin :members: .. autoclass:: zlmdb._types._OidOidKeysMixin :members: .. autoclass:: zlmdb._types._OidStringKeysMixin :members: .. autoclass:: zlmdb._types._OidTimestampKeysMixin :members: .. autoclass:: zlmdb._types._OidTimestampStringKeysMixin :members: .. autoclass:: zlmdb._types._SlotUuidKeysMixin :members: .. autoclass:: zlmdb._types._StringKeysMixin :members: .. autoclass:: zlmdb._types._StringOidKeysMixin :members: .. autoclass:: zlmdb._types._StringStringKeysMixin :members: .. autoclass:: zlmdb._types._StringStringStringKeysMixin :members: .. autoclass:: zlmdb._types._StringTimestampKeysMixin :members: .. 
autoclass:: zlmdb._types._TimestampBytes32KeysMixin :members: .. autoclass:: zlmdb._types._TimestampKeysMixin :members: .. autoclass:: zlmdb._types._TimestampStringKeysMixin :members: .. autoclass:: zlmdb._types._TimestampUuidKeysMixin :members: .. autoclass:: zlmdb._types._TimestampUuidStringKeysMixin :members: .. autoclass:: zlmdb._types._Uint16UuidTimestampKeysMixin :members: .. autoclass:: zlmdb._types._UuidBytes20Bytes20Uint8UuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidBytes20Uint8KeysMixin :members: .. autoclass:: zlmdb._types._UuidBytes20Uint8UuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidBytes32KeysMixin :members: .. autoclass:: zlmdb._types._UuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidStringKeysMixin :members: .. autoclass:: zlmdb._types._UuidTimestampKeysMixin :members: .. autoclass:: zlmdb._types._UuidTimestampUuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidUuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidUuidStringKeysMixin :members: .. autoclass:: zlmdb._types._UuidUuidUuidKeysMixin :members: .. autoclass:: zlmdb._types._UuidUuidUuidStringKeysMixin :members: .. autoclass:: zlmdb._types._UuidUuidUuidUuidKeysMixin :members: Value Types ----------- * :class:`zlmdb._types._Bytes16ValuesMixin` * :class:`zlmdb._types._Bytes20TimestampValuesMixin` * :class:`zlmdb._types._Bytes20ValuesMixin` * :class:`zlmdb._types._Bytes32ValuesMixin` * :class:`zlmdb._types._CborValuesMixin` * :class:`zlmdb._types._FlatBuffersValuesMixin` * :class:`zlmdb._types._JsonValuesMixin` * :class:`zlmdb._types._OidSetValuesMixin` * :class:`zlmdb._types._OidValuesMixin` * :class:`zlmdb._types._Pickle5ValuesMixin` * :class:`zlmdb._types._PickleValuesMixin` * :class:`zlmdb._types._StringSetValuesMixin` * :class:`zlmdb._types._StringValuesMixin` * :class:`zlmdb._types._TimestampValuesMixin` * :class:`zlmdb._types._UuidSetValuesMixin` * :class:`zlmdb._types._UuidValuesMixin` ------ .. autoclass:: zlmdb._types._Bytes16ValuesMixin :members: .. autoclass:: zlmdb._types._Bytes20TimestampValuesMixin :members: .. autoclass:: zlmdb._types._Bytes20ValuesMixin :members: .. autoclass:: zlmdb._types._Bytes32ValuesMixin :members: .. autoclass:: zlmdb._types._CborValuesMixin :members: .. autoclass:: zlmdb._types._FlatBuffersValuesMixin :members: .. autoclass:: zlmdb._types._JsonValuesMixin :members: .. autoclass:: zlmdb._types._OidSetValuesMixin :members: .. autoclass:: zlmdb._types._OidValuesMixin :members: .. autoclass:: zlmdb._types._Pickle5ValuesMixin :members: .. autoclass:: zlmdb._types._PickleValuesMixin :members: .. autoclass:: zlmdb._types._StringSetValuesMixin :members: .. autoclass:: zlmdb._types._StringValuesMixin :members: .. autoclass:: zlmdb._types._TimestampValuesMixin :members: .. autoclass:: zlmdb._types._UuidSetValuesMixin :members: .. 
autoclass:: zlmdb._types._UuidValuesMixin :members: zlmdb-22.6.1/docs/spelling_wordlist.txt000066400000000000000000000022621426100523600201500ustar00rootroot00000000000000Twisted asyncio Trollius inlineCallbacks Deferreds inline excludeMe wxPython Tox CPython stdlib timestamp Lua rawsocket serializer subprotocol subprotocols Hybi args kwargs unserialized unserialize serializable Testee Jython wallclock walltime mixin Mixin hostname serializers app apps util wamp WAMP Ctor Iff iff ping pong openHandshakeTimeout closeHandshakeTimeout wasClean logOctets logFrames reasonUtf sendMessage websocket validator Bjoern Hoehrmann codepoint currentIndex totalIndex websocket xormasker localhost polyfill useragent sendPing sendPong sendClose Nagle endsOnCodePoint Changelog changelog docstrings websockify ws wss slowsquare plugin pubsub Peticolas isSecure permessage Nagle endsOnCodePoint blog backport backports twistd frontend backend frontends backends setProtocolOptions serverConnectionDropTimeout reasonRaw serverStatus onConnect namespace unsubscribe bzip http uri longpoll choosereactor flashpolicy autoimport longpoll chopsize lifecycle Lifecycle Callees callees Testsuite testsuite Subpackages subpath subpaths pongs pings params unescaped utf acknowledgement unregistration unregister unregistered unsubscription unsubscribe unsubscribed deserialization deserialize zlmdb-22.6.1/flatbuffers/000077500000000000000000000000001426100523600152145ustar00rootroot00000000000000zlmdb-22.6.1/flatbuffers/__init__.py000066400000000000000000000013571426100523600173330ustar00rootroot00000000000000# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .builder import Builder from .table import Table from .compat import range_func as compat_range from ._version import __version__ from . import util zlmdb-22.6.1/flatbuffers/_version.py000066400000000000000000000012671426100523600174200ustar00rootroot00000000000000# Copyright 2019 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Placeholder, to be updated during the release process # by the setup.py __version__ = u"latest" zlmdb-22.6.1/flatbuffers/builder.py000066400000000000000000000626651426100523600172330ustar00rootroot00000000000000# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from . import number_types as N from .number_types import (UOffsetTFlags, SOffsetTFlags, VOffsetTFlags) from . import encode from . import packer from . import compat from .compat import range_func from .compat import memoryview_type from .compat import import_numpy, NumpyRequiredForThisFeature np = import_numpy() ## @file ## @addtogroup flatbuffers_python_api ## @{ ## @cond FLATBUFFERS_INTERNAL class OffsetArithmeticError(RuntimeError): """ Error caused by an Offset arithmetic error. Probably caused by bad writing of fields. This is considered an unreachable situation in normal circumstances. """ pass class IsNotNestedError(RuntimeError): """ Error caused by using a Builder to write Object data when not inside an Object. """ pass class IsNestedError(RuntimeError): """ Error caused by using a Builder to begin an Object when an Object is already being built. """ pass class StructIsNotInlineError(RuntimeError): """ Error caused by using a Builder to write a Struct at a location that is not the current Offset. """ pass class BuilderSizeError(RuntimeError): """ Error caused by causing a Builder to exceed the hardcoded limit of 2 gigabytes. """ pass class BuilderNotFinishedError(RuntimeError): """ Error caused by not calling `Finish` before calling `Output`. """ pass # VtableMetadataFields is the count of metadata fields in each vtable. VtableMetadataFields = 2 ## @endcond class Builder(object): """ A Builder is used to construct one or more FlatBuffers. Typically, Builder objects will be used from code generated by the `flatc` compiler. A Builder constructs byte buffers in a last-first manner for simplicity and performance during reading. Internally, a Builder is a state machine for creating FlatBuffer objects. It holds the following internal state: - Bytes: an array of bytes. - current_vtable: a list of integers. - vtables: a hash of vtable entries. Attributes: Bytes: The internal `bytearray` for the Builder. finished: A boolean determining if the Builder has been finalized. """ ## @cond FLATBUFFERS_INTENRAL __slots__ = ("Bytes", "current_vtable", "head", "minalign", "objectEnd", "vtables", "nested", "forceDefaults", "finished", "vectorNumElems") """Maximum buffer size constant, in bytes. Builder will never allow it's buffer grow over this size. Currently equals 2Gb. """ MAX_BUFFER_SIZE = 2**31 ## @endcond def __init__(self, initialSize=1024): """Initializes a Builder of size `initial_size`. The internal buffer is grown as needed. """ if not (0 <= initialSize <= Builder.MAX_BUFFER_SIZE): msg = "flatbuffers: Cannot create Builder larger than 2 gigabytes." raise BuilderSizeError(msg) self.Bytes = bytearray(initialSize) ## @cond FLATBUFFERS_INTERNAL self.current_vtable = None self.head = UOffsetTFlags.py_type(initialSize) self.minalign = 1 self.objectEnd = None self.vtables = {} self.nested = False self.forceDefaults = False ## @endcond self.finished = False def Output(self): """Return the portion of the buffer that has been used for writing data. This is the typical way to access the FlatBuffer data inside the builder. 
If you try to access `Builder.Bytes` directly, you would need to
        manually index it with `Head()`, since the buffer is constructed
        backwards.

        It raises BuilderNotFinishedError if the buffer has not been finished
        with `Finish`.
        """

        if not self.finished:
            raise BuilderNotFinishedError()

        return self.Bytes[self.Head():]

    ## @cond FLATBUFFERS_INTERNAL
    def StartObject(self, numfields):
        """StartObject initializes bookkeeping for writing a new object."""

        self.assertNotNested()

        # use 32-bit offsets so that arithmetic doesn't overflow.
        self.current_vtable = [0 for _ in range_func(numfields)]
        self.objectEnd = self.Offset()
        self.nested = True

    def WriteVtable(self):
        """
        WriteVtable serializes the vtable for the current object, if needed.

        Before writing out the vtable, this checks pre-existing vtables for
        equality to this one. If an equal vtable is found, point the object to
        the existing vtable and return.

        Because vtable values are sensitive to alignment of object data, not
        all logically-equal vtables will be deduplicated.

        A vtable has the following format:
          <VOffsetT: size of the vtable in bytes, including this one>
          <VOffsetT: size of the object in bytes, including the vtable offset>
          <VOffsetT: offset for a field> * N, where N is the number of fields
          in the schema for this type. Includes deprecated fields.
        Thus, a vtable is made of 2 + N elements, each VOffsetT bytes wide.

        An object has the following format:
          <SOffsetT: offset to this object's vtable (may be negative)>
          <byte: data>+
        """
        # Prepend a zero scalar to the object. Later in this function we'll
        # write an offset here that points to the object's vtable:
        self.PrependSOffsetTRelative(0)

        objectOffset = self.Offset()

        vtKey = []
        trim = True
        for elem in reversed(self.current_vtable):
            if elem == 0:
                if trim:
                    continue
            else:
                elem = objectOffset - elem
                trim = False

            vtKey.append(elem)

        vtKey = tuple(vtKey)
        vt2Offset = self.vtables.get(vtKey)
        if vt2Offset is None:
            # Did not find a vtable, so write this one to the buffer.

            # Write out the current vtable in reverse, because
            # serialization occurs in last-first order:
            i = len(self.current_vtable) - 1
            trailing = 0
            trim = True
            while i >= 0:
                off = 0
                elem = self.current_vtable[i]
                i -= 1

                if elem == 0:
                    if trim:
                        trailing += 1
                        continue
                else:
                    # Forward reference to field;
                    # use 32bit number to ensure no overflow:
                    off = objectOffset - elem
                    trim = False

                self.PrependVOffsetT(off)

            # The two metadata fields are written last.

            # First, store the object bytesize:
            objectSize = UOffsetTFlags.py_type(objectOffset - self.objectEnd)
            self.PrependVOffsetT(VOffsetTFlags.py_type(objectSize))

            # Second, store the vtable bytesize:
            vBytes = len(self.current_vtable) - trailing + VtableMetadataFields
            vBytes *= N.VOffsetTFlags.bytewidth
            self.PrependVOffsetT(VOffsetTFlags.py_type(vBytes))

            # Next, write the offset to the new vtable in the
            # already-allocated SOffsetT at the beginning of this object:
            objectStart = SOffsetTFlags.py_type(len(self.Bytes) - objectOffset)
            encode.Write(packer.soffset, self.Bytes, objectStart,
                         SOffsetTFlags.py_type(self.Offset() - objectOffset))

            # Finally, store this vtable in memory for future
            # deduplication:
            self.vtables[vtKey] = self.Offset()
        else:
            # Found a duplicate vtable.
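            # Reuse it: below we point the object's already-reserved
            # SOffsetT at the existing vtable instead of serializing it again.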
            objectStart = SOffsetTFlags.py_type(len(self.Bytes) - objectOffset)
            self.head = UOffsetTFlags.py_type(objectStart)

            # Write the offset to the found vtable in the
            # already-allocated SOffsetT at the beginning of this object:
            encode.Write(packer.soffset, self.Bytes, self.Head(),
                         SOffsetTFlags.py_type(vt2Offset - objectOffset))

        self.current_vtable = None
        return objectOffset

    def EndObject(self):
        """EndObject writes data necessary to finish object construction."""
        self.assertNested()
        self.nested = False
        return self.WriteVtable()

    def growByteBuffer(self):
        """Doubles the size of the byteslice, and copies the old data towards
           the end of the new buffer (since we build the buffer backwards)."""
        if len(self.Bytes) == Builder.MAX_BUFFER_SIZE:
            msg = "flatbuffers: cannot grow buffer beyond 2 gigabytes"
            raise BuilderSizeError(msg)

        newSize = min(len(self.Bytes) * 2, Builder.MAX_BUFFER_SIZE)
        if newSize == 0:
            newSize = 1
        bytes2 = bytearray(newSize)
        bytes2[newSize-len(self.Bytes):] = self.Bytes
        self.Bytes = bytes2
    ## @endcond

    def Head(self):
        """Get the start of useful data in the underlying byte buffer.

        Note: unlike other functions, this value is interpreted as from the
        left.
        """
        ## @cond FLATBUFFERS_INTERNAL
        return self.head
        ## @endcond

    ## @cond FLATBUFFERS_INTERNAL
    def Offset(self):
        """Offset relative to the end of the buffer."""
        return UOffsetTFlags.py_type(len(self.Bytes) - self.Head())

    def Pad(self, n):
        """Pad places zeros at the current offset."""
        for i in range_func(n):
            self.Place(0, N.Uint8Flags)

    def Prep(self, size, additionalBytes):
        """
        Prep prepares to write an element of `size` after `additionalBytes`
        have been written, e.g. if you write a string, you need to align
        such that the int length field is aligned to SizeInt32, and the
        string data follows it directly.
        If all you need to do is align, `additionalBytes` will be 0.
        """

        # Track the biggest thing we've ever aligned to.
        if size > self.minalign:
            self.minalign = size

        # Find the amount of alignment needed such that `size` is properly
        # aligned after `additionalBytes`:
        alignSize = (~(len(self.Bytes) - self.Head() + additionalBytes)) + 1
        alignSize &= (size - 1)

        # Reallocate the buffer if needed:
        while self.Head() < alignSize+size+additionalBytes:
            oldBufSize = len(self.Bytes)
            self.growByteBuffer()
            updated_head = self.head + len(self.Bytes) - oldBufSize
            self.head = UOffsetTFlags.py_type(updated_head)

        self.Pad(alignSize)

    def PrependSOffsetTRelative(self, off):
        """
        PrependSOffsetTRelative prepends an SOffsetT, relative to where it
        will be written.
        """

        # Ensure alignment is already done:
        self.Prep(N.SOffsetTFlags.bytewidth, 0)
        if not (off <= self.Offset()):
            msg = "flatbuffers: Offset arithmetic error."
            raise OffsetArithmeticError(msg)
        off2 = self.Offset() - off + N.SOffsetTFlags.bytewidth
        self.PlaceSOffsetT(off2)
    ## @endcond

    def PrependUOffsetTRelative(self, off):
        """Prepends an unsigned offset into vector data, relative to where
        it will be written.
        """

        # Ensure alignment is already done:
        self.Prep(N.UOffsetTFlags.bytewidth, 0)
        if not (off <= self.Offset()):
            msg = "flatbuffers: Offset arithmetic error."
            raise OffsetArithmeticError(msg)
        off2 = self.Offset() - off + N.UOffsetTFlags.bytewidth
        self.PlaceUOffsetT(off2)

    ## @cond FLATBUFFERS_INTERNAL
    def StartVector(self, elemSize, numElems, alignment):
        """
        StartVector initializes bookkeeping for writing a new vector.

        A vector has the following format:
          <UOffsetT: number of elements in this vector>
          <T: data>+, where T is the type of elements of this vector.
""" self.assertNotNested() self.nested = True self.vectorNumElems = numElems self.Prep(N.Uint32Flags.bytewidth, elemSize*numElems) self.Prep(alignment, elemSize*numElems) # In case alignment > int. return self.Offset() ## @endcond def EndVector(self): """EndVector writes data necessary to finish vector construction.""" self.assertNested() ## @cond FLATBUFFERS_INTERNAL self.nested = False ## @endcond # we already made space for this, so write without PrependUint32 self.PlaceUOffsetT(self.vectorNumElems) self.vectorNumElems = None return self.Offset() def CreateString(self, s, encoding='utf-8', errors='strict'): """CreateString writes a null-terminated byte string as a vector.""" self.assertNotNested() ## @cond FLATBUFFERS_INTERNAL self.nested = True ## @endcond if isinstance(s, compat.string_types): x = s.encode(encoding, errors) elif isinstance(s, compat.binary_types): x = s else: raise TypeError("non-string passed to CreateString") self.Prep(N.UOffsetTFlags.bytewidth, (len(x)+1)*N.Uint8Flags.bytewidth) self.Place(0, N.Uint8Flags) l = UOffsetTFlags.py_type(len(s)) ## @cond FLATBUFFERS_INTERNAL self.head = UOffsetTFlags.py_type(self.Head() - l) ## @endcond self.Bytes[self.Head():self.Head()+l] = x self.vectorNumElems = len(x) return self.EndVector() def CreateByteVector(self, x): """CreateString writes a byte vector.""" self.assertNotNested() ## @cond FLATBUFFERS_INTERNAL self.nested = True ## @endcond if not isinstance(x, compat.binary_types): raise TypeError("non-byte vector passed to CreateByteVector") self.Prep(N.UOffsetTFlags.bytewidth, len(x)*N.Uint8Flags.bytewidth) l = UOffsetTFlags.py_type(len(x)) ## @cond FLATBUFFERS_INTERNAL self.head = UOffsetTFlags.py_type(self.Head() - l) ## @endcond self.Bytes[self.Head():self.Head()+l] = x self.vectorNumElems = len(x) return self.EndVector() def CreateNumpyVector(self, x): """CreateNumpyVector writes a numpy array into the buffer.""" if np is None: # Numpy is required for this feature raise NumpyRequiredForThisFeature("Numpy was not found.") if not isinstance(x, np.ndarray): raise TypeError("non-numpy-ndarray passed to CreateNumpyVector") if x.dtype.kind not in ['b', 'i', 'u', 'f']: raise TypeError("numpy-ndarray holds elements of unsupported datatype") if x.ndim > 1: raise TypeError("multidimensional-ndarray passed to CreateNumpyVector") self.StartVector(x.itemsize, x.size, x.dtype.alignment) # Ensure little endian byte ordering if x.dtype.str[0] == "<": x_lend = x else: x_lend = x.byteswap(inplace=False) # Calculate total length l = UOffsetTFlags.py_type(x_lend.itemsize * x_lend.size) ## @cond FLATBUFFERS_INTERNAL self.head = UOffsetTFlags.py_type(self.Head() - l) ## @endcond # tobytes ensures c_contiguous ordering self.Bytes[self.Head():self.Head()+l] = x_lend.tobytes(order='C') self.vectorNumElems = x.size return self.EndVector() ## @cond FLATBUFFERS_INTERNAL def assertNested(self): """ Check that we are in the process of building an object. """ if not self.nested: raise IsNotNestedError() def assertNotNested(self): """ Check that no other objects are being built while making this object. If not, raise an exception. """ if self.nested: raise IsNestedError() def assertStructIsInline(self, obj): """ Structs are always stored inline, so need to be created right where they are used. You'll get this error if you created it elsewhere. 
""" N.enforce_number(obj, N.UOffsetTFlags) if obj != self.Offset(): msg = ("flatbuffers: Tried to write a Struct at an Offset that " "is different from the current Offset of the Builder.") raise StructIsNotInlineError(msg) def Slot(self, slotnum): """ Slot sets the vtable key `voffset` to the current location in the buffer. """ self.assertNested() self.current_vtable[slotnum] = self.Offset() ## @endcond def __Finish(self, rootTable, sizePrefix, file_identifier=None): """Finish finalizes a buffer, pointing to the given `rootTable`.""" N.enforce_number(rootTable, N.UOffsetTFlags) prepSize = N.UOffsetTFlags.bytewidth if file_identifier is not None: prepSize += N.Int32Flags.bytewidth if sizePrefix: prepSize += N.Int32Flags.bytewidth self.Prep(self.minalign, prepSize) if file_identifier is not None: self.Prep(N.UOffsetTFlags.bytewidth, encode.FILE_IDENTIFIER_LENGTH) # Convert bytes object file_identifier to an array of 4 8-bit integers, # and use big-endian to enforce size compliance. # https://docs.python.org/2/library/struct.html#format-characters file_identifier = N.struct.unpack(">BBBB", file_identifier) for i in range(encode.FILE_IDENTIFIER_LENGTH-1, -1, -1): # Place the bytes of the file_identifer in reverse order: self.Place(file_identifier[i], N.Uint8Flags) self.PrependUOffsetTRelative(rootTable) if sizePrefix: size = len(self.Bytes) - self.Head() N.enforce_number(size, N.Int32Flags) self.PrependInt32(size) self.finished = True return self.Head() def Finish(self, rootTable, file_identifier=None): """Finish finalizes a buffer, pointing to the given `rootTable`.""" return self.__Finish(rootTable, False, file_identifier=file_identifier) def FinishSizePrefixed(self, rootTable, file_identifier=None): """ Finish finalizes a buffer, pointing to the given `rootTable`, with the size prefixed. """ return self.__Finish(rootTable, True, file_identifier=file_identifier) ## @cond FLATBUFFERS_INTERNAL def Prepend(self, flags, off): self.Prep(flags.bytewidth, 0) self.Place(off, flags) def PrependSlot(self, flags, o, x, d): N.enforce_number(x, flags) N.enforce_number(d, flags) if x != d or self.forceDefaults: self.Prepend(flags, x) self.Slot(o) def PrependBoolSlot(self, *args): self.PrependSlot(N.BoolFlags, *args) def PrependByteSlot(self, *args): self.PrependSlot(N.Uint8Flags, *args) def PrependUint8Slot(self, *args): self.PrependSlot(N.Uint8Flags, *args) def PrependUint16Slot(self, *args): self.PrependSlot(N.Uint16Flags, *args) def PrependUint32Slot(self, *args): self.PrependSlot(N.Uint32Flags, *args) def PrependUint64Slot(self, *args): self.PrependSlot(N.Uint64Flags, *args) def PrependInt8Slot(self, *args): self.PrependSlot(N.Int8Flags, *args) def PrependInt16Slot(self, *args): self.PrependSlot(N.Int16Flags, *args) def PrependInt32Slot(self, *args): self.PrependSlot(N.Int32Flags, *args) def PrependInt64Slot(self, *args): self.PrependSlot(N.Int64Flags, *args) def PrependFloat32Slot(self, *args): self.PrependSlot(N.Float32Flags, *args) def PrependFloat64Slot(self, *args): self.PrependSlot(N.Float64Flags, *args) def PrependUOffsetTRelativeSlot(self, o, x, d): """ PrependUOffsetTRelativeSlot prepends an UOffsetT onto the object at vtable slot `o`. If value `x` equals default `d`, then the slot will be set to zero and no other data will be written. """ if x != d or self.forceDefaults: self.PrependUOffsetTRelative(x) self.Slot(o) def PrependStructSlot(self, v, x, d): """ PrependStructSlot prepends a struct onto the object at vtable slot `o`. Structs are stored inline, so nothing additional is being added. 
In generated code, `d` is always 0. """ N.enforce_number(d, N.UOffsetTFlags) if x != d: self.assertStructIsInline(x) self.Slot(v) ## @endcond def PrependBool(self, x): """Prepend a `bool` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.BoolFlags, x) def PrependByte(self, x): """Prepend a `byte` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Uint8Flags, x) def PrependUint8(self, x): """Prepend an `uint8` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Uint8Flags, x) def PrependUint16(self, x): """Prepend an `uint16` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Uint16Flags, x) def PrependUint32(self, x): """Prepend an `uint32` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Uint32Flags, x) def PrependUint64(self, x): """Prepend an `uint64` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Uint64Flags, x) def PrependInt8(self, x): """Prepend an `int8` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Int8Flags, x) def PrependInt16(self, x): """Prepend an `int16` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Int16Flags, x) def PrependInt32(self, x): """Prepend an `int32` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Int32Flags, x) def PrependInt64(self, x): """Prepend an `int64` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Int64Flags, x) def PrependFloat32(self, x): """Prepend a `float32` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Float32Flags, x) def PrependFloat64(self, x): """Prepend a `float64` to the Builder buffer. Note: aligns and checks for space. """ self.Prepend(N.Float64Flags, x) def ForceDefaults(self, forceDefaults): """ In order to save space, fields that are set to their default value don't get serialized into the buffer. Forcing defaults provides a way to manually disable this optimization. When set to `True`, will always serialize default values. """ self.forceDefaults = forceDefaults ############################################################## ## @cond FLATBUFFERS_INTERNAL def PrependVOffsetT(self, x): self.Prepend(N.VOffsetTFlags, x) def Place(self, x, flags): """ Place prepends a value specified by `flags` to the Builder, without checking for available space. """ N.enforce_number(x, flags) self.head = self.head - flags.bytewidth encode.Write(flags.packer_type, self.Bytes, self.Head(), x) def PlaceVOffsetT(self, x): """PlaceVOffsetT prepends a VOffsetT to the Builder, without checking for space. """ N.enforce_number(x, N.VOffsetTFlags) self.head = self.head - N.VOffsetTFlags.bytewidth encode.Write(packer.voffset, self.Bytes, self.Head(), x) def PlaceSOffsetT(self, x): """PlaceSOffsetT prepends a SOffsetT to the Builder, without checking for space. """ N.enforce_number(x, N.SOffsetTFlags) self.head = self.head - N.SOffsetTFlags.bytewidth encode.Write(packer.soffset, self.Bytes, self.Head(), x) def PlaceUOffsetT(self, x): """PlaceUOffsetT prepends a UOffsetT to the Builder, without checking for space. 
""" N.enforce_number(x, N.UOffsetTFlags) self.head = self.head - N.UOffsetTFlags.bytewidth encode.Write(packer.uoffset, self.Bytes, self.Head(), x) ## @endcond ## @cond FLATBUFFERS_INTERNAL def vtableEqual(a, objectStart, b): """vtableEqual compares an unwritten vtable to a written vtable.""" N.enforce_number(objectStart, N.UOffsetTFlags) if len(a) * N.VOffsetTFlags.bytewidth != len(b): return False for i, elem in enumerate(a): x = encode.Get(packer.voffset, b, i * N.VOffsetTFlags.bytewidth) # Skip vtable entries that indicate a default value. if x == 0 and elem == 0: pass else: y = objectStart - elem if x != y: return False return True ## @endcond ## @} zlmdb-22.6.1/flatbuffers/compat.py000066400000000000000000000047271426100523600170630ustar00rootroot00000000000000# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ A tiny version of `six` to help with backwards compability. Also includes compatibility helpers for numpy. """ import sys PY2 = sys.version_info[0] == 2 PY26 = sys.version_info[0:2] == (2, 6) PY27 = sys.version_info[0:2] == (2, 7) PY275 = sys.version_info[0:3] >= (2, 7, 5) PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: import importlib.machinery string_types = (str,) binary_types = (bytes,bytearray) range_func = range memoryview_type = memoryview struct_bool_decl = "?" 
else: import imp string_types = (unicode,) if PY26 or PY27: binary_types = (str,bytearray) else: binary_types = (str,) range_func = xrange if PY26 or (PY27 and not PY275): memoryview_type = buffer struct_bool_decl = "= 0 if value < (1 << 8): return BitWidth.W8 elif value < (1 << 16): return BitWidth.W16 elif value < (1 << 32): return BitWidth.W32 elif value < (1 << 64): return BitWidth.W64 else: raise ValueError('value is too big to encode: %s' % value) @staticmethod def I(value): """Returns the minimum `BitWidth` to encode signed integer value.""" # -2^(n-1) <= value < 2^(n-1) # -2^n <= 2 * value < 2^n # 2 * value < 2^n, when value >= 0 or 2 * (-value) <= 2^n, when value < 0 # 2 * value < 2^n, when value >= 0 or 2 * (-value) - 1 < 2^n, when value < 0 # # if value >= 0: # return BitWidth.U(2 * value) # else: # return BitWidth.U(2 * (-value) - 1) # ~x = -x - 1 value *= 2 return BitWidth.U(value if value >= 0 else ~value) @staticmethod def F(value): """Returns the `BitWidth` to encode floating point value.""" if struct.unpack('f', struct.pack('f', value))[0] == value: return BitWidth.W32 return BitWidth.W64 @staticmethod def B(byte_width): return { 1: BitWidth.W8, 2: BitWidth.W16, 4: BitWidth.W32, 8: BitWidth.W64 }[byte_width] I = {1: 'b', 2: 'h', 4: 'i', 8: 'q'} # Integer formats U = {1: 'B', 2: 'H', 4: 'I', 8: 'Q'} # Unsigned integer formats F = {4: 'f', 8: 'd'} # Floating point formats def _Unpack(fmt, buf): return struct.unpack(fmt[len(buf)], buf)[0] def _UnpackVector(fmt, buf, length): byte_width = len(buf) // length return struct.unpack('%d%s' % (length, fmt[byte_width]), buf) def _Pack(fmt, value, byte_width): return struct.pack(fmt[byte_width], value) def _PackVector(fmt, values, byte_width): return struct.pack('%d%s' % (len(values), fmt[byte_width]), *values) def _Mutate(fmt, buf, value, byte_width, value_bit_width): if (1 << value_bit_width) <= byte_width: buf[:byte_width] = _Pack(fmt, value, byte_width) return True return False # Computes how many bytes you'd have to pad to be able to write an # "scalar_size" scalar if the buffer had grown to "buf_size", # "scalar_size" is a power of two. def _PaddingBytes(buf_size, scalar_size): # ((buf_size + (scalar_size - 1)) // scalar_size) * scalar_size - buf_size return -buf_size & (scalar_size - 1) def _ShiftSlice(s, offset, length): start = offset + (0 if s.start is None else s.start) stop = offset + (length if s.stop is None else s.stop) return slice(start, stop, s.step) # https://en.cppreference.com/w/cpp/algorithm/lower_bound def _LowerBound(values, value, pred): """Implementation of C++ std::lower_bound() algorithm.""" first, last = 0, len(values) count = last - first while count > 0: i = first step = count // 2 i += step if pred(values[i], value): i += 1 first = i count -= step + 1 else: count = step return first # https://en.cppreference.com/w/cpp/algorithm/binary_search def _BinarySearch(values, value, pred=lambda x, y: x < y): """Implementation of C++ std::binary_search() algorithm.""" index = _LowerBound(values, value, pred) if index != len(values) and not pred(value, values[index]): return index return -1 class Type(enum.IntEnum): """Supported types of encoded data. These are used as the upper 6 bits of a type field to indicate the actual type. """ NULL = 0 INT = 1 UINT = 2 FLOAT = 3 # Types above stored inline, types below store an offset. KEY = 4 STRING = 5 INDIRECT_INT = 6 INDIRECT_UINT = 7 INDIRECT_FLOAT = 8 MAP = 9 VECTOR = 10 # Untyped. VECTOR_INT = 11 # Typed any size (stores no type table). 
VECTOR_UINT = 12 VECTOR_FLOAT = 13 VECTOR_KEY = 14 # DEPRECATED, use VECTOR or VECTOR_KEY instead. # Read test.cpp/FlexBuffersDeprecatedTest() for details on why. VECTOR_STRING_DEPRECATED = 15 VECTOR_INT2 = 16 # Typed tuple (no type table, no size field). VECTOR_UINT2 = 17 VECTOR_FLOAT2 = 18 VECTOR_INT3 = 19 # Typed triple (no type table, no size field). VECTOR_UINT3 = 20 VECTOR_FLOAT3 = 21 VECTOR_INT4 = 22 # Typed quad (no type table, no size field). VECTOR_UINT4 = 23 VECTOR_FLOAT4 = 24 BLOB = 25 BOOL = 26 VECTOR_BOOL = 36 # To do the same type of conversion of type to vector type @staticmethod def Pack(type_, bit_width): return (int(type_) << 2) | bit_width @staticmethod def Unpack(packed_type): return 1 << (packed_type & 0b11), Type(packed_type >> 2) @staticmethod def IsInline(type_): return type_ <= Type.FLOAT or type_ == Type.BOOL @staticmethod def IsTypedVector(type_): return Type.VECTOR_INT <= type_ <= Type.VECTOR_STRING_DEPRECATED or \ type_ == Type.VECTOR_BOOL @staticmethod def IsTypedVectorElementType(type_): return Type.INT <= type_ <= Type.STRING or type_ == Type.BOOL @staticmethod def ToTypedVectorElementType(type_): if not Type.IsTypedVector(type_): raise ValueError('must be typed vector type') return Type(type_ - Type.VECTOR_INT + Type.INT) @staticmethod def IsFixedTypedVector(type_): return Type.VECTOR_INT2 <= type_ <= Type.VECTOR_FLOAT4 @staticmethod def IsFixedTypedVectorElementType(type_): return Type.INT <= type_ <= Type.FLOAT @staticmethod def ToFixedTypedVectorElementType(type_): if not Type.IsFixedTypedVector(type_): raise ValueError('must be fixed typed vector type') # 3 types each, starting from length 2. fixed_type = type_ - Type.VECTOR_INT2 return Type(fixed_type % 3 + Type.INT), fixed_type // 3 + 2 @staticmethod def ToTypedVector(element_type, fixed_len=0): """Converts element type to corresponding vector type. Args: element_type: vector element type fixed_len: number of elements: 0 for typed vector; 2, 3, or 4 for fixed typed vector. Returns: Typed vector type or fixed typed vector type. 
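    Example (illustrative):
      Type.ToTypedVector(Type.INT)                -> Type.VECTOR_INT
      Type.ToTypedVector(Type.FLOAT, fixed_len=3) -> Type.VECTOR_FLOAT3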
""" if fixed_len == 0: if not Type.IsTypedVectorElementType(element_type): raise ValueError('must be typed vector element type') else: if not Type.IsFixedTypedVectorElementType(element_type): raise ValueError('must be fixed typed vector element type') offset = element_type - Type.INT if fixed_len == 0: return Type(offset + Type.VECTOR_INT) # TypedVector elif fixed_len == 2: return Type(offset + Type.VECTOR_INT2) # FixedTypedVector elif fixed_len == 3: return Type(offset + Type.VECTOR_INT3) # FixedTypedVector elif fixed_len == 4: return Type(offset + Type.VECTOR_INT4) # FixedTypedVector else: raise ValueError('unsupported fixed_len: %s' % fixed_len) class Buf: """Class to access underlying buffer object starting from the given offset.""" def __init__(self, buf, offset): self._buf = buf self._offset = offset if offset >= 0 else len(buf) + offset self._length = len(buf) - self._offset def __getitem__(self, key): if isinstance(key, slice): return self._buf[_ShiftSlice(key, self._offset, self._length)] elif isinstance(key, int): return self._buf[self._offset + key] else: raise TypeError('invalid key type') def __setitem__(self, key, value): if isinstance(key, slice): self._buf[_ShiftSlice(key, self._offset, self._length)] = value elif isinstance(key, int): self._buf[self._offset + key] = key else: raise TypeError('invalid key type') def __repr__(self): return 'buf[%d:]' % self._offset def Find(self, sub): """Returns the lowest index where the sub subsequence is found.""" return self._buf[self._offset:].find(sub) def Slice(self, offset): """Returns new `Buf` which starts from the given offset.""" return Buf(self._buf, self._offset + offset) def Indirect(self, offset, byte_width): """Return new `Buf` based on the encoded offset (indirect encoding).""" return self.Slice(offset - _Unpack(U, self[offset:offset + byte_width])) class Object: """Base class for all non-trivial data accessors.""" __slots__ = '_buf', '_byte_width' def __init__(self, buf, byte_width): self._buf = buf self._byte_width = byte_width @property def ByteWidth(self): return self._byte_width class Sized(Object): """Base class for all data accessors which need to read encoded size.""" __slots__ = '_size', def __init__(self, buf, byte_width, size=0): super().__init__(buf, byte_width) if size == 0: self._size = _Unpack(U, self.SizeBytes) else: self._size = size @property def SizeBytes(self): return self._buf[-self._byte_width:0] def __len__(self): return self._size class Blob(Sized): """Data accessor for the encoded blob bytes.""" __slots__ = () @property def Bytes(self): return self._buf[0:len(self)] def __repr__(self): return 'Blob(%s, size=%d)' % (self._buf, len(self)) class String(Sized): """Data accessor for the encoded string bytes.""" __slots__ = () @property def Bytes(self): return self._buf[0:len(self)] def Mutate(self, value): """Mutates underlying string bytes in place. Args: value: New string to replace the existing one. New string must have less or equal UTF-8-encoded bytes than the existing one to successfully mutate underlying byte buffer. Returns: Whether the value was mutated or not. 
""" encoded = value.encode('utf-8') n = len(encoded) if n <= len(self): self._buf[-self._byte_width:0] = _Pack(U, n, self._byte_width) self._buf[0:n] = encoded self._buf[n:len(self)] = bytearray(len(self) - n) return True return False def __str__(self): return self.Bytes.decode('utf-8') def __repr__(self): return 'String(%s, size=%d)' % (self._buf, len(self)) class Key(Object): """Data accessor for the encoded key bytes.""" __slots__ = () def __init__(self, buf, byte_width): assert byte_width == 1 super().__init__(buf, byte_width) @property def Bytes(self): return self._buf[0:len(self)] def __len__(self): return self._buf.Find(0) def __str__(self): return self.Bytes.decode('ascii') def __repr__(self): return 'Key(%s, size=%d)' % (self._buf, len(self)) class Vector(Sized): """Data accessor for the encoded vector bytes.""" __slots__ = () def __getitem__(self, index): if index < 0 or index >= len(self): raise IndexError('vector index %s is out of [0, %d) range' % \ (index, len(self))) packed_type = self._buf[len(self) * self._byte_width + index] buf = self._buf.Slice(index * self._byte_width) return Ref.PackedType(buf, self._byte_width, packed_type) @property def Value(self): """Returns the underlying encoded data as a list object.""" return [e.Value for e in self] def __repr__(self): return 'Vector(%s, byte_width=%d, size=%d)' % \ (self._buf, self._byte_width, self._size) class TypedVector(Sized): """Data accessor for the encoded typed vector or fixed typed vector bytes.""" __slots__ = '_element_type', '_size' def __init__(self, buf, byte_width, element_type, size=0): super().__init__(buf, byte_width, size) if element_type == Type.STRING: # These can't be accessed as strings, since we don't know the bit-width # of the size field, see the declaration of # FBT_VECTOR_STRING_DEPRECATED above for details. # We change the type here to be keys, which are a subtype of strings, # and will ignore the size field. This will truncate strings with # embedded nulls. 
element_type = Type.KEY self._element_type = element_type @property def Bytes(self): return self._buf[:self._byte_width * len(self)] @property def ElementType(self): return self._element_type def __getitem__(self, index): if index < 0 or index >= len(self): raise IndexError('vector index %s is out of [0, %d) range' % \ (index, len(self))) buf = self._buf.Slice(index * self._byte_width) return Ref(buf, self._byte_width, 1, self._element_type) @property def Value(self): """Returns underlying data as list object.""" if not self: return [] if self._element_type is Type.BOOL: return [bool(e) for e in _UnpackVector(U, self.Bytes, len(self))] elif self._element_type is Type.INT: return list(_UnpackVector(I, self.Bytes, len(self))) elif self._element_type is Type.UINT: return list(_UnpackVector(U, self.Bytes, len(self))) elif self._element_type is Type.FLOAT: return list(_UnpackVector(F, self.Bytes, len(self))) elif self._element_type is Type.KEY: return [e.AsKey for e in self] elif self._element_type is Type.STRING: return [e.AsString for e in self] else: raise TypeError('unsupported element_type: %s' % self._element_type) def __repr__(self): return 'TypedVector(%s, byte_width=%d, element_type=%s, size=%d)' % \ (self._buf, self._byte_width, self._element_type, self._size) class Map(Vector): """Data accessor for the encoded map bytes.""" @staticmethod def CompareKeys(a, b): if isinstance(a, Ref): a = a.AsKeyBytes if isinstance(b, Ref): b = b.AsKeyBytes return a < b def __getitem__(self, key): if isinstance(key, int): return super().__getitem__(key) index = _BinarySearch(self.Keys, key.encode('ascii'), self.CompareKeys) if index != -1: return super().__getitem__(index) raise KeyError(key) @property def Keys(self): byte_width = _Unpack(U, self._buf[-2 * self._byte_width:-self._byte_width]) buf = self._buf.Indirect(-3 * self._byte_width, self._byte_width) return TypedVector(buf, byte_width, Type.KEY) @property def Values(self): return Vector(self._buf, self._byte_width) @property def Value(self): return {k.Value: v.Value for k, v in zip(self.Keys, self.Values)} def __repr__(self): return 'Map(%s, size=%d)' % (self._buf, len(self)) class Ref: """Data accessor for the encoded data bytes.""" __slots__ = '_buf', '_parent_width', '_byte_width', '_type' @staticmethod def PackedType(buf, parent_width, packed_type): byte_width, type_ = Type.Unpack(packed_type) return Ref(buf, parent_width, byte_width, type_) def __init__(self, buf, parent_width, byte_width, type_): self._buf = buf self._parent_width = parent_width self._byte_width = byte_width self._type = type_ def __repr__(self): return 'Ref(%s, parent_width=%d, byte_width=%d, type_=%s)' % \ (self._buf, self._parent_width, self._byte_width, self._type) @property def _Bytes(self): return self._buf[:self._parent_width] def _ConvertError(self, target_type): raise TypeError('cannot convert %s to %s' % (self._type, target_type)) def _Indirect(self): return self._buf.Indirect(0, self._parent_width) @property def IsNull(self): return self._type is Type.NULL @property def IsBool(self): return self._type is Type.BOOL @property def AsBool(self): if self._type is Type.BOOL: return bool(_Unpack(U, self._Bytes)) else: return self.AsInt != 0 def MutateBool(self, value): """Mutates underlying boolean value bytes in place. Args: value: New boolean value. Returns: Whether the value was mutated or not. 
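    Example (illustrative): for a reference to an encoded BOOL,
    MutateBool(False) overwrites the stored byte in place and returns True;
    for any other type it returns False without writing.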
""" return self.IsBool and \ _Mutate(U, self._buf, value, self._parent_width, BitWidth.W8) @property def IsNumeric(self): return self.IsInt or self.IsFloat @property def IsInt(self): return self._type in (Type.INT, Type.INDIRECT_INT, Type.UINT, Type.INDIRECT_UINT) @property def AsInt(self): """Returns current reference as integer value.""" if self.IsNull: return 0 elif self.IsBool: return int(self.AsBool) elif self._type is Type.INT: return _Unpack(I, self._Bytes) elif self._type is Type.INDIRECT_INT: return _Unpack(I, self._Indirect()[:self._byte_width]) if self._type is Type.UINT: return _Unpack(U, self._Bytes) elif self._type is Type.INDIRECT_UINT: return _Unpack(U, self._Indirect()[:self._byte_width]) elif self.IsString: return len(self.AsString) elif self.IsKey: return len(self.AsKey) elif self.IsBlob: return len(self.AsBlob) elif self.IsVector: return len(self.AsVector) elif self.IsTypedVector: return len(self.AsTypedVector) elif self.IsFixedTypedVector: return len(self.AsFixedTypedVector) else: raise self._ConvertError(Type.INT) def MutateInt(self, value): """Mutates underlying integer value bytes in place. Args: value: New integer value. It must fit to the byte size of the existing encoded value. Returns: Whether the value was mutated or not. """ if self._type is Type.INT: return _Mutate(I, self._buf, value, self._parent_width, BitWidth.I(value)) elif self._type is Type.INDIRECT_INT: return _Mutate(I, self._Indirect(), value, self._byte_width, BitWidth.I(value)) elif self._type is Type.UINT: return _Mutate(U, self._buf, value, self._parent_width, BitWidth.U(value)) elif self._type is Type.INDIRECT_UINT: return _Mutate(U, self._Indirect(), value, self._byte_width, BitWidth.U(value)) else: return False @property def IsFloat(self): return self._type in (Type.FLOAT, Type.INDIRECT_FLOAT) @property def AsFloat(self): """Returns current reference as floating point value.""" if self.IsNull: return 0.0 elif self.IsBool: return float(self.AsBool) elif self.IsInt: return float(self.AsInt) elif self._type is Type.FLOAT: return _Unpack(F, self._Bytes) elif self._type is Type.INDIRECT_FLOAT: return _Unpack(F, self._Indirect()[:self._byte_width]) elif self.IsString: return float(self.AsString) elif self.IsVector: return float(len(self.AsVector)) elif self.IsTypedVector(): return float(len(self.AsTypedVector)) elif self.IsFixedTypedVector(): return float(len(self.FixedTypedVector)) else: raise self._ConvertError(Type.FLOAT) def MutateFloat(self, value): """Mutates underlying floating point value bytes in place. Args: value: New float value. It must fit to the byte size of the existing encoded value. Returns: Whether the value was mutated or not. 
""" if self._type is Type.FLOAT: return _Mutate(F, self._buf, value, self._parent_width, BitWidth.B(self._parent_width)) elif self._type is Type.INDIRECT_FLOAT: return _Mutate(F, self._Indirect(), value, self._byte_width, BitWidth.B(self._byte_width)) else: return False @property def IsKey(self): return self._type is Type.KEY @property def AsKeyBytes(self): if self.IsKey: return Key(self._Indirect(), self._byte_width).Bytes else: raise self._ConvertError(Type.KEY) @property def AsKey(self): if self.IsKey: return str(Key(self._Indirect(), self._byte_width)) else: raise self._ConvertError(Type.KEY) @property def IsString(self): return self._type is Type.STRING @property def AsString(self): if self.IsString: return str(String(self._Indirect(), self._byte_width)) elif self.IsKey: return self.AsKey else: raise self._ConvertError(Type.STRING) def MutateString(self, value): return String(self._Indirect(), self._byte_width).Mutate(value) @property def IsBlob(self): return self._type is Type.BLOB @property def AsBlob(self): if self.IsBlob: return Blob(self._Indirect(), self._byte_width).Bytes else: raise self._ConvertError(Type.BLOB) @property def IsAnyVector(self): return self.IsVector or self.IsTypedVector or self.IsFixedTypedVector() @property def IsVector(self): return self._type in (Type.VECTOR, Type.MAP) @property def AsVector(self): if self.IsVector: return Vector(self._Indirect(), self._byte_width) else: raise self._ConvertError(Type.VECTOR) @property def IsTypedVector(self): return Type.IsTypedVector(self._type) @property def AsTypedVector(self): if self.IsTypedVector: return TypedVector(self._Indirect(), self._byte_width, Type.ToTypedVectorElementType(self._type)) else: raise self._ConvertError('TYPED_VECTOR') @property def IsFixedTypedVector(self): return Type.IsFixedTypedVector(self._type) @property def AsFixedTypedVector(self): if self.IsFixedTypedVector: element_type, size = Type.ToFixedTypedVectorElementType(self._type) return TypedVector(self._Indirect(), self._byte_width, element_type, size) else: raise self._ConvertError('FIXED_TYPED_VECTOR') @property def IsMap(self): return self._type is Type.MAP @property def AsMap(self): if self.IsMap: return Map(self._Indirect(), self._byte_width) else: raise self._ConvertError(Type.MAP) @property def Value(self): """Converts current reference to value of corresponding type. This is equivalent to calling `AsInt` for integer values, `AsFloat` for floating point values, etc. Returns: Value of corresponding type. 
""" if self.IsNull: return None elif self.IsBool: return self.AsBool elif self.IsInt: return self.AsInt elif self.IsFloat: return self.AsFloat elif self.IsString: return self.AsString elif self.IsKey: return self.AsKey elif self.IsBlob: return self.AsBlob elif self.IsMap: return self.AsMap.Value elif self.IsVector: return self.AsVector.Value elif self.IsTypedVector: return self.AsTypedVector.Value elif self.IsFixedTypedVector: return self.AsFixedTypedVector.Value else: raise TypeError('cannot convert %r to value' % self) def _IsIterable(obj): try: iter(obj) return True except TypeError: return False class Value: """Class to represent given value during the encoding process.""" @staticmethod def Null(): return Value(0, Type.NULL, BitWidth.W8) @staticmethod def Bool(value): return Value(value, Type.BOOL, BitWidth.W8) @staticmethod def Int(value, bit_width): return Value(value, Type.INT, bit_width) @staticmethod def UInt(value, bit_width): return Value(value, Type.UINT, bit_width) @staticmethod def Float(value, bit_width): return Value(value, Type.FLOAT, bit_width) @staticmethod def Key(offset): return Value(offset, Type.KEY, BitWidth.W8) def __init__(self, value, type_, min_bit_width): self._value = value self._type = type_ # For scalars: of itself, for vector: of its elements, for string: length. self._min_bit_width = min_bit_width @property def Value(self): return self._value @property def Type(self): return self._type @property def MinBitWidth(self): return self._min_bit_width def StoredPackedType(self, parent_bit_width=BitWidth.W8): return Type.Pack(self._type, self.StoredWidth(parent_bit_width)) # We have an absolute offset, but want to store a relative offset # elem_index elements beyond the current buffer end. Since whether # the relative offset fits in a certain byte_width depends on # the size of the elements before it (and their alignment), we have # to test for each size in turn. 
def ElemWidth(self, buf_size, elem_index=0): if Type.IsInline(self._type): return self._min_bit_width for byte_width in 1, 2, 4, 8: offset_loc = buf_size + _PaddingBytes(buf_size, byte_width) + \ elem_index * byte_width bit_width = BitWidth.U(offset_loc - self._value) if byte_width == (1 << bit_width): return bit_width raise ValueError('relative offset is too big') def StoredWidth(self, parent_bit_width=BitWidth.W8): if Type.IsInline(self._type): return max(self._min_bit_width, parent_bit_width) return self._min_bit_width def __repr__(self): return 'Value(%s, %s, %s)' % (self._value, self._type, self._min_bit_width) def __str__(self): return str(self._value) def InMap(func): def wrapper(self, *args, **kwargs): if isinstance(args[0], str): self.Key(args[0]) func(self, *args[1:], **kwargs) else: func(self, *args, **kwargs) return wrapper def InMapForString(func): def wrapper(self, *args): if len(args) == 1: func(self, args[0]) elif len(args) == 2: self.Key(args[0]) func(self, args[1]) else: raise ValueError('invalid number of arguments') return wrapper class Pool: """Collection of (data, offset) pairs sorted by data for quick access.""" def __init__(self): self._pool = [] # sorted list of (data, offset) tuples def FindOrInsert(self, data, offset): do = data, offset index = _BinarySearch(self._pool, do, lambda a, b: a[0] < b[0]) if index != -1: _, offset = self._pool[index] return offset self._pool.insert(index, do) return None def Clear(self): self._pool = [] @property def Elements(self): return [data for data, _ in self._pool] class Builder: """Helper class to encode structural data into flexbuffers format.""" def __init__(self, share_strings=False, share_keys=True, force_min_bit_width=BitWidth.W8): self._share_strings = share_strings self._share_keys = share_keys self._force_min_bit_width = force_min_bit_width self._string_pool = Pool() self._key_pool = Pool() self._finished = False self._buf = bytearray() self._stack = [] def __len__(self): return len(self._buf) @property def StringPool(self): return self._string_pool @property def KeyPool(self): return self._key_pool def Clear(self): self._string_pool.Clear() self._key_pool.Clear() self._finished = False self._buf = bytearray() self._stack = [] def Finish(self): """Finishes encoding process and returns underlying buffer.""" if self._finished: raise RuntimeError('builder has been already finished') # If you hit this exception, you likely have objects that were never # included in a parent. You need to have exactly one root to finish a # buffer. Check your Start/End calls are matched, and all objects are inside # some other object. 
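    # Illustrative: fbb = Builder(); fbb.Int(1); fbb.Int(2); fbb.Finish()
    # fails here, because two unparented roots are left on the stack.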
    if len(self._stack) != 1:
      raise RuntimeError('internal stack size must be one')

    value = self._stack[0]
    byte_width = self._Align(value.ElemWidth(len(self._buf)))
    self._WriteAny(value, byte_width=byte_width)  # Root value
    self._Write(U, value.StoredPackedType(), byte_width=1)  # Root type
    self._Write(U, byte_width, byte_width=1)  # Root size
    self._finished = True
    return self._buf

  def _ReadKey(self, offset):
    key = self._buf[offset:]
    return key[:key.find(0)]

  def _Align(self, alignment):
    byte_width = 1 << alignment
    self._buf.extend(b'\x00' * _PaddingBytes(len(self._buf), byte_width))
    return byte_width

  def _Write(self, fmt, value, byte_width):
    self._buf.extend(_Pack(fmt, value, byte_width))

  def _WriteVector(self, fmt, values, byte_width):
    self._buf.extend(_PackVector(fmt, values, byte_width))

  def _WriteOffset(self, offset, byte_width):
    relative_offset = len(self._buf) - offset
    assert byte_width == 8 or relative_offset < (1 << (8 * byte_width))
    self._Write(U, relative_offset, byte_width)

  def _WriteAny(self, value, byte_width):
    fmt = {
        Type.NULL: U,
        Type.BOOL: U,
        Type.INT: I,
        Type.UINT: U,
        Type.FLOAT: F
    }.get(value.Type)
    if fmt:
      self._Write(fmt, value.Value, byte_width)
    else:
      self._WriteOffset(value.Value, byte_width)

  def _WriteBlob(self, data, append_zero, type_):
    bit_width = BitWidth.U(len(data))
    byte_width = self._Align(bit_width)
    self._Write(U, len(data), byte_width)
    loc = len(self._buf)
    self._buf.extend(data)
    if append_zero:
      self._buf.append(0)
    self._stack.append(Value(loc, type_, bit_width))
    return loc

  def _WriteScalarVector(self, element_type, byte_width, elements, fixed):
    """Writes scalar vector elements to the underlying buffer."""
    bit_width = BitWidth.B(byte_width)
    # If you get this exception, you're trying to write a vector with a size
    # field that is bigger than the scalars you're trying to write (e.g. a
    # byte vector > 255 elements). For such types, write a "blob" instead.
    if BitWidth.U(len(elements)) > bit_width:
      raise ValueError('too many elements for the given byte_width')

    self._Align(bit_width)
    if not fixed:
      self._Write(U, len(elements), byte_width)

    loc = len(self._buf)
    fmt = {Type.INT: I, Type.UINT: U, Type.FLOAT: F}.get(element_type)
    if not fmt:
      raise TypeError('unsupported element_type')
    self._WriteVector(fmt, elements, byte_width)

    type_ = Type.ToTypedVector(element_type, len(elements) if fixed else 0)
    self._stack.append(Value(loc, type_, bit_width))
    return loc

  def _CreateVector(self, elements, typed, fixed, keys=None):
    """Writes vector elements to the underlying buffer."""
    length = len(elements)
    if fixed and not typed:
      raise ValueError('fixed vector must be typed')

    # Figure out the smallest bit width we can store this vector with.
    bit_width = max(self._force_min_bit_width, BitWidth.U(length))
    prefix_elems = 1  # Vector size
    if keys:
      bit_width = max(bit_width, keys.ElemWidth(len(self._buf)))
      prefix_elems += 2  # Offset to the keys vector and its byte width.

    vector_type = Type.KEY
    # Check bit widths and types for all elements.
    for i, e in enumerate(elements):
      bit_width = max(bit_width, e.ElemWidth(len(self._buf), prefix_elems + i))

      if typed:
        if i == 0:
          vector_type = e.Type
        else:
          if vector_type != e.Type:
            raise RuntimeError('typed vector elements must be of the same type')

    if fixed and not Type.IsFixedTypedVectorElementType(vector_type):
      raise RuntimeError('must be fixed typed vector element type')

    byte_width = self._Align(bit_width)
    # Write vector. First the keys width/offset if available, and size.
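    # Illustrative layout of the data written below:
    #   [keys offset][keys byte width][size][e0 .. eN-1][packed types]
    # where the first two fields exist only for maps and the trailing
    # packed-type bytes only for untyped vectors.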
if keys: self._WriteOffset(keys.Value, byte_width) self._Write(U, 1 << keys.MinBitWidth, byte_width) if not fixed: self._Write(U, length, byte_width) # Then the actual data. loc = len(self._buf) for e in elements: self._WriteAny(e, byte_width) # Then the types. if not typed: for e in elements: self._buf.append(e.StoredPackedType(bit_width)) if keys: type_ = Type.MAP else: if typed: type_ = Type.ToTypedVector(vector_type, length if fixed else 0) else: type_ = Type.VECTOR return Value(loc, type_, bit_width) def _PushIndirect(self, value, type_, bit_width): byte_width = self._Align(bit_width) loc = len(self._buf) fmt = { Type.INDIRECT_INT: I, Type.INDIRECT_UINT: U, Type.INDIRECT_FLOAT: F }[type_] self._Write(fmt, value, byte_width) self._stack.append(Value(loc, type_, bit_width)) @InMapForString def String(self, value): """Encodes string value.""" reset_to = len(self._buf) encoded = value.encode('utf-8') loc = self._WriteBlob(encoded, append_zero=True, type_=Type.STRING) if self._share_strings: prev_loc = self._string_pool.FindOrInsert(encoded, loc) if prev_loc is not None: del self._buf[reset_to:] self._stack[-1]._value = loc = prev_loc # pylint: disable=protected-access return loc @InMap def Blob(self, value): """Encodes binary blob value. Args: value: A byte/bytearray value to encode Returns: Offset of the encoded value in underlying the byte buffer. """ return self._WriteBlob(value, append_zero=False, type_=Type.BLOB) def Key(self, value): """Encodes key value. Args: value: A byte/bytearray/str value to encode. Byte object must not contain zero bytes. String object must be convertible to ASCII. Returns: Offset of the encoded value in the underlying byte buffer. """ if isinstance(value, (bytes, bytearray)): encoded = value else: encoded = value.encode('ascii') if 0 in encoded: raise ValueError('key contains zero byte') loc = len(self._buf) self._buf.extend(encoded) self._buf.append(0) if self._share_keys: prev_loc = self._key_pool.FindOrInsert(encoded, loc) if prev_loc is not None: del self._buf[loc:] loc = prev_loc self._stack.append(Value.Key(loc)) return loc def Null(self, key=None): """Encodes None value.""" if key: self.Key(key) self._stack.append(Value.Null()) @InMap def Bool(self, value): """Encodes boolean value. Args: value: A boolean value. """ self._stack.append(Value.Bool(value)) @InMap def Int(self, value, byte_width=0): """Encodes signed integer value. Args: value: A signed integer value. byte_width: Number of bytes to use: 1, 2, 4, or 8. """ bit_width = BitWidth.I(value) if byte_width == 0 else BitWidth.B(byte_width) self._stack.append(Value.Int(value, bit_width)) @InMap def IndirectInt(self, value, byte_width=0): """Encodes signed integer value indirectly. Args: value: A signed integer value. byte_width: Number of bytes to use: 1, 2, 4, or 8. """ bit_width = BitWidth.I(value) if byte_width == 0 else BitWidth.B(byte_width) self._PushIndirect(value, Type.INDIRECT_INT, bit_width) @InMap def UInt(self, value, byte_width=0): """Encodes unsigned integer value. Args: value: An unsigned integer value. byte_width: Number of bytes to use: 1, 2, 4, or 8. """ bit_width = BitWidth.U(value) if byte_width == 0 else BitWidth.B(byte_width) self._stack.append(Value.UInt(value, bit_width)) @InMap def IndirectUInt(self, value, byte_width=0): """Encodes unsigned integer value indirectly. Args: value: An unsigned integer value. byte_width: Number of bytes to use: 1, 2, 4, or 8. 
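    Example (illustrative): IndirectUInt(2 ** 40) stores the wide value once
    out of line, so the slot that references it only needs to hold a small
    relative offset.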
""" bit_width = BitWidth.U(value) if byte_width == 0 else BitWidth.B(byte_width) self._PushIndirect(value, Type.INDIRECT_UINT, bit_width) @InMap def Float(self, value, byte_width=0): """Encodes floating point value. Args: value: A floating point value. byte_width: Number of bytes to use: 4 or 8. """ bit_width = BitWidth.F(value) if byte_width == 0 else BitWidth.B(byte_width) self._stack.append(Value.Float(value, bit_width)) @InMap def IndirectFloat(self, value, byte_width=0): """Encodes floating point value indirectly. Args: value: A floating point value. byte_width: Number of bytes to use: 4 or 8. """ bit_width = BitWidth.F(value) if byte_width == 0 else BitWidth.B(byte_width) self._PushIndirect(value, Type.INDIRECT_FLOAT, bit_width) def _StartVector(self): """Starts vector construction.""" return len(self._stack) def _EndVector(self, start, typed, fixed): """Finishes vector construction by encodung its elements.""" vec = self._CreateVector(self._stack[start:], typed, fixed) del self._stack[start:] self._stack.append(vec) return vec.Value @contextlib.contextmanager def Vector(self, key=None): if key: self.Key(key) try: start = self._StartVector() yield self finally: self._EndVector(start, typed=False, fixed=False) @InMap def VectorFromElements(self, elements): """Encodes sequence of any elements as a vector. Args: elements: sequence of elements, they may have different types. """ with self.Vector(): for e in elements: self.Add(e) @contextlib.contextmanager def TypedVector(self, key=None): if key: self.Key(key) try: start = self._StartVector() yield self finally: self._EndVector(start, typed=True, fixed=False) @InMap def TypedVectorFromElements(self, elements, element_type=None): """Encodes sequence of elements of the same type as typed vector. Args: elements: Sequence of elements, they must be of the same type. element_type: Suggested element type. Setting it to None means determining correct value automatically based on the given elements. """ if isinstance(elements, array.array): if elements.typecode == 'f': self._WriteScalarVector(Type.FLOAT, 4, elements, fixed=False) elif elements.typecode == 'd': self._WriteScalarVector(Type.FLOAT, 8, elements, fixed=False) elif elements.typecode in ('b', 'h', 'i', 'l', 'q'): self._WriteScalarVector( Type.INT, elements.itemsize, elements, fixed=False) elif elements.typecode in ('B', 'H', 'I', 'L', 'Q'): self._WriteScalarVector( Type.UINT, elements.itemsize, elements, fixed=False) else: raise ValueError('unsupported array typecode: %s' % elements.typecode) else: add = self.Add if element_type is None else self.Adder(element_type) with self.TypedVector(): for e in elements: add(e) @InMap def FixedTypedVectorFromElements(self, elements, element_type=None, byte_width=0): """Encodes sequence of elements of the same type as fixed typed vector. Args: elements: Sequence of elements, they must be of the same type. Allowed types are `Type.INT`, `Type.UINT`, `Type.FLOAT`. Allowed number of elements are 2, 3, or 4. element_type: Suggested element type. Setting it to None means determining correct value automatically based on the given elements. byte_width: Number of bytes to use per element. For `Type.INT` and `Type.UINT`: 1, 2, 4, or 8. For `Type.FLOAT`: 4 or 8. Setting it to 0 means determining correct value automatically based on the given elements. 
""" if not 2 <= len(elements) <= 4: raise ValueError('only 2, 3, or 4 elements are supported') types = {type(e) for e in elements} if len(types) != 1: raise TypeError('all elements must be of the same type') type_, = types if element_type is None: element_type = {int: Type.INT, float: Type.FLOAT}.get(type_) if not element_type: raise TypeError('unsupported element_type: %s' % type_) if byte_width == 0: width = { Type.UINT: BitWidth.U, Type.INT: BitWidth.I, Type.FLOAT: BitWidth.F }[element_type] byte_width = 1 << max(width(e) for e in elements) self._WriteScalarVector(element_type, byte_width, elements, fixed=True) def _StartMap(self): """Starts map construction.""" return len(self._stack) def _EndMap(self, start): """Finishes map construction by encodung its elements.""" # Interleaved keys and values on the stack. stack = self._stack[start:] if len(stack) % 2 != 0: raise RuntimeError('must be even number of keys and values') for key in stack[::2]: if key.Type is not Type.KEY: raise RuntimeError('all map keys must be of %s type' % Type.KEY) pairs = zip(stack[::2], stack[1::2]) # [(key, value), ...] pairs = sorted(pairs, key=lambda pair: self._ReadKey(pair[0].Value)) del self._stack[start:] for pair in pairs: self._stack.extend(pair) keys = self._CreateVector(self._stack[start::2], typed=True, fixed=False) values = self._CreateVector( self._stack[start + 1::2], typed=False, fixed=False, keys=keys) del self._stack[start:] self._stack.append(values) return values.Value @contextlib.contextmanager def Map(self, key=None): if key: self.Key(key) try: start = self._StartMap() yield self finally: self._EndMap(start) def MapFromElements(self, elements): start = self._StartMap() for k, v in elements.items(): self.Key(k) self.Add(v) self._EndMap(start) def Adder(self, type_): return { Type.BOOL: self.Bool, Type.INT: self.Int, Type.INDIRECT_INT: self.IndirectInt, Type.UINT: self.UInt, Type.INDIRECT_UINT: self.IndirectUInt, Type.FLOAT: self.Float, Type.INDIRECT_FLOAT: self.IndirectFloat, Type.KEY: self.Key, Type.BLOB: self.Blob, Type.STRING: self.String, }[type_] @InMapForString def Add(self, value): """Encodes value of any supported type.""" if value is None: self.Null() elif isinstance(value, bool): self.Bool(value) elif isinstance(value, int): self.Int(value) elif isinstance(value, float): self.Float(value) elif isinstance(value, str): self.String(value) elif isinstance(value, (bytes, bytearray)): self.Blob(value) elif isinstance(value, dict): with self.Map(): for k, v in value.items(): self.Key(k) self.Add(v) elif isinstance(value, array.array): self.TypedVectorFromElements(value) elif _IsIterable(value): self.VectorFromElements(value) else: raise TypeError('unsupported python type: %s' % type(value)) @property def LastValue(self): return self._stack[-1] @InMap def ReuseValue(self, value): self._stack.append(value) def GetRoot(buf): """Returns root `Ref` object for the given buffer.""" if len(buf) < 3: raise ValueError('buffer is too small') byte_width = buf[-1] return Ref.PackedType( Buf(buf, -(2 + byte_width)), byte_width, packed_type=buf[-2]) def Dumps(obj): """Returns bytearray with the encoded python object.""" fbb = Builder() fbb.Add(obj) return fbb.Finish() def Loads(buf): """Returns python object decoded from the buffer.""" return GetRoot(buf).Value zlmdb-22.6.1/flatbuffers/number_types.py000066400000000000000000000075631426100523600203150ustar00rootroot00000000000000# Copyright 2014 Google Inc. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import collections import struct from . import packer from .compat import import_numpy, NumpyRequiredForThisFeature np = import_numpy() # For reference, see: # https://docs.python.org/2/library/ctypes.html#ctypes-fundamental-data-types-2 # These classes could be collections.namedtuple instances, but those are new # in 2.6 and we want to work towards 2.5 compatability. class BoolFlags(object): bytewidth = 1 min_val = False max_val = True py_type = bool name = "bool" packer_type = packer.boolean class Uint8Flags(object): bytewidth = 1 min_val = 0 max_val = (2**8) - 1 py_type = int name = "uint8" packer_type = packer.uint8 class Uint16Flags(object): bytewidth = 2 min_val = 0 max_val = (2**16) - 1 py_type = int name = "uint16" packer_type = packer.uint16 class Uint32Flags(object): bytewidth = 4 min_val = 0 max_val = (2**32) - 1 py_type = int name = "uint32" packer_type = packer.uint32 class Uint64Flags(object): bytewidth = 8 min_val = 0 max_val = (2**64) - 1 py_type = int name = "uint64" packer_type = packer.uint64 class Int8Flags(object): bytewidth = 1 min_val = -(2**7) max_val = (2**7) - 1 py_type = int name = "int8" packer_type = packer.int8 class Int16Flags(object): bytewidth = 2 min_val = -(2**15) max_val = (2**15) - 1 py_type = int name = "int16" packer_type = packer.int16 class Int32Flags(object): bytewidth = 4 min_val = -(2**31) max_val = (2**31) - 1 py_type = int name = "int32" packer_type = packer.int32 class Int64Flags(object): bytewidth = 8 min_val = -(2**63) max_val = (2**63) - 1 py_type = int name = "int64" packer_type = packer.int64 class Float32Flags(object): bytewidth = 4 min_val = None max_val = None py_type = float name = "float32" packer_type = packer.float32 class Float64Flags(object): bytewidth = 8 min_val = None max_val = None py_type = float name = "float64" packer_type = packer.float64 class SOffsetTFlags(Int32Flags): pass class UOffsetTFlags(Uint32Flags): pass class VOffsetTFlags(Uint16Flags): pass def valid_number(n, flags): if flags.min_val is None and flags.max_val is None: return True return flags.min_val <= n <= flags.max_val def enforce_number(n, flags): if flags.min_val is None and flags.max_val is None: return if not flags.min_val <= n <= flags.max_val: raise TypeError("bad number %s for type %s" % (str(n), flags.name)) def float32_to_uint32(n): packed = struct.pack("<1f", n) (converted,) = struct.unpack("<1L", packed) return converted def uint32_to_float32(n): packed = struct.pack("<1L", n) (unpacked,) = struct.unpack("<1f", packed) return unpacked def float64_to_uint64(n): packed = struct.pack("<1d", n) (converted,) = struct.unpack("<1Q", packed) return converted def uint64_to_float64(n): packed = struct.pack("<1Q", n) (unpacked,) = struct.unpack("<1d", packed) return unpacked def to_numpy_type(number_type): if np is not None: return np.dtype(number_type.name).newbyteorder('<') else: raise NumpyRequiredForThisFeature('Numpy was not found.') 
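# --- usage sketch (illustrative, not part of the library) --------------------
# A minimal demonstration of the helpers above, assuming this module is
# importable as `flatbuffers.number_types`:
#
#   from flatbuffers import number_types as N
#
#   assert N.valid_number(255, N.Uint8Flags)
#   assert not N.valid_number(256, N.Uint8Flags)
#   N.enforce_number(-128, N.Int8Flags)       # in range, passes silently
#
#   bits = N.float32_to_uint32(1.5)           # 0x3FC00000
#   assert N.uint32_to_float32(bits) == 1.5   # exact round trip for 1.5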
zlmdb-22.6.1/flatbuffers/packer.py000066400000000000000000000022151426100523600170330ustar00rootroot00000000000000# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Provide pre-compiled struct packers for encoding and decoding. See: https://docs.python.org/2/library/struct.html#format-characters """ import struct from . import compat boolean = struct.Struct(compat.struct_bool_decl) uint8 = struct.Struct("=9.0.1 bumpversion>=0.5.3 wheel>=0.30.0 watchdog>=0.8.3 flake8>=3.5.0 tox>=2.9.1 tox-gh-actions>=2.2.0 codecov>=2.0.15 sphinx>=1.7.1 sphinxcontrib-images sphinxcontrib-spelling sphinx-autoapi sphinx_rtd_theme twine>=1.10.0 pytest>=3.4.2 pytest-runner>=2.11.1 humanize>=0.5.1 backports.tempfile>=1.0 # https://github.com/google/yapf/issues/712 yapf==0.29.0 pylint>=1.9.2 pyyaml>=4.2b4 types-PyYAML>=6.0.1 mypy>=0.610; python_version >= '3.4' and platform_python_implementation != 'PyPy' twisted>=18.7.0 zlmdb-22.6.1/requirements-rtd.txt000066400000000000000000000001651426100523600167660ustar00rootroot00000000000000# requirements for building the docs on RTD txaio twisted sphinxcontrib-images sphinxcontrib-spelling sphinx-autoapi zlmdb-22.6.1/requirements-test.txt000066400000000000000000000000471426100523600171530ustar00rootroot00000000000000twisted txaioetcd numpy pandas pyarrow zlmdb-22.6.1/setup.cfg000066400000000000000000000007001426100523600145270ustar00rootroot00000000000000[bumpversion] current_version = 0.1.0 commit = True tag = True [bumpversion:file:setup.py] search = version='{current_version}' replace = version='{new_version}' [bumpversion:file:zlmdb/__init__.py] search = __version__ = '{current_version}' replace = __version__ = '{new_version}' [bdist_wheel] universal = 1 [flake8] exclude = docs [aliases] # Define setup.py command aliases here test = pytest [tool:pytest] collect_ignore = ['setup.py'] zlmdb-22.6.1/setup.py000066400000000000000000000067271426100523600144370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### """The setup script.""" import os from setuptools import setup with open('zlmdb/_version.py') as f: exec(f.read()) # defines __version__ with open('README.rst') as readme_file: readme = readme_file.read() # enforce use of CFFI for LMDB # os.environ['LMDB_FORCE_CFFI'] = '1' # enforce use of bundled libsodium with PyNaCl os.environ['SODIUM_INSTALL'] = 'bundled' requirements = [ 'cffi>=1.14.5', 'cbor2>=5.2.0', 'click>=7.1.2', 'flatbuffers>=2.0', 'lmdb>=1.2.1', 'pynacl>=1.4.0', 'pyyaml>=5.4.1', 'txaio>=21.2.1', 'numpy>=1.20.1', ] extras_require = { 'dev': [] } with open('requirements-dev.txt') as f: for line in f.read().splitlines(): extras_require['dev'].append(line.strip()) # setup_requirements = ['pytest-runner'] test_requirements = ['pytest', 'pytest-runner'] packages = [ 'flatbuffers', 'zlmdb', 'zlmdb.flatbuffers', 'zlmdb.flatbuffers.reflection', ] setup( author="Crossbar.io Technologies GmbH", author_email='contact@crossbario.com', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', ], description="Object-relational zero-copy in-memory database layer for LMDB.", entry_points={ 'console_scripts': [ 'zlmdb=zlmdb.cli:main', ], }, # NumPy 1.19.0+ requires Py 3.6+ # NumPy 1.20.0+ requires Py 3.7+ python_requires='>=3.7', install_requires=requirements, extras_require=extras_require, license="MIT license", long_description=readme, include_package_data=True, keywords='zlmdb', name='zlmdb', packages=packages, # setup_requires=setup_requirements, test_suite='tests', tests_require=test_requirements, url='https://github.com/crossbario/zlmdb', version=__version__, zip_safe=True, ) zlmdb-22.6.1/tests/000077500000000000000000000000001426100523600140535ustar00rootroot00000000000000zlmdb-22.6.1/tests/Makefile000066400000000000000000000001061426100523600155100ustar00rootroot00000000000000generate: ~/scm/3rdparty/flatbuffers/flatc --python -o _gen user.fbs zlmdb-22.6.1/tests/_gen/000077500000000000000000000000001426100523600147635ustar00rootroot00000000000000zlmdb-22.6.1/tests/_gen/crossbarfx/000077500000000000000000000000001426100523600171375ustar00rootroot00000000000000zlmdb-22.6.1/tests/_gen/crossbarfx/Date.py000066400000000000000000000016231426100523600203700ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: crossbarfx import flatbuffers class Date(object): __slots__ = ['_tab'] # Date def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Date def Year(self): return self._tab.Get(flatbuffers.number_types.Uint16Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0)) # Date def Month(self): return self._tab.Get(flatbuffers.number_types.Uint8Flags, self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(2)) # Date def Day(self): return self._tab.Get(flatbuffers.number_types.Uint8Flags, self._tab.Pos + 
flatbuffers.number_types.UOffsetTFlags.py_type(3)) def CreateDate(builder, year, month, day): builder.Prep(2, 4) builder.PrependUint8(day) builder.PrependUint8(month) builder.PrependUint16(year) return builder.Offset() zlmdb-22.6.1/tests/_gen/crossbarfx/Rating.py000066400000000000000000000023201426100523600207320ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: crossbarfx import flatbuffers class Rating(object): __slots__ = ['_tab'] @classmethod def GetRootAsRating(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Rating() x.Init(buf, n + offset) return x # Rating def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Rating def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Rating def Rating(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 def RatingStart(builder): builder.StartObject(2) def RatingAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def RatingAddRating(builder, rating): builder.PrependFloat32Slot(1, rating, 0.0) def RatingEnd(builder): return builder.EndObject() zlmdb-22.6.1/tests/_gen/crossbarfx/Tag.py000066400000000000000000000002141426100523600202210ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: crossbarfx class Tag(object): GEEK = 0 VIP = 1 zlmdb-22.6.1/tests/_gen/crossbarfx/User.py000066400000000000000000000101261426100523600204270ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: crossbarfx import flatbuffers class User(object): __slots__ = ['_tab'] @classmethod def GetRootAsUser(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = User() x.Init(buf, n + offset) return x # User def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # User def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # User def Authid(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # User def Email(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # User def Birthday(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: x = o + self._tab.Pos from .Date import Date obj = Date() obj.Init(self._tab.Bytes, x) return obj return None # User def IsFriendly(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # User def Tags(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # User def TagsAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) return 0 
# User def TagsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.VectorLen(o) return 0 # User def Ratings(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from .Rating import Rating obj = Rating() obj.Init(self._tab.Bytes, x) return obj return None # User def RatingsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.VectorLen(o) return 0 def UserStart(builder): builder.StartObject(7) def UserAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def UserAddAuthid(builder, authid): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(authid), 0) def UserAddEmail(builder, email): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(email), 0) def UserAddBirthday(builder, birthday): builder.PrependStructSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(birthday), 0) def UserAddIsFriendly(builder, isFriendly): builder.PrependBoolSlot(4, isFriendly, 0) def UserAddTags(builder, tags): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(tags), 0) def UserStartTagsVector(builder, numElems): return builder.StartVector(1, numElems, 1) def UserAddRatings(builder, ratings): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(ratings), 0) def UserStartRatingsVector(builder, numElems): return builder.StartVector(4, numElems, 4) def UserEnd(builder): return builder.EndObject() zlmdb-22.6.1/tests/_gen/crossbarfx/__init__.py000066400000000000000000000000001426100523600212360ustar00rootroot00000000000000zlmdb-22.6.1/tests/test_cbor.py000066400000000000000000000046351426100523600164210ustar00rootroot00000000000000import datetime import pickle import json import cbor2 from typing import Optional, List, Dict RESULT = { 'objects': 0, 'bytes': 0 } class Tag(object): GEEK = 1 VIP = 2 # requires python 3.6+: # # class User(object): # oid: int # name: str # authid: str # email: str # birthday: datetime.date # is_friendly: bool # tags: Optional[List[str]] # ratings: Dict[str, float] = {} # friends: List[int] = [] # referred_by: int = None class User(object): def __init__(self): self.oid = None self.name = None self.authid = None self.email = None self.birthday = None self.is_friendly = None self.tags = None self.ratings = {} self.friends = [] self.referred_by = None def marshal(self): obj = { 'oid': self.oid, 'name': self.name, 'authid': self.authid, 'email': self.email, 'birthday': self.birthday, 'is_friendly': self.is_friendly, 'tags': self.tags, 'ratings': self.ratings, 'friends': self.friends, 'referred_by': self.referred_by, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get('oid', None) user.name = obj.get('name', None) user.authid = obj.get('authid', None) user.email = obj.get('email', None) user.birthday = obj.get('birthday', None) user.is_friendly = obj.get('is_friendly', None) user.tags = obj.get('tags', None) user.ratings = obj.get('ratings', {}) user.friends = obj.get('friends', []) user.referred_by = obj.get('referred_by', None) return user def test(): global RESULT user = User() user.oid = 23 user.name = 'Homer Simpson' user.authid = 'homer' user.email = 'homer.simpson@example.com' user.birthday = { 
'year': 1950, 'month': 12, 'day': 24 } user.is_friendly = True user.tags = [Tag.GEEK, Tag.VIP] #data = json.dumps(user.marshal(), ensure_ascii=False) data = cbor2.dumps(user.marshal()) RESULT['objects'] += 1 RESULT['bytes'] += len(data) import timeit N = 1000 M = 100000 for i in range(N): secs = timeit.timeit(test, number=M) ops = round(float(M) / secs, 1) print('{} objects/sec'.format(ops)) print(RESULT) zlmdb-22.6.1/tests/test_fbs_reflection.py000066400000000000000000000004031426100523600204450ustar00rootroot00000000000000import txaio txaio.use_twisted() from autobahn.xbr._schema import FbsSchema for filename in [ '/tmp/test/bfbs/climate.bfbs', '/tmp/test/bfbs/network.bfbs', '/tmp/test/bfbs/location.bfbs']: schema = FbsSchema.load(filename) print(schema) zlmdb-22.6.1/tests/test_flatbuffers.py000066400000000000000000000037511426100523600177750ustar00rootroot00000000000000import flatbuffers from _gen.crossbarfx import User, Date, Tag, Rating RESULT = { 'objects': 0, 'bytes': 0 } def test(): global RESULT builder = flatbuffers.Builder(0) name = builder.CreateString('Homer Simpson') authid = builder.CreateString('homer') email = builder.CreateString('homer.simpson@example.com') User.UserStartTagsVector(builder, 2) builder.PrependUOffsetTRelative(Tag.Tag.GEEK) builder.PrependUOffsetTRelative(Tag.Tag.VIP) tags = builder.EndVector(2) ratings = None if False: _ratings = { 'dawn-of-the-dead': 6.9, 'day-of-the-dead': 7.5, 'land-of-the-dead': 8.9 } _ratings_strings = { } for name in _ratings.keys(): _name = builder.CreateString(name) _ratings_strings[_name] = name User.UserStartRatingsVector(builder, len(_ratings)) l = [] for _name, _rating in _ratings.items(): Rating.RatingStart(builder) Rating.RatingAddName(builder, _ratings_strings[_name]) Rating.RatingAddRating(builder, _rating) rating = Rating.RatingEnd(builder) l.append(rating) ratings = builder.EndVector(len(_ratings)) User.UserStart(builder) User.UserAddName(builder, name) User.UserAddAuthid(builder, authid) User.UserAddEmail(builder, email) User.UserAddBirthday(builder, Date.CreateDate(builder, 1950, 12, 24)) User.UserAddIsFriendly(builder, True) User.UserAddTags(builder, tags) if ratings: User.UserAddRatings(builder, ratings) user = User.UserEnd(builder) builder.Finish(user) buf = builder.Output() #data = bytes(buf) RESULT['objects'] += 1 RESULT['bytes'] += len(buf) # RESULT['bytes'] += len(data) import timeit N = 1000 M = 100000 for i in range(N): secs = timeit.timeit(test, number=M) ops = round(float(M) / secs, 1) print('{} objects/sec'.format(ops)) print(RESULT) zlmdb-22.6.1/tests/test_new.py000066400000000000000000000021431426100523600162550ustar00rootroot00000000000000import random import zlmdb class Foo(object): oid: int value: float msg: str def __init__(self, oid=None, value=None, msg=None): self.oid = oid self.value = value self.msg = msg @staticmethod def unmarshal(obj): return Foo(obj['oid'], obj['value'], obj['msg']) def marshal(self): return {'oid': self.oid, 'value': self.value, 'msg': self.msg} class MySchema(zlmdb.Schema): tab1: zlmdb.MapOidPickle = zlmdb.MapOidPickle(1) # tab1: zlmdb.MapOidJson = zlmdb.MapOidJson(1, marshal=Foo.marshal, unmarshal=Foo.unmarshal) # tab1: zlmdb.MapOidCbor = zlmdb.MapOidCbor(1, marshal=Foo.marshal, unmarshal=Foo.unmarshal) schema = MySchema() with schema.open('.testdb') as db: with db.begin(write=True) as txn: o1 = Foo(23, random.random(), 'Hello, world!') schema.tab1[txn, o1.oid] = o1 print('object saved:', o1.oid, o1.value, o1.msg) o2 = schema.tab1[txn, o1.oid] assert o2 print('object 
loaded:', o2.oid, o2.value, o2.msg) print('transaction committed') print('database closed') zlmdb-22.6.1/tests/test_ops.py000066400000000000000000000071351426100523600162730ustar00rootroot00000000000000def test_insert1(env): users = [] user1 = User() user1.oid = 1 user1.name = 'Homer Simpson' user1.authid = 'homer' user1.email = 'homer.simpson@example.com' user1.birthday = datetime.date(1950, 12, 24) user1.is_friendly = True user1.tags = ['relaxed', 'beerfan'] users.append(user1) user2 = User() user2.oid = 2 user2.name = 'Crocodile Dundee' user2.authid = 'crocoboss' user2.email = 'croco@example.com' user2.birthday = datetime.date(1960, 2, 4) user2.is_friendly = False user2.tags = ['red', 'yellow'] user2.referred_by = user1.oid users.append(user2) user3 = User() user3.oid = 3 user3.name = 'Foobar Space' user3.authid = 'foobar' user3.email = 'foobar@example.com' user3.birthday = datetime.date(1970, 5, 7) user3.is_friendly = True user3.tags = ['relaxed', 'beerfan'] user3.referred_by = user1.oid users.append(user3) with Transaction(env, write=True) as txn: for user in users: _user = txn.users[user.oid] if not _user: txn.users[user.oid] = user #txn.users_by_authid[user.authid] = user.oid print('user stored', user) else: print('user loaded', _user) def test_insert2(env): with Transaction(env, write=True) as txn: for i in range(100): user = User() user.oid = i + 10 user.name = 'Test {}'.format(i) user.authid = 'test-{}'.format(i) user.email = '{}@example.com'.format(user.authid) for j in range(10): user.ratings['test-rating-{}'.format(j)] = random.random() _user = txn.users[user.oid] if not _user: txn.users[user.oid] = user #txn.users_by_authid[user.authid] = user.oid print('user stored', user, user.oid, user.authid) else: print('user loaded', _user, _user.oid, _user.authid) def test_insert3(env): oid = 4 with Transaction(env, write=True) as txn: user = txn.users[oid] if not user: user = User() user.oid = oid user.name = 'Foobar Space' user.authid = 'foobar' user.email = 'foobar@example.com' user.birthday = datetime.date(1970, 5, 7) user.is_friendly = True user.tags = ['relaxed', 'beerfan'] user.referred_by = 1 txn.users[oid] = user print('user stored', user) else: print('user loaded', user) def test_by_auth(env): with Transaction(env) as txn: for i in range(100): authid = 'test-{}'.format(i) oid = txn.idx_users_by_authid[authid] if oid: user = txn.users[oid] print('success: user "{}" loaded by authid "{}"'.format(oid, authid)) else: print('failure: user not found for authid "{}"'.format(authid)) def test_by_email(env): with Transaction(env) as txn: for i in range(100): email = 'test-{}@example.com'.format(i) oid = txn.idx_users_by_email[email] if oid: user = txn.users[oid] print('success: user "{}" loaded by email "{}"'.format(oid, email)) else: print('failure: user not found for email "{}"'.format(email)) def test_truncate_index(env): with Transaction(env, write=True) as txn: rows = txn.users_by_authid.truncate() print('users_by_authid truncated: {} rows'.format(rows)) zlmdb-22.6.1/tests/test_pickle.py000066400000000000000000000024701426100523600167360ustar00rootroot00000000000000import datetime import pickle from typing import Optional, List, Dict RESULT = { 'objects': 0, 'bytes': 0 } class Tag(object): GEEK = 1 VIP = 2 # requires python 3.6+: # # class User(object): # oid: int # name: str # authid: str # email: str # birthday: datetime.date # is_friendly: bool # tags: Optional[List[str]] # ratings: Dict[str, float] = {} # friends: List[int] = [] # referred_by: int = None class User(object): 
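    """Plain (untyped) user object used by the pickle serialization benchmark."""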
def __init__(self): self.oid = None self.name = None self.authid = None self.email = None self.birthday = None self.is_friendly = None self.tags = None self.friends = None self.referred_by = None def test(): global RESULT user = User() user.oid = 23 user.name = 'Homer Simpson' user.authid = 'homer' user.email = 'homer.simpson@example.com' user.birthday = datetime.date(1950, 12, 24) user.is_friendly = True user.tags = [Tag.GEEK, Tag.VIP] data = pickle.dumps(user, protocol=4) RESULT['objects'] += 1 RESULT['bytes'] += len(data) import timeit N = 1000 M = 100000 for i in range(N): secs = timeit.timeit(test, number=M) ops = round(float(M) / secs, 1) print('{} objects/sec'.format(ops)) print(RESULT) zlmdb-22.6.1/tests/test_zlmdb.py000066400000000000000000000017501426100523600165770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for `zlmdb` package.""" import pytest from click.testing import CliRunner from zlmdb import _transaction from zlmdb import cli @pytest.fixture def response(): """Sample pytest fixture. See more at: http://doc.pytest.org/en/latest/fixture.html """ # import requests # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') def test_content(response): """Sample pytest test function with the pytest fixture as an argument.""" # from bs4 import BeautifulSoup # assert 'GitHub' in BeautifulSoup(response.content).title.string def test_command_line_interface(): """Test the CLI.""" runner = CliRunner() result = runner.invoke(cli.main) assert result.exit_code == 0 assert 'zlmdb.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this message and exit.' in help_result.output zlmdb-22.6.1/tests/user.py000066400000000000000000000033641426100523600154110ustar00rootroot00000000000000import uuid import datetime class User(object): def __init__(self): self.oid = None self.name = None self.authid = None self.uuid = None self.email = None self.birthday = None self.is_friendly = None self.tags = None self.ratings = {} self.friends = [] self.referred_by = None def marshal(self): obj = { 'oid': self.oid, 'name': self.name, 'authid': self.authid, 'uuid': self.uuid.hex if self.uuid else None, 'email': self.email, 'birthday': { 'year': self.birthday.year if self.birthday else None, 'month': self.birthday.month if self.birthday else None, 'day': self.birthday.day if self.birthday else None, }, 'is_friendly': self.is_friendly, 'tags': self.tags, 'ratings': self.ratings, 'friends': self.friends, 'referred_by': self.referred_by, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get('oid', None) user.name = obj.get('name', None) user.authid = obj.get('authid', None) if 'uuid' in obj: user.uuid = uuid.UUID(hex=obj['uuid']) user.email = obj.get('email', None) if 'birthday' in obj: b = obj['birthday'] user.birthday = datetime.date(b.year, b.month, b.day) user.is_friendly = obj.get('is_friendly', None) user.tags = obj.get('tags', None) user.ratings = obj.get('ratings', {}) user.friends = obj.get('friends', []) user.referred_by = obj.get('referred_by', None) return user zlmdb-22.6.1/tests/user_typed.py000066400000000000000000000033231426100523600166110ustar00rootroot00000000000000import uuid import datetime from typing import Optional, List, Dict class User(object): oid: int name: str authid: str uuid: uuid.UUID email: str birthday: datetime.date is_friendly: bool tags: Optional[List[str]] ratings: Dict[str, float] = {} friends: List[int] = [] referred_by: 
int = None def marshal(self): obj = { 'oid': self.oid, 'name': self.name, 'authid': self.authid, 'uuid': self.uuid.hex if self.uuid else None, 'email': self.email, 'birthday': { 'year': self.birthday.year if self.birthday else None, 'month': self.birthday.month if self.birthday else None, 'day': self.birthday.day if self.birthday else None, }, 'is_friendly': self.is_friendly, 'tags': self.tags, 'ratings': self.ratings, 'friends': self.friends, 'referred_by': self.referred_by, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get('oid', None) user.name = obj.get('name', None) user.authid = obj.get('authid', None) if 'uuid' in obj: user.uuid = uuid.UUID(hex=obj['uuid']) user.email = obj.get('email', None) if 'birthday' in obj: b = obj['birthday'] user.birthday = datetime.date(b.year, b.month, b.day) user.is_friendly = obj.get('is_friendly', None) user.tags = obj.get('tags', None) user.ratings = obj.get('ratings', {}) user.friends = obj.get('friends', []) user.referred_by = obj.get('referred_by', None) return user zlmdb-22.6.1/tests/zdb/000077500000000000000000000000001426100523600146325ustar00rootroot00000000000000zlmdb-22.6.1/tests/zdb/README.md000066400000000000000000000004741426100523600161160ustar00rootroot00000000000000# ZLMDB high level API tests * [test_zdb_df.py](test_zdb_df.py): test pandas dataframe integration * [test_zdb_etcd.py](test_zdb_etcd.py): test etcd data integration * [test_zdb_dyn.py](test_zdb_dyn.py): test self describing database format ## Notes ```console signify -S -s key.sec -m message.txt -x msg.sig ``` zlmdb-22.6.1/tests/zdb/_schema_fbs.py000066400000000000000000000147011426100523600174400ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import random import uuid import datetime from zlmdb.flatbuffers.demo import User as _user from zlmdb.flatbuffers.demo import Date as _date class User(object): def __init__(self, from_fbs=None): self._from_fbs = from_fbs self._name = None self._authid = None self._uuid = None self._email = None self._birthday = None self._is_friendly = None self._tags = None self._ratings = None self._ratings_cached = None self._friends = None self._friends_cached = None self._referred_by = None @property def name(self): return self._name or self._from_fbs.Name() @name.setter def name(self, value): self._name = value @property def authid(self): return self._authid or self._from_fbs.Authid() @authid.setter def authid(self, value): self._authid = value @property def uuid(self): return self._uuid or self._from_fbs.Uuid() @uuid.setter def uuid(self, value): self._uuid = value @property def email(self): return self._email or self._from_fbs.Email() @email.setter def email(self, value): self._email = value @property def birthday(self): return self._birthday or self._from_fbs.Birthday() @birthday.setter def birthday(self, value): self._birthday = value @property def is_friendly(self): return self._is_friendly or self._from_fbs.IsFriendly() @is_friendly.setter def is_friendly(self, value): self._is_friendly = value @property def ratings(self): if self._ratings is not None: return self._ratings if self._ratings_cached is None: self._ratings_cached = {} if self._from_fbs: for i in range(self._from_fbs.RatingsLength()): rat = self._from_fbs.Ratings(i) self._ratings_cached[rat.Name()] = rat.Rating() return self._ratings_cached @ratings.setter def ratings(self, value): self._ratings = value @property def friends(self): if self._friends is not None: return self._friends if self._friends_cached is None: self._friends_cached = [] if self._from_fbs: for i in range(self._from_fbs.FriendsLength()): friend_oid = self._from_fbs.Friends(i) self._friends_cached.append(friend_oid) return self._friends_cached @friends.setter def friends(self, value): self._friends = value @property def referred_by(self): return self._referred_by or self._from_fbs.ReferredBy() @referred_by.setter def referred_by(self, value): self._referred_by = value def build(self, builder): if self._name is not None: name = builder.CreateString(self._name) else: name = builder.CreateString(self._from_fbs.Name()) if self._authid is not None: authid = builder.CreateString(self._authid) else: authid = builder.CreateString(self._from_fbs.Authid()) if self._email is not None: email = builder.CreateString(self._email) else: email = builder.CreateString(self._from_fbs.Email()) _user.UserStart(builder) _user.UserAddName(builder, name) _user.UserAddAuthid(builder, authid) _user.UserAddEmail(builder, email) if self._birthday is not None: _user.UserAddBirthday( builder, _date.CreateDate(builder, self._birthday.year, self._birthday.month, self._birthday.day)) else: bd = self._from_fbs.Birthday() _user.UserAddBirthday(builder, _date.CreateDate(builder, bd.Year(), bd.Month(), bd.Day())) # FIXME: tags # FIXME: ratings # FIXME: friends if self._is_friendly is not None: _user.UserAddIsFriendly(builder, self._is_friendly) else: _user.UserAddIsFriendly(builder, self._from_fbs.IsFriendly()) if self._referred_by is not None: _user.UserAddReferredBy(builder, self._referred_by) else: _user.UserAddReferredBy(builder, self._from_fbs.ReferredBy()) return _user.UserEnd(builder) @staticmethod def cast(buf): 
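        # zero-copy access: wrap the FlatBuffers root table in the lazy User
        # facade above, without parsing or copying the underlying buffer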
return User(_user.User.GetRootAsUser(buf, 0)) @staticmethod def create_test_user(oid=None): user = User() if oid is not None: user.oid = oid else: user.oid = random.randint(0, 9007199254740992) user.name = 'Test {}'.format(user.oid) user.authid = 'test-{}'.format(user.oid) user.uuid = uuid.uuid4() user.email = '{}@example.com'.format(user.authid) user.birthday = datetime.date(1950, 12, 24) user.is_friendly = True user.tags = ['geek', 'sudoko', 'yellow'] for j in range(10): user.ratings['test-rating-{}'.format(j)] = random.random() user.friends = [random.randint(0, 9007199254740992) for _ in range(10)] user.referred_by = random.randint(0, 9007199254740992) return user zlmdb-22.6.1/tests/zdb/test_zdb_df.py000066400000000000000000000040571426100523600175010ustar00rootroot00000000000000 from twisted.internet.task import react from twisted.internet.defer import inlineCallbacks import txaio from autobahn.twisted import util import numpy as np import pandas as pd import pyarrow as pa import zlmdb from zlmdb._pmap import _StringKeysMixin, PersistentMap try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory class _DataFrameValuesMixin(object): def __init__(self, marshal=None, unmarshal=None): self._marshal = marshal or self._zlmdb_marshal self._unmarshal = unmarshal or self._zlmdb_unmarshal def _serialize_value(self, value): return pa.serialize(value).to_buffer() def _deserialize_value(self, data): return pa.deserialize(data) class MapStringDataFrame(_StringKeysMixin, _DataFrameValuesMixin, PersistentMap): def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MySchema(zlmdb.Schema): samples: MapStringDataFrame def __init__(self): self.samples = MapStringDataFrame(1) @inlineCallbacks def main(reactor): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = MySchema() db = zlmdb.Database(dbpath) # WRITE some native pandas data frames to zlmdb with db.begin(write=True) as txn: for i in range(10): if i % 2: key = 'key{}'.format(i) value = pd.DataFrame(np.random.randn(8, 4), columns=['A','B','C','D']) schema.samples[txn, key] = value # READ back native pandas data frames from zlmdb with db.begin() as txn: for i in range(10): key = 'key{}'.format(i) value = schema.samples[txn, key] print('key={} : value=\n{}'.format(key, value)) yield util.sleep(1) if __name__ == '__main__': txaio.start_logging(level='info') react(main) zlmdb-22.6.1/tests/zdb/test_zdb_dyn.py000066400000000000000000000121411426100523600176730ustar00rootroot00000000000000from twisted.internet.task import react from twisted.internet.defer import inlineCallbacks from autobahn.twisted import util import txaio import yaml from pprint import pprint, pformat import zlmdb from zlmdb._pmap import MapStringJson, MapStringCbor, MapUuidJson, MapUuidCbor import random import uuid import datetime from typing import Optional, List, Dict class Tag(object): GEEK = 1 VIP = 2 class User(object): oid: int name: str authid: str uuid: uuid.UUID email: str birthday: datetime.date is_friendly: bool tags: Optional[List[str]] ratings: Dict[str, float] = {} friends: List[int] = [] referred_by: int = None def __eq__(self, other): if not isinstance(other, self.__class__): return False if other.oid != self.oid: return False if other.name != self.name: return False if other.authid != self.authid: return False if other.uuid != self.uuid: return False if other.email != self.email: return False if 
other.birthday != self.birthday: return False if other.is_friendly != self.is_friendly: return False if (self.tags and not other.tags) or (not self.tags and other.tags): return False return True def __ne__(self, other): return not self.__eq__(other) def __str__(self): return '\n{}\n'.format(pformat(self.marshal())) def marshal(self): obj = { 'oid': self.oid, 'name': self.name, 'authid': self.authid, 'uuid': self.uuid.hex if self.uuid else None, 'email': self.email, 'birthday': { 'year': self.birthday.year if self.birthday else None, 'month': self.birthday.month if self.birthday else None, 'day': self.birthday.day if self.birthday else None, }, 'is_friendly': self.is_friendly, 'tags': self.tags, 'ratings': self.ratings, 'friends': self.friends, 'referred_by': self.referred_by, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get('oid', None) user.name = obj.get('name', None) user.authid = obj.get('authid', None) if 'uuid' in obj: user.uuid = uuid.UUID(hex=obj['uuid']) user.email = obj.get('email', None) if 'birthday' in obj: b = obj['birthday'] user.birthday = datetime.date(b.get('year', None), b.get('month', None), b.get('day', None)) user.is_friendly = obj.get('is_friendly', None) user.tags = obj.get('tags', None) user.ratings = obj.get('ratings', {}) user.friends = obj.get('friends', []) user.referred_by = obj.get('referred_by', None) return user @staticmethod def create_test_user(oid=None): user = User() if oid is not None: user.oid = oid else: user.oid = random.randint(0, 9007199254740992) user.name = 'Test {}'.format(user.oid) user.authid = 'test-{}'.format(user.oid) user.uuid = uuid.uuid4() user.email = '{}@example.com'.format(user.authid) user.birthday = datetime.date(1950, 12, 24) user.is_friendly = True user.tags = ['geek', 'sudoko', 'yellow'] for j in range(10): user.ratings['test-rating-{}'.format(j)] = random.random() user.friends = [random.randint(0, 9007199254740992) for _ in range(10)] user.referred_by = random.randint(0, 9007199254740992) return user KV_TYPE_TO_CLASS = { 'string-json': (MapStringJson, lambda x: x, lambda x: x), 'string-json-user': (MapStringJson, User.marshal, User.parse), 'string-cbor-user': (MapStringCbor, User.marshal, User.parse), 'uuid-json-user': (MapUuidJson, User.marshal, User.parse), 'uuid-cbor-user': (MapUuidCbor, User.marshal, User.parse), } DBPATH = '/tmp/zlmdb1' DBSCHEMA = 'tests/zdb/zdb.yml' @inlineCallbacks def main(reactor): schema = zlmdb._database.Schema.parse(DBSCHEMA, KV_TYPE_TO_CLASS) print('Using database directory {} and schema {}:\n{}'.format(DBPATH, DBSCHEMA, schema)) with zlmdb.Database(DBPATH, schema) as db: with db.begin(write=True) as txn: users = schema['users'] users2 = schema['users2'] print('users', users) print('users2', users2) key = 'user1' for table in [users, users2]: user = table[txn, key] if user: print('user object already exists in {} for key {}: {}'.format(table, key, user)) else: print('user does not exist in {}, storing new object ..'.format(table)) user = User.create_test_user() table[txn, key] = user print('user object created for key {}: {}'.format(key, user)) yield util.sleep(1) if __name__ == '__main__': txaio.start_logging(level='info') react(main) zlmdb-22.6.1/tests/zdb/test_zdb_etcd.py000066400000000000000000000062771426100523600200350ustar00rootroot00000000000000from twisted.internet.task import react from twisted.internet.defer import inlineCallbacks from autobahn.twisted import util import txaio from txaioetcd import Client, KeySet import yaml from pprint import pprint, pformat 
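# This test mirrors key/value changes observed in etcd into a local zlmdb
# table: an etcd watch callback (on_change() below) writes every change
# it receives into MySchema.samples.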
import random import zlmdb from zlmdb._pmap import MapStringString try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory class MySchema(zlmdb.Schema): samples: MapStringString def __init__(self): self.samples = MapStringString(1) @inlineCallbacks def main(reactor): if True: with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = MySchema() with zlmdb.Database(dbpath) as db: # write records into zlmdb with db.begin(write=True) as txn: for i in range(10): key = 'key{}'.format(i) value = 'value{}'.format(random.randint(0, 1000)) schema.samples[txn, key] = value # read records from zlmdb with db.begin() as txn: for i in range(10): key = 'key{}'.format(i) value = schema.samples[txn, key] print('key={} : value={}'.format(key, value)) if True: # etcd database etcd = Client(reactor) status = yield etcd.status() print(status) # zlmdb database schema = MySchema() dbpath = '/tmp/.test-zlmdb' with zlmdb.Database(dbpath) as db: print('zlmdb open on {}'.format(dbpath)) # check current record count with db.begin() as txn: cnt = schema.samples.count(txn) print('currently {} rows in table'.format(cnt)) # watch changes in etcd and write to local zlmdb def on_change(kv): key = kv.key.decode() value = kv.value.decode() with db.begin(write=True) as txn: schema.samples[txn, key] = value print('on_change received from etcd and written to zlmdb: key={} value={}'.format(key, value)) # start watching for etcd changes .. ks = [KeySet('k'.encode(), prefix=True)] d = etcd.watch(ks, on_change) print('watching for 1s ..') yield txaio.sleep(1) # loop every 1s and write a key-value in etcd directly for i in range(5): print('watching for 1s ..') yield txaio.sleep(1) key = 'key{}'.format(i).encode() value = 'value{}'.format(random.randint(0, 1000)).encode() etcd.set(key, value) # cancel our watch d.cancel() yield util.sleep(1) # check current record count with db.begin() as txn: cnt = schema.samples.count(txn) print('currently {} rows in table'.format(cnt)) yield util.sleep(1) if __name__ == '__main__': txaio.start_logging(level='info') react(main) zlmdb-22.6.1/tests/zdb/test_zdb_fbs.py000066400000000000000000000030041426100523600176510ustar00rootroot00000000000000from twisted.internet.task import react from twisted.internet.defer import inlineCallbacks import txaio txaio.use_twisted() from autobahn.twisted import util import os import sys import random try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory import zlmdb sys.path.append(os.path.dirname(os.path.abspath(__file__))) from _schema_fbs import User as UserFbs # noqa class UsersSchema(zlmdb.Schema): def __init__(self): self.tab_oid_fbs = zlmdb.MapOidFlatBuffers(1, build=UserFbs.build, cast=UserFbs.cast) @inlineCallbacks def main2(reactor): dbpath = '/tmp/zlmdb1' print('Using database directory {}'.format(dbpath)) schema = UsersSchema() with zlmdb.Database(dbpath) as db: N = 1000 with db.begin() as txn: cnt_begin = schema.tab_oid_fbs.count(txn) stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for i in range(N): user = UserFbs.create_test_user() schema.tab_oid_fbs[txn, user.oid] = user assert stats.puts == N assert stats.dels == 0 stats.reset() with db.begin() as txn: cnt_end = schema.tab_oid_fbs.count(txn) cnt = cnt_end - cnt_begin assert cnt == N print('{} records written, {} records total'.format(cnt, cnt_end)) yield util.sleep(1) if __name__ == '__main__': 
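    # start logging and run the FlatBuffers bulk-write test under the Twisted reactor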
txaio.start_logging(level='info') react(main2) zlmdb-22.6.1/tests/zdb/zdb.yml000066400000000000000000000005401426100523600161330ustar00rootroot00000000000000slots: - index: 100 name: users key: string value: json schema: user description: Arbitrary user data (serialized in JSON format). - index: 101 name: mrealms key: uuid value: cbor schema: user description: Management realms. - index: 102 name: users2 key: string schema: user value: cbor zlmdb-22.6.1/tox.ini000066400000000000000000000045351426100523600142330ustar00rootroot00000000000000[tox] skip_missing_interpreters = true envlist = py37 py38 py39 py310 pypy37 pypy38 flake8 mypy yapf sphinx # MAP: GitHub Actions Python Name => Tox Env Name (for Python) # # when called without a specific environment ("-e"), detect the # python version / get from GH action, and map to tox env # # https://github.com/ymyzk/tox-gh-actions # [gh-actions] python = 3.7: py37 3.8: py38 3.9: py39 3.10: py310 pypy-3.7: pypy37 pypy-3.8: pypy38 [testenv] whitelist_externals = sh cp rm sphinx sphinx-build coverage codecov flake8 yapf mypy pytest setenv = PYTHONPATH = {toxinidir} # LMDB_FORCE_CFFI = "1" deps = -r{toxinidir}/requirements-dev.txt commands = {py37,py38,py39,py310,pypy37,pypy38}: pytest -v -s --basetemp={envtmpdir} zlmdb [testenv:flake8] skip_install = True deps = flake8 commands = python -V flake8 --max-line-length=119 --exclude=zlmdb/tests/user_typed.py --exclude=zlmdb/flatbuffers,zlmdb/tests/MNodeLog.py zlmdb [testenv:yapf] description = Run yapf style checks. skip_install = True deps = # https://github.com/google/yapf/issues/712 yapf==0.29.0 commands = python -V yapf --version yapf -rd --style=yapf.ini --exclude="zlmdb/flatbuffers/*" --exclude="zlmdb/tests/MNodeLog.py" zlmdb [testenv:mypy] description = Run mypy type checks. skip_install = True deps = mypy commands= python -V mypy --version mypy --install-types --non-interactive --ignore-missing-imports --config-file {toxinidir}/mypy.ini zlmdb [testenv:pylint] description = Run pylint checks. skip_install = False deps = pylint commands= python -V pylint --version pylint --errors-only --ignore-patterns="zlmdb/flatbuffers/*","zlmdb/test/*" zlmdb [testenv:sphinx] description = Generate docs using Sphinx. 
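# unlike the lint envs (flake8, yapf, mypy), the package itself is installed
# into this env, presumably so the documentation build can import zlmdb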
skip_install = False deps = sphinx>=1.7.1 sphinxcontrib-images sphinxcontrib-spelling sphinx-autoapi sphinx_rtd_theme commands = python -V sphinx-build --version # first test with all warnings fatal sphinx-build -nWT -b dummy ./docs ./docs/_build # generate HTML output sphinx-build -b html ./docs ./docs/_build # move to HOME to preserve on Travis for upload to S3 -rm -rf {homedir}/zlmdb-docs cp -R ./docs/_build {homedir}/zlmdb-docs zlmdb-22.6.1/yapf.ini000066400000000000000000000034761426100523600143630ustar00rootroot00000000000000[style] based_on_style = pep8 column_limit = 119 #ALIGN_CLOSING_BRACKET_WITH_VISUAL_INDENT=True #ALLOW_MULTILINE_LAMBDAS=False #ALLOW_MULTILINE_DICTIONARY_KEYS=False #ALLOW_SPLIT_BEFORE_DEFAULT_OR_NAMED_ASSIGNS=True #ALLOW_SPLIT_BEFORE_DICT_VALUE=True #ARITHMETIC_PRECEDENCE_INDICATION=False #BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF=False #BLANK_LINE_BEFORE_CLASS_DOCSTRING=False #BLANK_LINE_BEFORE_MODULE_DOCSTRING=False #BLANK_LINES_AROUND_TOP_LEVEL_DEFINITION=2 #COALESCE_BRACKETS=False ##COLUMN_LIMIT=79 #CONTINUATION_ALIGN_STYLE='SPACE' #CONTINUATION_INDENT_WIDTH=4 #DEDENT_CLOSING_BRACKETS=False #DISABLE_ENDING_COMMA_HEURISTIC=False #EACH_DICT_ENTRY_ON_SEPARATE_LINE=True #I18N_COMMENT='' #I18N_FUNCTION_CALL='' #INDENT_DICTIONARY_VALUE=False #INDENT_WIDTH=4 #INDENT_BLANK_LINES=False #JOIN_MULTIPLE_LINES=True #NO_SPACES_AROUND_SELECTED_BINARY_OPERATORS=set() #SPACE_BETWEEN_ENDING_COMMA_AND_CLOSING_BRACKET=True #SPACES_AROUND_POWER_OPERATOR=False #SPACES_AROUND_DEFAULT_OR_NAMED_ASSIGN=False #SPACES_BEFORE_COMMENT=2 #SPLIT_ARGUMENTS_WHEN_COMMA_TERMINATED=False #SPLIT_ALL_COMMA_SEPARATED_VALUES=False #SPLIT_BEFORE_ARITHMETIC_OPERATOR=False #SPLIT_BEFORE_BITWISE_OPERATOR=True #SPLIT_BEFORE_CLOSING_BRACKET=True #SPLIT_BEFORE_DICT_SET_GENERATOR=True #SPLIT_BEFORE_DOT=False #SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN=False #SPLIT_BEFORE_FIRST_ARGUMENT=False #SPLIT_BEFORE_LOGICAL_OPERATOR=True #SPLIT_BEFORE_NAMED_ASSIGNS=True #SPLIT_COMPLEX_COMPREHENSION=False #SPLIT_PENALTY_AFTER_OPENING_BRACKET=300 #SPLIT_PENALTY_AFTER_UNARY_OPERATOR=10000 #SPLIT_PENALTY_ARITHMETIC_OPERATOR=300 #SPLIT_PENALTY_BEFORE_IF_EXPR=0 #SPLIT_PENALTY_BITWISE_OPERATOR=300 #SPLIT_PENALTY_COMPREHENSION=80 #SPLIT_PENALTY_EXCESS_CHARACTER=7000 #SPLIT_PENALTY_FOR_ADDED_LINE_SPLIT=30 #SPLIT_PENALTY_IMPORT_NAMES=0 #SPLIT_PENALTY_LOGICAL_OPERATOR=300 #USE_TABS=False zlmdb-22.6.1/zlmdb/000077500000000000000000000000001426100523600140215ustar00rootroot00000000000000zlmdb-22.6.1/zlmdb/__init__.py000066400000000000000000000244401426100523600161360ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### """ZLMDB - Object-relational zero-copy in-memory database layer for LMDB.""" import uuid from typing import Dict # noqa from ._version import __version__ from ._errors import NullValueConstraint from ._pmap import PersistentMap, \ MapSlotUuidUuid, \ MapUuidString, \ MapUuidOid, \ MapUuidUuid, \ MapUuidJson, \ MapUuidCbor, \ MapUuidPickle, \ MapUuidFlatBuffers, \ MapUuidTimestampFlatBuffers, \ MapUuidBytes20Uint8FlatBuffers, \ MapUuidBytes20Uint8UuidFlatBuffers, \ MapUuidBytes20Bytes20Uint8UuidFlatBuffers, \ MapUuidTimestampCbor, \ MapTimestampFlatBuffers, \ MapTimestampStringFlatBuffers, \ MapTimestampUuidFlatBuffers, \ MapTimestampUuidStringFlatBuffers, \ MapUuidTimestampUuidFlatBuffers, \ MapUint64TimestampUuid, \ MapTimestampUuidCbor, \ MapUuidTimestampUuid, \ MapUuidStringUuid, \ MapUuidUuidStringUuid, \ MapUuidUuidUuidStringUuid, \ MapUuidStringOid, \ MapUuidUuidCbor, \ MapUuidUuidSet, \ MapUuidUuidUuid, \ MapUuidUuidUuidUuid, \ MapUuidUuidUuidUuidUuid, \ MapUuidTimestampBytes32, \ MapUuidUuidFlatBuffers, \ MapUuidUuidStringFlatBuffers, \ MapUuidStringFlatBuffers, \ MapStringString, \ MapStringOid, \ MapStringOidOid, \ MapStringUuid, \ MapStringStringUuid, \ MapStringStringStringUuid, \ MapStringJson, \ MapStringCbor, \ MapStringPickle, \ MapStringFlatBuffers, \ MapStringTimestampCbor, \ MapTimestampStringCbor, \ MapOidString, \ MapOidOid, \ MapOidUuid, \ MapOidJson, \ MapOidCbor, \ MapOidPickle, \ MapOidFlatBuffers, \ MapOidOidFlatBuffers, \ MapOid3FlatBuffers, \ MapOidOidSet, \ MapOidStringOid, \ MapOidOidOid, \ MapOidTimestampOid, \ MapOidTimestampFlatBuffers, \ MapOidTimestampStringOid, \ MapUint16UuidTimestampFlatBuffers, \ MapBytes32Uuid, \ MapBytes32Timestamp, \ MapBytes32Bytes32, \ MapBytes32FlatBuffers, \ MapBytes32UuidFlatBuffers, \ MapUuidBytes32FlatBuffers, \ MapBytes32Bytes32FlatBuffers, \ MapBytes32StringFlatBuffers, \ MapTimestampBytes32FlatBuffers, \ MapBytes20Uuid, \ MapBytes20Bytes16, \ MapBytes20Bytes20, \ MapBytes20Bytes20Timestamp, \ MapBytes20TimestampBytes20, \ MapBytes20TimestampUuid, \ MapBytes16FlatBuffers, \ MapBytes16TimestampUuid, \ MapBytes16TimestampUuidFlatBuffers, \ MapBytes20FlatBuffers, \ MapBytes20Bytes20FlatBuffers, \ MapBytes20StringFlatBuffers from ._transaction import Transaction, TransactionStats from ._database import Database from ._schema import Schema __all__ = ( '__version__', 'Schema', 'Database', 'Transaction', 'TransactionStats', 'MapSlotUuidUuid', 'table', # # Errors # 'NullValueConstraint', # # UUID pmaps # # UUID (uint128) based pmap types for object containers 'MapUuidString', 'MapUuidOid', 'MapUuidJson', 'MapUuidCbor', 'MapUuidPickle', 'MapUuidFlatBuffers', # UUID/Timestamp-combined pmap types for flatbuffers values 'MapUuidTimestampFlatBuffers', 'MapTimestampUuidFlatBuffers', 'MapTimestampFlatBuffers', 'MapTimestampStringFlatBuffers', 'MapTimestampUuidStringFlatBuffers', 'MapUuidTimestampUuidFlatBuffers', 'MapUuidBytes20Uint8FlatBuffers', 'MapUuidBytes20Uint8UuidFlatBuffers', 'MapUuidBytes20Bytes20Uint8UuidFlatBuffers', 'MapUint16UuidTimestampFlatBuffers', # UUID (uint128) based pmap types for indexes 'MapUuidUuid', 'MapUuidStringUuid', 'MapUuidUuidStringUuid', 
'MapUuidUuidUuidStringUuid', 'MapUint64TimestampUuid', # more UUID (uint128) based pmap types for indexes 'MapUuidUuidSet', 'MapUuidStringOid', # UUID-UUID based pmap types 'MapUuidUuidFlatBuffers', 'MapUuidStringFlatBuffers', 'MapUuidUuidCbor', 'MapUuidUuidUuid', 'MapUuidUuidUuidUuid', 'MapUuidUuidUuidUuidUuid', 'MapUuidTimestampUuid', 'MapUuidTimestampBytes32', 'MapUuidTimestampCbor', 'MapTimestampUuidCbor', # # String pmaps # # String (utf8) based pmap types for object containers 'MapStringUuid', 'MapStringStringUuid', 'MapStringStringStringUuid', 'MapStringOid', 'MapStringOidOid', 'MapStringJson', 'MapStringCbor', 'MapStringPickle', 'MapStringFlatBuffers', 'MapStringTimestampCbor', 'MapTimestampStringCbor', # String (utf8) based pmap types for indexes 'MapStringString', # # OID pmaps # # OID (uint64) based pmap types for object containers 'MapOidString', 'MapOidUuid', 'MapOidJson', 'MapOidCbor', 'MapOidPickle', 'MapOidFlatBuffers', 'MapOidOidFlatBuffers', 'MapOidTimestampFlatBuffers', 'MapOid3FlatBuffers', # OID (uint64) based pmap types for indexes 'MapOidOid', 'MapOidOidSet', 'MapOidStringOid', 'MapOidOidOid', 'MapOidTimestampOid', 'MapOidTimestampStringOid', # # Bytes32 pmaps # 'MapBytes32Uuid', 'MapBytes32Timestamp', 'MapBytes32Bytes32', 'MapBytes32FlatBuffers', 'MapBytes32UuidFlatBuffers', 'MapUuidBytes32FlatBuffers', 'MapBytes32Bytes32FlatBuffers', 'MapBytes32StringFlatBuffers', 'MapTimestampBytes32FlatBuffers', 'MapUuidUuidStringFlatBuffers', # # Bytes20 pmaps # 'MapBytes20Uuid', 'MapBytes20Bytes16', 'MapBytes20Bytes20', 'MapBytes20Bytes20Timestamp', 'MapBytes20TimestampBytes20', 'MapBytes20TimestampUuid', 'MapBytes20FlatBuffers', 'MapBytes20Bytes20FlatBuffers', 'MapBytes20StringFlatBuffers', # # Bytes16 pmaps # 'MapBytes16FlatBuffers', 'MapBytes16TimestampUuid', 'MapBytes16TimestampUuidFlatBuffers', ) TABLES_BY_UUID: Dict[uuid.UUID, PersistentMap] = {} """ Map of table UUIDs to persistent maps stored in slots in a KV store. 
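
Entries are registered by the ``table()`` decorator below; decorated classes
are then attached to a concrete database via ``Database.attach_table()``.
A minimal, hypothetical sketch (the UUID and class name are made up)::

    @table('5b5110ae-7731-4346-85ac-f0b8b70e4a1b')
    class MyTable(MapStringJson):
        pass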
""" def table(oid, marshal=None, parse=None, build=None, cast=None, compress=None): if type(oid) == str: oid = uuid.UUID(oid) assert isinstance(oid, uuid.UUID) assert marshal is None or callable(marshal) assert parse is None or callable(parse) assert build is None or callable(build) assert cast is None or callable(cast) assert compress is None or compress in [PersistentMap.COMPRESS_ZLIB, PersistentMap.COMPRESS_SNAPPY] def decorate(o): if oid in TABLES_BY_UUID: assert TABLES_BY_UUID[oid]._zlmdb_oid == oid, "{} != {}".format(TABLES_BY_UUID[oid]._zlmdb_oid, oid) assert TABLES_BY_UUID[oid]._zlmdb_marshal == marshal, "{} != {}".format( TABLES_BY_UUID[oid]._zlmdb_marshal, marshal) assert TABLES_BY_UUID[oid]._zlmdb_parse == parse, "{} != {}".format(TABLES_BY_UUID[oid]._zlmdb_parse, parse) assert TABLES_BY_UUID[oid]._zlmdb_build == build, "{} != {}".format(TABLES_BY_UUID[oid]._zlmdb_build, build) assert TABLES_BY_UUID[oid]._zlmdb_cast == cast, "{} != {}".format(TABLES_BY_UUID[oid]._zlmdb_cast, cast) assert TABLES_BY_UUID[oid]._zlmdb_compress == compress, "{} != {}".format( TABLES_BY_UUID[oid]._zlmdb_compress, compress) return assert oid not in TABLES_BY_UUID, "oid {} already in map (pointing to {})".format(oid, TABLES_BY_UUID[oid]) # slot UUID that is mapped to a slot index later when attaching to db o._zlmdb_oid = oid # for CBOR/JSON o._zlmdb_marshal = marshal o._zlmdb_parse = parse # for Flatbuffers o._zlmdb_build = build o._zlmdb_cast = cast # for value compression o._zlmdb_compress = compress TABLES_BY_UUID[oid] = o return o return decorate zlmdb-22.6.1/zlmdb/_database.py000066400000000000000000000750341426100523600163070ustar00rootroot00000000000000############################################################################# # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import os import shutil import tempfile import uuid import pprint import struct import inspect import time from typing import Dict, Any, Tuple, List, Optional, Callable, Type import lmdb import yaml import cbor2 from zlmdb._transaction import Transaction, TransactionStats from zlmdb import _pmap from zlmdb._pmap import MapStringJson, MapStringCbor, MapUuidJson, MapUuidCbor import txaio try: from twisted.python.reflect import qual except ImportError: def qual(klass): return klass.__name__ KV_TYPE_TO_CLASS = { 'string-json': (MapStringJson, lambda x: x, lambda x: x), 'string-cbor': (MapStringCbor, lambda x: x, lambda x: x), 'uuid-json': (MapUuidJson, lambda x: x, lambda x: x), 'uuid-cbor': (MapUuidCbor, lambda x: x, lambda x: x), } _LMDB_MYPID_ENVS: Dict[str, Tuple['Database', int]] = {} class ConfigurationElement(object): """ Internal zLMDB configuration element base type. """ __slots__ = ( '_oid', '_name', '_description', '_tags', ) def __init__(self, oid: Optional[uuid.UUID] = None, name: Optional[str] = None, description: Optional[str] = None, tags: Optional[List[str]] = None): self._oid = oid self._name = name self._description = description self._tags = tags def __eq__(self, other: Any) -> bool: if not isinstance(other, self.__class__): return False if other.oid != self.oid: return False if other.name != self.name: return False if other.description != self.description: return False if (self.tags and not other.tags) or (not self.tags and other.tags): return False if other.tags and self.tags: if set(other.tags) ^ set(self.tags): return False return True def __ne__(self, other: Any) -> bool: return not self.__eq__(other) @property def oid(self) -> Optional[uuid.UUID]: return self._oid @property def name(self) -> Optional[str]: return self._name @property def description(self) -> Optional[str]: return self._description @property def tags(self) -> Optional[List[str]]: return self._tags def __str__(self) -> str: return pprint.pformat(self.marshal()) def marshal(self) -> Dict[str, Any]: value: Dict[str, Any] = { 'oid': str(self._oid), 'name': self._name, } if self.description: value['description'] = self._description if self.tags: value['tags'] = self._tags return value @staticmethod def parse(value: Dict[str, Any]) -> 'ConfigurationElement': assert type(value) == dict oid = value.get('oid', None) if oid: oid = uuid.UUID(oid) obj = ConfigurationElement(oid=oid, name=value.get('name', None), description=value.get('description', None), tags=value.get('tags', None)) return obj class Slot(ConfigurationElement): """ Internal zLMDB database slot configuration element. 
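
    A slot assigns a database table (identified by its UUID) a small native
    integer index (uint16) within the LMDB environment. Slot metadata is
    persisted in the database itself, CBOR-serialized under the 4-byte key
    ``b'\0\0' + struct.pack('>H', slot_index)`` (see ``Database._set_slot``
    and ``Database._cache_slots``).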
""" __slots__ = ( '_slot', '_creator', ) def __init__(self, oid: Optional[uuid.UUID] = None, name: Optional[str] = None, description: Optional[str] = None, tags: Optional[List[str]] = None, slot: Optional[int] = None, creator: Optional[str] = None): ConfigurationElement.__init__(self, oid=oid, name=name, description=description, tags=tags) self._slot = slot self._creator = creator @property def creator(self) -> Optional[str]: return self._creator @property def slot(self) -> Optional[int]: return self._slot def __str__(self) -> str: return pprint.pformat(self.marshal()) def marshal(self) -> Dict[str, Any]: obj = ConfigurationElement.marshal(self) obj.update({ 'creator': self._creator, 'slot': self._slot, }) return obj @staticmethod def parse(data: Dict[str, Any]) -> 'Slot': assert type(data) == dict obj = ConfigurationElement.parse(data) slot = data.get('slot', None) creator = data.get('creator', None) drvd_obj = Slot(oid=obj.oid, name=obj.name, description=obj.description, tags=obj.tags, slot=slot, creator=creator) return drvd_obj class Schema(object): def __init__(self, meta, slots, slots_byname): self._meta = meta self._slots = slots self._slots_byname = slots_byname def __str__(self): return pprint.pformat(self._meta) def __getitem__(self, name): assert type(name) == str if name not in self._slots_byname: raise IndexError('no slot with name "{}"'.format(name)) return self._slots[self._slots_byname[name]] def __setitem__(self, name, value): raise NotImplementedError('schema is read-only') def __delitem__(self, name): raise NotImplementedError('schema is read-only') def __len__(self): return len(self._slots_byname) def __iter__(self): raise Exception('not implemented') @staticmethod def parse(filename, klassmap=KV_TYPE_TO_CLASS): with open(filename) as f: _meta = yaml.load(f.read()) meta = {} slots = {} slots_byname = {} for slot in _meta.get('slots', []): _index = slot.get('index', None) assert type(_index) == int and _index >= 100 and _index < 65536 assert _index not in slots _name = slot.get('name', None) assert type(_name) == str assert _name not in slots_byname _key = slot.get('key', None) assert _key in ['string', 'uuid'] _value = slot.get('value', None) assert _value in ['json', 'cbor'] _schema = slot.get('schema', None) assert _schema is None or type(_schema) == str _description = slot.get('description', None) assert (_description is None or type(_description) == str) if _schema: _kv_type = '{}-{}-{}'.format(_key, _value, _schema) else: _kv_type = '{}-{}'.format(_key, _value) _kv_klass, _marshal, _unmarshal = klassmap.get(_kv_type, (None, None, None)) assert _kv_klass assert _marshal assert _unmarshal meta[_index] = { 'index': _index, 'name': _name, 'key': _key, 'value': _value, 'impl': _kv_klass.__name__ if _kv_klass else None, 'description': _description, } slots[_index] = _kv_klass(_index, marshal=_marshal, unmarshal=_unmarshal) slots_byname[_name] = _index return Schema(meta, slots, slots_byname) class Database(object): """ ZLMDB database access. Objects of this class are generally "light-weight" (cheap to create and destroy), but do manage internal resource such as file descriptors. To manage these resources in a robust way, this class implements the Python context manager interface. 
""" __slots__ = ( 'log', '_is_temp', '_tempdir', '_dbpath', '_maxsize', '_readonly', '_lock', '_sync', '_create', '_open_now', '_writemap', '_context', '_slots', '_slots_by_index', '_env', ) def __init__(self, dbpath: Optional[str] = None, maxsize: int = 10485760, readonly: bool = False, lock: bool = True, sync: bool = True, create: bool = True, open_now: bool = True, writemap: bool = False, context: Any = None, log: Optional[txaio.interfaces.ILogger] = None): """ :param dbpath: LMDB database path: a directory with (at least) 2 files, a ``data.mdb`` and a ``lock.mdb``. If no database exists at the given path, create a new one. :param maxsize: Database size limit in bytes, with a default of 1MB. :param readonly: Open database read-only. When ``True``, deny any modifying database operations. Note that the LMDB lock file (``lock.mdb``) still needs to be written (by readers also), and hence at the filesystem level, a LMDB database directory must be writable. :param sync: Open database with sync on commit. :param create: Automatically create database if it does not yet exist. :param open_now: Open the database immediately (within this constructor). :param writemap: Use direct write to mmap'ed database rather than regular file IO writes. Be careful when using any storage other than locally attached filesystem/drive. :param context: Optional context within which this database instance is created. :param log: Log object to use for logging from this class. """ self._context = context if log: self.log = log else: if not txaio._explicit_framework: txaio.use_asyncio() self.log = txaio.make_logger() if dbpath: self._is_temp = False self._tempdir = None self._dbpath = dbpath else: self._is_temp = True self._tempdir = tempfile.TemporaryDirectory() self._dbpath = self._tempdir.name self._maxsize = maxsize self._readonly = readonly self._lock = lock self._sync = sync self._create = create self._open_now = open_now self._writemap = writemap self._context = context self._slots: Optional[Dict[uuid.UUID, Slot]] = None self._slots_by_index: Optional[Dict[uuid.UUID, int]] = None # in a context manager environment we initialize with LMDB handle # when we enter the actual temporary, managed context .. self._env: Optional[lmdb.Environment] = None # in a direct run environment, we immediately open LMDB if self._open_now: self.__enter__() def __enter__(self): """ Enter database runtime context and open the underlying LMDB database environment. .. note:: Enter the runtime context related to this object. The with statement will bind this method’s return value to the target(s) specified in the as clause of the statement, if any. [Source](https://docs.python.org/3/reference/datamodel.html#object.__enter__) .. note:: A context manager is an object that defines the runtime context to be established when executing a with statement. The context manager handles the entry into, and the exit from, the desired runtime context for the execution of the block of code. Context managers are normally invoked using the with statement (described in section The with statement), but can also be used by directly invoking their methods." [Source](https://docs.python.org/3/reference/datamodel.html#with-statement-context-managers) :return: This database instance (in open state). """ if not self._env: # protect against opening the same database file multiple times within the same process: # "It is a serious error to have open (multiple times) the same LMDB file in # the same process at the same time. 
Failure to heed this may lead to data # corruption and interpreter crash." # https://lmdb.readthedocs.io/en/release/#environment-class if not self._is_temp: if self._dbpath in _LMDB_MYPID_ENVS: other_obj, other_pid = _LMDB_MYPID_ENVS[self._dbpath] raise RuntimeError( 'tried to open same dbpath "{}" twice within same process: cannot open database ' 'for {} (PID {}, Context {}), already opened in {} (PID {}, Context {})'.format( self._dbpath, self, os.getpid(), self.context, other_obj, other_pid, other_obj.context)) _LMDB_MYPID_ENVS[self._dbpath] = self, os.getpid() # handle lmdb.LockError: mdb_txn_begin: Resource temporarily unavailable # "The environment was locked by another process." # https://lmdb.readthedocs.io/en/release/#lmdb.LockError # count number of retries retries = 0 # delay (in seconds) before retrying retry_delay = 0 while True: try: # https://lmdb.readthedocs.io/en/release/#lmdb.Environment # https://lmdb.readthedocs.io/en/release/#writemap-mode # map_size: Maximum size database may grow to; used to size the memory mapping. # lock=True is needed for concurrent access, even when only by readers (because of space mgmt) self._env = lmdb.open(self._dbpath, map_size=self._maxsize, create=self._create, readonly=self._readonly, sync=self._sync, subdir=True, lock=self._lock, writemap=self._writemap) # ok, good: we've got a LMDB env break # see https://github.com/crossbario/zlmdb/issues/53 except lmdb.LockError as e: retries += 1 if retries >= 3: # give up and signal to user code raise RuntimeError('cannot open LMDB environment (giving up ' 'after {} retries): {}'.format(retries, e)) # use synchronous (!) sleep (1st time is sleep(0), which releases execution of this process to OS) time.sleep(retry_delay) # increase sleep time by 10ms _next_ time. that is, for our 3 attempts # the delays are: 0ms, 10ms, 20ms retry_delay += 0.01 return self def __exit__(self, exc_type, exc_value, traceback): """ Exit runtime context and close the underlying LMDB database environment. .. note:: Exit the runtime context related to this object. The parameters describe the exception that caused the context to be exited. If the context was exited without an exception, all three arguments will be None. [Source](https://docs.python.org/3/reference/datamodel.html#object.__exit__). :param exc_type: :param exc_value: :param traceback: :return: """ if self._env: self._env.close() self._env = None if not self._is_temp and self._dbpath in _LMDB_MYPID_ENVS: del _LMDB_MYPID_ENVS[self._dbpath] @staticmethod def open(dbpath: Optional[str] = None, maxsize: int = 10485760, readonly: bool = False, lock: bool = True, sync: bool = True, create: bool = True, open_now: bool = True, writemap: bool = False, context: Any = None, log: Optional[txaio.interfaces.ILogger] = None) -> 'Database': if dbpath is not None and dbpath in _LMDB_MYPID_ENVS: db, _ = _LMDB_MYPID_ENVS[dbpath] print( '{}: reusing database instance for path "{}" in new context {} already opened from (first) context {}'. 
format(Database.open, dbpath, context, db.context)) else: db = Database(dbpath=dbpath, maxsize=maxsize, readonly=readonly, lock=lock, sync=sync, create=create, open_now=open_now, writemap=writemap, context=context, log=log) print('{}: creating new database instance for path "{}" in context {}'.format( Database.open, dbpath, context)) return db @property def context(self): """ :return: """ return self._context @property def dbpath(self) -> Optional[str]: """ :return: """ return self._dbpath @property def maxsize(self) -> int: """ :return: """ return self._maxsize @property def is_sync(self) -> bool: """ :return: """ return self._sync @property def is_readonly(self) -> bool: """ :return: """ return self._readonly @property def is_writemap(self) -> bool: """ :return: """ return self._writemap @property def is_open(self) -> bool: """ :return: """ return self._env is not None @staticmethod def scratch(dbpath: str): """ :param dbpath: :return: """ if os.path.exists(dbpath): if os.path.isdir(dbpath): shutil.rmtree(dbpath) else: os.remove(dbpath) def begin(self, write: bool = False, buffers: bool = False, stats: Optional[TransactionStats] = None) -> Transaction: """ :param write: :param buffers: :param stats: :return: """ assert self._env is not None if write and self._readonly: raise Exception('database is read-only') txn = Transaction(db=self, write=write, buffers=buffers, stats=stats) return txn def sync(self, force: bool = False): """ :param force: :return: """ assert self._env is not None self._env.sync(force=force) def config(self) -> Dict[str, Any]: """ :return: """ res = { 'is_temp': self._is_temp, 'dbpath': self._dbpath, 'maxsize': self._maxsize, 'readonly': self._readonly, 'lock': self._lock, 'sync': self._sync, 'create': self._create, 'open_now': self._open_now, 'writemap': self._writemap, 'context': str(self._context) if self._context else None, } return res def stats(self, include_slots: bool = False) -> Dict[str, Any]: """ :param include_slots: :return: """ assert self._env is not None current_size = os.path.getsize(os.path.join(self._dbpath, 'data.mdb')) # psize Size of a database page in bytes. # depth Height of the B-tree. # branch_pages Number of internal (non-leaf) pages. # leaf_pages Number of leaf pages. # overflow_pages Number of overflow pages. # entries Number of data items. stats = self._env.stat() pages = stats['leaf_pages'] + stats['overflow_pages'] + stats['branch_pages'] used = stats['psize'] * pages self._cache_slots() res: Dict[str, Any] = { 'num_slots': len(self._slots) if self._slots else 0, 'current_size': current_size, 'max_size': self._maxsize, 'page_size': stats['psize'], 'pages': pages, 'used': used, 'free': 1. - float(used) / float(self._maxsize), 'read_only': self._readonly, 'sync_enabled': self._sync, } res.update(stats) # map_addr Address of database map in RAM. # map_size Size of database map in RAM. # last_pgno ID of last used page. # last_txnid ID of last committed transaction. # max_readers Number of reader slots allocated in the lock file. Equivalent to the value of # maxreaders= specified by the first process opening the Environment. # num_readers Maximum number of reader slots in simultaneous use since the lock file was initialized. 
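        # merge the raw LMDB environment info (fields documented above) into the result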
res.update(self._env.info()) if include_slots: slots = self._get_slots() res['slots'] = [] with self.begin() as txn: for slot_id in slots: slot = slots[slot_id] pmap = _pmap.PersistentMap(slot.slot) res['slots'].append({ 'oid': str(slot_id), 'slot': slot.slot, 'name': slot.name, 'description': slot.description, 'records': pmap.count(txn), }) return res def _cache_slots(self): """ :return: """ slots = {} slots_by_index = {} with self.begin() as txn: from_key = struct.pack('>H', 0) to_key = struct.pack('>H', 1) cursor = txn._txn.cursor() found = cursor.set_range(from_key) while found: _key = cursor.key() if _key >= to_key: break if len(_key) >= 4: # key = struct.unpack('>H', _key[0:2]) slot_index = struct.unpack('>H', _key[2:4])[0] slot = Slot.parse(cbor2.loads(cursor.value())) assert slot.slot == slot_index slots[slot.oid] = slot slots_by_index[slot.oid] = slot_index found = cursor.next() self._slots = slots self._slots_by_index = slots_by_index def _get_slots(self, cached=True) -> Dict[uuid.UUID, Slot]: """ :param cached: :return: """ if self._slots is None or not cached: self._cache_slots() assert self._slots return self._slots def _get_free_slot(self) -> int: """ :return: """ if self._slots_by_index is None: self._cache_slots() assert self._slots_by_index is not None slot_indexes = sorted(self._slots_by_index.values()) if len(slot_indexes) > 0: return slot_indexes[-1] + 1 else: return 1 def _set_slot(self, slot_index: int, slot: Optional[Slot]): """ :param slot_index: :param slot: :return: """ assert type(slot_index) == int assert 0 < slot_index < 65536 assert slot is None or isinstance(slot, Slot) if self._slots is None: self._cache_slots() assert self._slots is not None assert self._slots_by_index is not None key = b'\0\0' + struct.pack('>H', slot_index) if slot: assert slot_index == slot.slot assert slot.oid data = cbor2.dumps(slot.marshal()) with self.begin(write=True) as txn: txn._txn.put(key, data) self._slots[slot.oid] = slot self._slots_by_index[slot.oid] = slot_index self.log.debug('Wrote metadata for table <{oid}> to slot {slot_index:03d}', oid=slot.oid, slot_index=slot_index) else: with self.begin(write=True) as txn: result = txn.get(key) if result: txn._txn.delete(key) slot = Slot.parse(cbor2.loads(result)) if slot.oid in self._slots: del self._slots[slot.oid] if slot.oid in self._slots_by_index: del self._slots_by_index[slot.oid] self.log.debug('Deleted metadata for table <{oid}> from slot {slot_index:03d}', oid=slot.oid, slot_index=slot_index) def attach_table(self, klass: Type[_pmap.PersistentMap]): """ :param klass: :return: """ if not inspect.isclass(klass): raise TypeError( 'cannot attach object {} as database table: a subclass of zlmdb.PersistentMap is required'.format( klass)) name = qual(klass) if not issubclass(klass, _pmap.PersistentMap): raise TypeError( 'cannot attach object of class {} as a database table: a subclass of zlmdb.PersistentMap is required'. 
format(name)) if not hasattr(klass, '_zlmdb_oid') or not klass._zlmdb_oid: raise TypeError('{} is not decorated as table slot'.format(klass)) description = klass.__doc__.strip() if klass.__doc__ else None if self._slots is None: self._cache_slots() pmap = self._attach_slot(klass._zlmdb_oid, klass, marshal=klass._zlmdb_marshal, parse=klass._zlmdb_parse, build=klass._zlmdb_build, cast=klass._zlmdb_cast, compress=klass._zlmdb_compress, create=True, name=name, description=description) return pmap def _attach_slot(self, oid: uuid.UUID, klass: Type[_pmap.PersistentMap], marshal: Optional[Callable] = None, parse: Optional[Callable] = None, build: Optional[Callable] = None, cast: Optional[Callable] = None, compress: Optional[int] = None, create: bool = True, name: Optional[str] = None, description: Optional[str] = None): """ :param oid: :param klass: :param marshal: :param parse: :param build: :param cast: :param compress: :param create: :param name: :param description: :return: """ assert isinstance(oid, uuid.UUID) assert issubclass(klass, _pmap.PersistentMap) assert marshal is None or callable(marshal) assert parse is None or callable(parse) assert build is None or callable(build) assert cast is None or callable(cast) # either marshal+parse (for CBOR/JSON) OR build+cast (for Flatbuffers) OR all unset assert (not marshal and not parse and not build and not cast) or \ (not marshal and not parse and build and cast) or \ (marshal and parse and not build and not cast) assert compress is None or compress in [_pmap.PersistentMap.COMPRESS_ZLIB, _pmap.PersistentMap.COMPRESS_SNAPPY] assert type(create) == bool assert name is None or type(name) == str assert description is None or type(description) == str assert self._slots_by_index is not None if oid not in self._slots_by_index: self.log.debug('No slot found in database for DB table <{oid}>: <{name}>', name=name, oid=oid) if create: slot_index = self._get_free_slot() slot = Slot(oid=oid, creator='unknown', slot=slot_index, name=name, description=description) self._set_slot(slot_index, slot) self.log.info('Allocated new slot {slot_index:03d} for database table <{oid}>: {name}', slot_index=slot_index, oid=oid, name=name) else: raise RuntimeError('No slot found in database for DB table <{}>: "{}"'.format(oid, name)) else: slot_index = self._slots_by_index[oid] # pmap = _pmap.PersistentMap(slot_index) # with self.begin() as txn: # records = pmap.count(txn) self.log.debug('Database table <{name}> attached [oid=<{oid}>, slot=<{slot_index:03d}>]', name=name, oid=oid, slot_index=slot_index) if marshal: slot_pmap = klass(slot_index, marshal=marshal, unmarshal=parse, compress=compress) # type: ignore elif build: slot_pmap = klass(slot_index, build=build, cast=cast, compress=compress) # type: ignore else: slot_pmap = klass(slot_index, compress=compress) return slot_pmap zlmdb-22.6.1/zlmdb/_errors.py000066400000000000000000000026071426100523600160530ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: 
# # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### class NullValueConstraint(RuntimeError): """ Null value in indexed column violates not-null constraint. """ zlmdb-22.6.1/zlmdb/_meta.py000066400000000000000000000024401426100523600154600ustar00rootroot00000000000000############################################################################# # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### MAGIC = b'ZLMDB-S1' zlmdb-22.6.1/zlmdb/_pmap.py000066400000000000000000001447051426100523600155020ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### """Persistent mappings.""" import struct import sys import uuid import zlib from typing import Optional, List, Callable, Any, Tuple, Dict from zlmdb import _types, _errors from zlmdb._transaction import Transaction try: import snappy except ImportError: HAS_SNAPPY = False else: HAS_SNAPPY = True if sys.version_info < (3, ): from UserDict import DictMixin as MutableMapping _NATIVE_PICKLE_PROTOCOL = 2 else: from collections.abc import MutableMapping _NATIVE_PICKLE_PROTOCOL = 4 class Index(object): """ Holds book-keeping metadata for indexes on tables (pmaps). """ def __init__(self, name, fkey, pmap, nullable=False, unique=True): """ :param name: Index name. :type name: str :param fkey: Function that extracts the indexed value from the indexed table. :type fkey: callable :param pmap: Persistent map for index storage. :type pmap: :class:`zlmdb._pmap.PersistentMap` :param nullable: Whether the indexed table column is allowed to take ``None`` values. :type nullable: bool :param unique: Whether the indexed table column must take unique values. :type unique: bool """ self._name = name self._fkey = fkey self._pmap = pmap self._nullable = nullable self._unique = unique @property def name(self): """ Index name property. :return: Name of the index (on the indexed table). :rtype: str """ return self._name @property def fkey(self): """ Indexed value extractor property. :return: Function to extract indexed value from the indexed table. :rtype: callable """ return self._fkey @property def pmap(self): """ Index table (pmap) property. :return: Persistent map for index storage. :rtype: :class:`zlmdb._pmap.PersistentMap` """ return self._pmap @property def nullable(self): """ Index nullable property. :return: Whether the indexed table column is allowed to take ``None`` values. :rtype: bool """ return self._nullable @property def unique(self): """ Index uniqueness property- :return: Whether the indexed table column must take unique values. :rtype: bool """ return self._unique def is_null(value): """ Check if the scalar value or tuple/list value is NULL. :param value: Value to check. :type value: a scalar or tuple or list :return: Returns ``True`` if and only if the value is NULL (scalar value is None or _any_ tuple/list elements are None). :rtype: bool """ if type(value) in (tuple, list): for v in value: if v is None: return True return False else: return value is None def qual(obj): """ Return fully qualified name of a class. """ return u'{}.{}'.format(obj.__class__.__module__, obj.__class__.__name__) class PersistentMap(MutableMapping): """ Abstract base class for persistent maps stored in LMDB. 
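
    Concrete persistent maps combine a key-type and a value-type mixin, e.g.
    :class:`zlmdb.MapOidString` or :class:`zlmdb.MapUuidFlatBuffers` below,
    and store all records of one map under a common 2-byte slot prefix.

    A minimal usage sketch - the database path and the slot index used here
    are illustrative only::

        import zlmdb

        db = zlmdb.Database.open('/tmp/mydb')
        table = zlmdb.MapOidString(slot=1)

        with db.begin(write=True) as txn:
            table[txn, 23] = 'hello'

        with db.begin() as txn:
            assert table[txn, 23] == 'hello'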
""" COMPRESS_ZLIB = 1 COMPRESS_SNAPPY = 2 # these are filled by table decorate @zlmdb.table _zlmdb_oid: Optional[uuid.UUID] = None _zlmdb_marshal: Optional[Callable] = None _zlmdb_parse: Optional[Callable] = None _zlmdb_build: Optional[Callable] = None _zlmdb_cast: Optional[Callable] = None _zlmdb_compress: Optional[int] = None def __init__(self, slot: Optional[int], compress: Optional[int] = None): """ :param slot: :param compress: """ assert slot is None or type(slot) == int assert compress is None or compress in [PersistentMap.COMPRESS_ZLIB, PersistentMap.COMPRESS_SNAPPY] self._slot = slot if compress: if compress not in [PersistentMap.COMPRESS_ZLIB, PersistentMap.COMPRESS_SNAPPY]: raise Exception('invalid compression mode') if compress == PersistentMap.COMPRESS_SNAPPY and not HAS_SNAPPY: raise Exception('snappy compression requested, but snappy is not installed') if compress == PersistentMap.COMPRESS_ZLIB: self._compress = zlib.compress self._decompress = zlib.decompress elif compress == PersistentMap.COMPRESS_SNAPPY: self._compress = snappy.compress self._decompress = snappy.uncompress else: raise Exception('logic error') else: self._compress = lambda data: data # type: ignore self._decompress = lambda data: data # type: ignore # if this pmap is an index, the table-pmap the index-pmap is attached to self._index_attached_to = None # if this pmap is NOT an index, any indexes attached to this (table-)pmap self._indexes: Dict[str, Index] = {} def indexes(self) -> List[str]: """ :return: """ return sorted(self._indexes.keys()) def is_index(self) -> bool: """ Flag indicating whether this pmap is used as an index. :return: """ return self._index_attached_to is not None def attach_index(self, name: str, pmap: 'PersistentMap', fkey: Callable, nullable: bool = False, unique: bool = True): """ :param name: :param pmap: :param fkey: :param nullable: :param unique: """ if self._index_attached_to: raise Exception('cannot attach an index to an index (this pmap is already an index attached to {})'.format( self._index_attached_to)) if pmap._index_attached_to: raise Exception('index already attached (to {})'.format(pmap._index_attached_to)) if name in self._indexes: raise Exception('index with name "{}" already exists'.format(name)) self._indexes[name] = Index(name, fkey, pmap, nullable, unique) pmap._index_attached_to = self # type: ignore def detach_index(self, name: str): """ :param name: """ if name in self._indexes: del self._indexes[name] def _serialize_key(self, key): raise Exception('must be implemented in derived class') def _deserialize_key(self, data): raise Exception('must be implemented in derived class') def _serialize_value(self, value): raise Exception('must be implemented in derived class') def _deserialize_value(self, data): raise Exception('must be implemented in derived class') def __contains__(self, txn_key): """ :param txn_key: :return: """ assert type(txn_key) == tuple and len(txn_key) == 2 txn, key = txn_key assert isinstance(txn, Transaction) _key = struct.pack('>H', self._slot) + self._serialize_key(key) _data = txn.get(_key) return _data is not None def __getitem__(self, txn_key): """ :param txn_key: :return: """ assert type(txn_key) == tuple and len(txn_key) == 2 txn, key = txn_key assert isinstance(txn, Transaction) _key = struct.pack('>H', self._slot) + self._serialize_key(key) _data = txn.get(_key) if _data: if self._decompress: _data = self._decompress(_data) return self._deserialize_value(_data) else: return None def __setitem__(self, txn_key, value): """ :param 
txn_key: :param value: :return: """ assert type(txn_key) == tuple and len(txn_key) == 2 txn, key = txn_key assert isinstance(txn, Transaction) _key = struct.pack('>H', self._slot) + self._serialize_key(key) _data = self._serialize_value(value) if self._compress: _data = self._compress(_data) # if there are indexes defined, get existing object (if any), # so that we can properly maintain the indexes, should indexed # columns be set to NULL, in which case we need to delete the # respective index record _old_value = None if self._indexes: _old_data = txn.get(_key) if _old_data: if self._decompress: _old_data = self._decompress(_old_data) _old_value = self._deserialize_value(_old_data) # insert data record txn.put(_key, _data) # insert records into indexes for index in self._indexes.values(): # extract indexed column value, which will become the index record key _fkey = index.fkey(value) if _old_value: _fkey_old = index.fkey(_old_value) if not is_null(_fkey_old) and _fkey_old != _fkey: _idx_key = struct.pack('>H', index.pmap._slot) + index.pmap._serialize_key(_fkey_old) txn.delete(_idx_key) if is_null(_fkey): if not index.nullable: raise _errors.NullValueConstraint( 'cannot insert NULL value into non-nullable index "{}::{}"'.format(qual(self), index.name)) else: _key = struct.pack('>H', index.pmap._slot) + index.pmap._serialize_key(_fkey) _data = index.pmap._serialize_value(key) txn.put(_key, _data) def __delitem__(self, txn_key): """ :param txn_key: :return: """ assert type(txn_key) == tuple and len(txn_key) == 2 txn, key = txn_key assert isinstance(txn, Transaction) _key = struct.pack('>H', self._slot) + self._serialize_key(key) # delete records from indexes if self._indexes: value = self.__getitem__(txn_key) if value: for index in self._indexes.values(): _idx_key = struct.pack('>H', index.pmap._slot) + index.pmap._serialize_key(index.fkey(value)) txn.delete(_idx_key) # delete actual data record txn.delete(_key) def __len__(self): raise NotImplementedError() def __iter__(self): raise NotImplementedError() def select(self, txn: Transaction, from_key: Any = None, to_key: Any = None, return_keys: bool = True, return_values: bool = True, reverse: bool = False, limit: Optional[int] = None) -> 'PersistentMapIterator': """ Select all records (key-value pairs) in table, optionally within a given key range. :param txn: The transaction in which to run. :param from_key: Return records starting from (and including) this key. :param to_key: Return records up to (but not including) this key. :param return_keys: If ``True`` (default), return keys of records. :param return_values: If ``True`` (default), return values of records. :param reverse: If ``True``, return records in reverse order. :param limit: Limit number of records returned. :return: """ assert type(return_keys) == bool assert type(return_values) == bool assert type(reverse) == bool assert limit is None or (type(limit) == int and limit > 0 and limit < 10000000) return PersistentMapIterator(txn, self, from_key=from_key, to_key=to_key, return_keys=return_keys, return_values=return_values, reverse=reverse, limit=limit) def count(self, txn: Transaction, prefix: Any = None) -> int: """ Count number of records in the persistent map. When no prefix is given, the total number of records is returned. When a prefix is given, only the number of records with keys that have this prefix are counted. :param txn: The transaction in which to run. :param prefix: The key prefix of records to count. :returns: The number of records. 
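
        For example, for a table with string keys - ``db`` and ``table`` are
        illustrative only::

            with db.begin() as txn:
                total = table.count(txn)
                users = table.count(txn, prefix='user-')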
""" assert txn._txn key_from = struct.pack('>H', self._slot) if prefix: key_from += self._serialize_key(prefix) kfl = len(key_from) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: _key = cursor.key() _prefix = _key[:kfl] if _prefix != key_from: break cnt += 1 has_more = cursor.next() return cnt def count_range(self, txn: Transaction, from_key: Any, to_key: Any) -> int: """ Counter number of records in the perstistent map with keys within the given range. :param txn: The transaction in which to run. :param from_key: Count records starting and including from this key. :param to_key: End counting records before this key. :returns: The number of records. """ assert txn._txn key_from = struct.pack('>H', self._slot) + self._serialize_key(from_key) to_key = struct.pack('>H', self._slot) + self._serialize_key(to_key) cnt = 0 cursor = txn._txn.cursor() has_more = cursor.set_range(key_from) while has_more: if cursor.key() >= to_key: break cnt += 1 has_more = cursor.next() return cnt def truncate(self, txn: Transaction, rebuild_indexes: bool = True) -> int: """ :param txn: :param rebuild_indexes: :return: """ assert txn._txn assert self._slot key_from = struct.pack('>H', self._slot) key_to = struct.pack('>H', self._slot + 1) cursor = txn._txn.cursor() cnt = 0 if cursor.set_range(key_from): key = cursor.key() while key < key_to: if not cursor.delete(dupdata=True): break cnt += 1 if txn._stats: txn._stats.dels += 1 if rebuild_indexes: deleted, _ = self.rebuild_indexes(txn) cnt += deleted return cnt def rebuild_indexes(self, txn: Transaction) -> Tuple[int, int]: """ :param txn: :return: """ assert txn._txn total_deleted = 0 total_inserted = 0 for name in sorted(self._indexes.keys()): deleted, inserted = self.rebuild_index(txn, name) total_deleted += deleted total_inserted += inserted return total_deleted, total_inserted def rebuild_index(self, txn: Transaction, name: str) -> Tuple[int, int]: """ :param txn: :param name: :return: """ assert txn._txn assert self._slot if name in self._indexes: index = self._indexes[name] deleted = index.pmap.truncate(txn) key_from = struct.pack('>H', self._slot) key_to = struct.pack('>H', self._slot + 1) cursor = txn._txn.cursor() inserted = 0 if cursor.set_range(key_from): while cursor.key() < key_to: data = cursor.value() if data: value = self._deserialize_value(data) _key = struct.pack('>H', index.pmap._slot) + index.pmap._serialize_key(index.fkey(value)) _data = index.pmap._serialize_value(value.oid) txn.put(_key, _data) inserted += 1 if not cursor.next(): break return deleted, inserted else: raise Exception('no index "{}" attached'.format(name)) class PersistentMapIterator(object): """ Iterator that walks over zLMDB database records. 
""" def __init__(self, txn: Transaction, pmap: PersistentMap, from_key: Any = None, to_key: Any = None, return_keys: bool = True, return_values: bool = True, reverse: bool = False, limit: Optional[int] = None): """ :param txn: :param pmap: :param from_key: :param to_key: :param return_keys: :param return_values: :param reverse: :param limit: """ self._txn = txn self._pmap = pmap assert pmap._slot if from_key: self._from_key = struct.pack('>H', pmap._slot) + pmap._serialize_key(from_key) else: self._from_key = struct.pack('>H', pmap._slot) if to_key: self._to_key = struct.pack('>H', pmap._slot) + pmap._serialize_key(to_key) else: self._to_key = struct.pack('>H', pmap._slot + 1) self._reverse = reverse self._return_keys = return_keys self._return_values = return_values self._limit = limit self._read = 0 self._cursor = None self._found = None def __iter__(self) -> 'PersistentMapIterator': assert self._txn._txn self._cursor = self._txn._txn.cursor() assert self._cursor # https://lmdb.readthedocs.io/en/release/#lmdb.Cursor.set_range if self._reverse: # seek to the first record starting from to_key (and going reverse) self._found = self._cursor.set_range(self._to_key) if self._found: # to_key is _not_ inclusive, so we move on one record self._found = self._cursor.prev() else: self._found = self._cursor.last() else: # seek to the first record starting from from_key self._found = self._cursor.set_range(self._from_key) return self def __next__(self): """ :return: Return either ``(key, value)``, ``key`` or ``value``, depending on ``return_keys`` and ``return_values``. """ # stop criteria: no more records or limit reached if not self._found or (self._limit and self._read >= self._limit): raise StopIteration self._read += 1 # stop criteria: end of key-range reached _key = self._cursor.key() if self._reverse: if _key < self._from_key: raise StopIteration else: if _key >= self._to_key: raise StopIteration # read actual app key-value (before moving cursor) _key = self._pmap._deserialize_key(_key[2:]) if self._return_values: _data = self._cursor.value() if _data: if self._pmap._decompress: _data = self._pmap._decompress(_data) _data = self._pmap._deserialize_value(_data) else: _data = None # move the cursor if self._reverse: self._found = self._cursor.prev() else: self._found = self._cursor.next() # return app key-value if self._return_keys and self._return_values: return _key, _data elif self._return_values: return _data elif self._return_keys: return _key else: return None next = __next__ # Python 2 # # Key: UUID -> Value: String, OID, UUID, JSON, CBOR, Pickle, FlatBuffers # class MapSlotUuidUuid(_types._SlotUuidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (slot, UUID) and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidString(_types._UuidKeysMixin, _types._StringValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and string (utf8) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidOid(_types._UuidKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and OID (uint64) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuid(_types._UuidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and UUID (16 bytes) values. 
""" def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidCbor(_types._UuidUuidKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapUuidTimestampBytes32(_types._UuidTimestampKeysMixin, _types._Bytes32ValuesMixin, PersistentMap): """ Persistent map with (UUID, Timestamp) keys and Bytes32 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUint64TimestampUuid(_types._Uint64TimestampKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (Uint64, Timestamp) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidUuid(_types._UuidUuidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidTimestampUuid(_types._UuidTimestampKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, timestamp) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidStringUuid(_types._UuidStringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, string) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidStringUuid(_types._UuidUuidStringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID, string) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidUuidStringUuid(_types._UuidUuidUuidStringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID, UUID, string) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidUuidUuid(_types._UuidUuidUuidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID, UUID) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidUuidUuidUuid(_types._UuidUuidUuidUuidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID, UUID, UUID) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidStringOid(_types._UuidStringKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (UUID, string) keys and Oid values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidUuidSet(_types._UuidKeysMixin, _types._UuidSetValuesMixin, PersistentMap): """ Persistent map with (UUID, string) keys and UUID values. 
""" def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidJson(_types._UuidKeysMixin, _types._JsonValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and JSON values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._JsonValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapUuidCbor(_types._UuidKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapUuidPickle(_types._UuidKeysMixin, _types._PickleValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and Python Pickle values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapUuidFlatBuffers(_types._UuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with UUID (16 bytes) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidTimestampFlatBuffers(_types._UuidTimestampKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, Timestamp) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapTimestampFlatBuffers(_types._TimestampKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with Timestamp keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapTimestampUuidFlatBuffers(_types._TimestampUuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Timestamp, UUID) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidTimestampUuidFlatBuffers(_types._UuidTimestampUuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, Timestamp, UUID) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUint16UuidTimestampFlatBuffers(_types._Uint16UuidTimestampKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (uint16, UUID, Timestamp) keys and FlatBuffers values. 
""" def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidBytes20Uint8FlatBuffers(_types._UuidBytes20Uint8KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, bytes[20], uint8) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidBytes20Uint8UuidFlatBuffers(_types._UuidBytes20Uint8UuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, bytes[20], uint8, UUID) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidBytes20Bytes20Uint8UuidFlatBuffers(_types._UuidBytes20Bytes20Uint8UuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, bytes[20], bytes[20], uint8, UUID) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapTimestampUuidStringFlatBuffers(_types._TimestampUuidStringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Timestamp, UUID, String) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapTimestampBytes32FlatBuffers(_types._TimestampBytes32KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Timestamp, Bytes32) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapTimestampStringFlatBuffers(_types._TimestampStringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Timestamp, String) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidTimestampCbor(_types._UuidTimestampKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with (UUID, Timestamp) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapTimestampUuidCbor(_types._TimestampUuidKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with (Timestamp, UUID) keys and CBOR values. 
""" def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapStringTimestampCbor(_types._StringTimestampKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with (String, Timestamp) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapTimestampStringCbor(_types._TimestampStringKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with (Timestamp, String) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) # # Key: String -> Value: String, OID, UUID, JSON, CBOR, Pickle, FlatBuffers # class MapStringString(_types._StringKeysMixin, _types._StringValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and string (utf8) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringOid(_types._StringKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and OID (uint64) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringOidOid(_types._StringOidKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (string:utf8, OID:uint64) keys and OID:uint64 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringUuid(_types._StringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and UUID (16 bytes) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringStringUuid(_types._StringStringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (string, string) keys and UUID (16 bytes) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringStringStringUuid(_types._StringStringStringKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (string, string, string) keys and UUID (16 bytes) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringJson(_types._StringKeysMixin, _types._JsonValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and JSON values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._JsonValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapStringCbor(_types._StringKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and CBOR values. 
""" def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapStringPickle(_types._StringKeysMixin, _types._PickleValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and Python pickle values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapStringFlatBuffers(_types._StringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with string (utf8) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) # # Key: OID -> Value: String, OID, UUID, JSON, CBOR, Pickle, FlatBuffers # class MapOidString(_types._OidKeysMixin, _types._StringValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and string (utf8) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidOid(_types._OidKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and OID (uint64) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidUuid(_types._OidKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and UUID (16 bytes) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidJson(_types._OidKeysMixin, _types._JsonValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and JSON values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._JsonValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapOidCbor(_types._OidKeysMixin, _types._CborValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and CBOR values. """ def __init__(self, slot=None, compress=None, marshal=None, unmarshal=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._CborValuesMixin.__init__(self, marshal=marshal, unmarshal=unmarshal) class MapOidPickle(_types._OidKeysMixin, _types._PickleValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and Python pickle values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidFlatBuffers(_types._OidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapOidOidFlatBuffers(_types._OidOidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (OID, OID) / (uint64, uint64) keys and FlatBuffers values. 
""" def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapOid3FlatBuffers(_types._Oid3KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (OID, OID, OID) / (uint64, uint64, uint64) keys and FlatBuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapOidOidSet(_types._OidKeysMixin, _types._OidSetValuesMixin, PersistentMap): """ Persistent map with OID (uint64) keys and OID-set (set of unique uint64) values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidStringOid(_types._OidStringKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (OID, string) keys and OID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidOidOid(_types._OidOidKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (OID, OID) keys and OID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidTimestampOid(_types._OidTimestampKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (OID, Timestamp) keys and OID values, where Timestamp is a np.datetime64[ns]. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapOidTimestampFlatBuffers(_types._OidTimestampKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (OID, Timestamp) keys and Flatbuffers values, where Timestamp is a np.datetime64[ns]. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapOidTimestampStringOid(_types._OidTimestampStringKeysMixin, _types._OidValuesMixin, PersistentMap): """ Persistent map with (OID, Timestamp, String) keys and OID values, where Timestamp is a np.datetime64[ns]. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) # # Key types: Bytes32, (Bytes32, Bytes32), (Bytes32, String), ... # Value type: FlatBuffers # class MapBytes32Uuid(_types._Bytes32KeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with Bytes32 keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes32Timestamp(_types._Bytes32KeysMixin, _types._TimestampValuesMixin, PersistentMap): """ Persistent map with Bytes32 keys and Timestamp values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes32Bytes32(_types._Bytes32KeysMixin, _types._Bytes32ValuesMixin, PersistentMap): """ Persistent map with Bytes32 keys and Bytes32 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes32FlatBuffers(_types._Bytes32KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with Bytes32 keys and Flatbuffers values. 
""" def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes32UuidFlatBuffers(_types._Bytes32UuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes32, UUID) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidBytes32FlatBuffers(_types._UuidBytes32KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, Bytes32) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidUuidStringFlatBuffers(_types._UuidUuidStringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID, String) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidUuidFlatBuffers(_types._UuidUuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, UUID) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapUuidStringFlatBuffers(_types._UuidStringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (UUID, String) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes32Bytes32FlatBuffers(_types._Bytes32Bytes32KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes32, Bytes32) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes32StringFlatBuffers(_types._Bytes32StringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes32, String) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) # # Key types: Bytes20, (Bytes20, Bytes20), (Bytes20, String) # Value type: FlatBuffers # class MapBytes20Bytes20(_types._Bytes20KeysMixin, _types._Bytes20ValuesMixin, PersistentMap): """ Persistent map with Bytes20 keys and Bytes20 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20Bytes20Timestamp(_types._Bytes20KeysMixin, _types._Bytes20TimestampValuesMixin, PersistentMap): """ Persistent map with Bytes20 keys and (Bytes20, Timestamp) values. 
""" def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20TimestampBytes20(_types._Bytes20TimestampKeysMixin, _types._Bytes20ValuesMixin, PersistentMap): """ Persistent map with (Bytes20, Timestamp) keys and Bytes20 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20TimestampUuid(_types._Bytes20TimestampKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (Bytes20, Timestamp) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20Uuid(_types._Bytes20KeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with Bytes20 keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20Bytes16(_types._Bytes20KeysMixin, _types._Bytes16ValuesMixin, PersistentMap): """ Persistent map with Bytes20 keys and Bytes16 values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes20FlatBuffers(_types._Bytes20KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with Bytes20 keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes16FlatBuffers(_types._Bytes16KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with Bytes16 keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes16TimestampUuid(_types._Bytes16TimestampKeysMixin, _types._UuidValuesMixin, PersistentMap): """ Persistent map with (Bytes20, Timestamp) keys and UUID values. """ def __init__(self, slot=None, compress=None): PersistentMap.__init__(self, slot=slot, compress=compress) class MapBytes16TimestampUuidFlatBuffers(_types._Bytes16TimestampUuidKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes20, Timestamp, UUID) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes20Bytes20FlatBuffers(_types._Bytes20Bytes20KeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes20, Bytes20) keys and Flatbuffers values. """ def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) class MapBytes20StringFlatBuffers(_types._Bytes20StringKeysMixin, _types._FlatBuffersValuesMixin, PersistentMap): """ Persistent map with (Bytes20, String) keys and Flatbuffers values. 
""" def __init__(self, slot=None, compress=None, build=None, cast=None): PersistentMap.__init__(self, slot=slot, compress=compress) _types._FlatBuffersValuesMixin.__init__(self, build=build, cast=cast) zlmdb-22.6.1/zlmdb/_schema.py000066400000000000000000000071021426100523600157720ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### from zlmdb._pmap import PersistentMap class Slot(object): """ LMDB database slot. A slot is defined just by the convention of using the first 2 bytes of keys in a LMDB database as the "slot index". The 2 bytes are interpreted as an uint16 in big endian byte order. """ def __init__(self, slot, name, pmap): """ :param slot: :param name: :param pmap: """ self.slot = slot self.name = name self.pmap = pmap class Schema(object): """ ZLMDB database schema definition. """ SLOT_DATA_EMPTY = 0 """ Database slot is empty (unused, not necessarily zero'ed, but uninitialized). """ SLOT_DATA_METADATA = 1 """ FIXME. """ SLOT_DATA_TYPE = 2 """ FIXME. """ SLOT_DATA_SEQUENCE = 3 """ FIXME. """ SLOT_DATA_TABLE = 4 """ Database slot contains a persistent map, for example a map of type OID to Pickle. """ SLOT_DATA_INDEX = 5 """ FIXME. """ SLOT_DATA_REPLICATION = 6 """ FIXME. """ SLOT_DATA_MATERIALIZATION = 7 """ FIXME. """ def __init__(self): self._index_to_slot = {} self._name_to_slot = {} def slot(self, slot_index, marshal=None, unmarshal=None, build=None, cast=None, compress=False): """ Decorator for use on classes derived from zlmdb.PersistentMap. The decorator define slots in a LMDB database schema based on persistent maps, and slot configuration. 
:param slot_index: :param marshal: :param unmarshal: :param build: :param cast: :param compress: :return: """ def decorate(o): assert isinstance(o, PersistentMap) name = o.__class__.__name__ assert slot_index not in self._index_to_slot assert name not in self._name_to_slot o._zlmdb_slot = slot_index o._zlmdb_marshal = marshal o._zlmdb_unmarshal = unmarshal o._zlmdb_build = build o._zlmdb_cast = cast o._zlmdb_compress = compress _slot = Slot(slot_index, name, o) self._index_to_slot[slot_index] = _slot self._name_to_slot[name] = _slot return o return decorate zlmdb-22.6.1/zlmdb/_transaction.py000066400000000000000000000112771426100523600170670ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### """Transactions""" import struct import lmdb from typing import Optional from txaio import time_ns as walltime class TransactionStats(object): """ Value class for holding transaction statistics. """ def __init__(self): self.puts = 0 self.dels = 0 self._started = walltime() @property def started(self): """ :return: start time in ns since epoch """ return self._started @property def duration(self): """ :return: duration in ns """ if self._started: return walltime() - self._started else: return 0 def reset(self): """ :return: """ self.puts = 0 self.dels = 0 self._started = walltime() class Transaction(object): """ Transactions in zLMDB are always run under an instance of this class. """ PUT = 1 DEL = 2 def __init__(self, db, write=False, buffers=False, stats=None): """ :param db: :type db: zlmdb.Database :param write: :type write: bool :param stats: :type stats: TransactionStats """ self._db = db self._write = write self._buffers = buffers self._stats = stats self._txn: Optional[lmdb.Transaction] = None self._log = None def __enter__(self): assert (self._txn is None) self._txn = lmdb.Transaction(self._db._env, write=self._write, buffers=self._buffers) return self def __exit__(self, exc_type, exc_value, traceback): assert (self._txn is not None) # https://docs.python.org/3/reference/datamodel.html#object.__exit__ # If the context was exited without an exception, all three arguments will be None. 
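        # commit the transaction when the block exited cleanly, otherwise
        # roll it back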
if exc_type is None: if self._log: cnt = 0 for op, key in self._log: _key = struct.pack('>H', 0) _data = struct.pack('>H', op) + key self._txn.put(_key, _data) cnt += 1 self._txn.commit() else: self._txn.abort() self._txn = None def id(self): """ :return: """ assert (self._txn is not None) return self._txn.id() def get(self, key): """ :param key: :return: """ assert (self._txn is not None) return self._txn.get(key) def put(self, key, data, overwrite=True): """ :param key: :param data: :param overwrite: :return: """ assert (self._txn is not None) # store the record, returning True if it was written, or False to indicate the key # was already present and overwrite=False. was_written = self._txn.put(key, data, overwrite=overwrite) if was_written: if self._stats: self._stats.puts += 1 if self._log: self._log.append((Transaction.PUT, key)) return was_written def delete(self, key): """ :param key: :return: """ assert (self._txn is not None) was_deleted = self._txn.delete(key) if was_deleted: if self._stats: self._stats.dels += 1 if self._log: self._log.append((Transaction.DEL, key)) return was_deleted zlmdb-22.6.1/zlmdb/_types.py000066400000000000000000001275571426100523600157170ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import struct import random import binascii import pickle import os import uuid import json import cbor2 import flatbuffers from txaio import time_ns try: import numpy as np except ImportError: HAS_NUMPY = False else: HAS_NUMPY = True CHARSET = u'345679ACEFGHJKLMNPQRSTUVWXY' """ Charset from which to generate random key IDs. .. note:: We take out the following 9 chars (leaving 27), because there is visual ambiguity: 0/O/D, 1/I, 8/B, 2/Z. """ CHAR_GROUPS = 4 CHARS_PER_GROUP = 6 GROUP_SEP = u'-' def _random_string(): """ Generate a globally unique serial / product code of the form ``u'YRAC-EL4X-FQQE-AW4T-WNUV-VN6T'``. The generated value is cryptographically strong and has (at least) 114 bits of entropy. 
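(Worked out: the token consists of 24 symbols drawn uniformly from a 27-character alphabet, so its entropy is log2(27 ** 24) = 24 * log2(27) ≈ 114.1 bits; the group separators are deterministic and add none.)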
:return: new random string key """ rng = random.SystemRandom() token_value = u''.join(rng.choice(CHARSET) for _ in range(CHAR_GROUPS * CHARS_PER_GROUP)) if CHARS_PER_GROUP > 1: return GROUP_SEP.join(map(u''.join, zip(*[iter(token_value)] * CHARS_PER_GROUP))) else: return token_value def dt_to_bytes(dt): """ Serialize a timestamp in big-endian byte order. :param dt: Timestamp to serialize. :return: Serialized bytes. """ assert isinstance(dt, np.datetime64) data = bytearray(dt.tobytes()) # FIXME: this must depend on host CPU arch data.reverse() return bytes(data) def bytes_to_dt(data): """ Deserialize a timestamp from big-endian byte order data. :param data: Data to deserialize. :return: Deserialized timestamp. """ assert type(data) == bytes data = bytearray(data) # FIXME: this must depend on host CPU arch data.reverse() dt = np.frombuffer(bytes(data), dtype='datetime64[ns]')[0] return dt # # Key Types # class _OidKeysMixin(object): MAX_OID = 9007199254740992 """ Valid OIDs are from the integer range [0, MAX_OID]. The upper bound 2**53 is chosen since it is the maximum integer that can be represented as an IEEE double such that all smaller integers are representable as well. Hence, IDs can be safely used with languages that use IEEE double as their main (or only) number type (JavaScript, Lua, etc). """ @staticmethod def new_key(secure=False): if secure: while True: data = os.urandom(8) key = struct.unpack('>Q', data)[0] if key <= _OidKeysMixin.MAX_OID: return key else: return random.randint(0, _OidKeysMixin.MAX_OID) def _serialize_key(self, key): assert type(key) == int assert key >= 0 and key <= _OidKeysMixin.MAX_OID return struct.pack('>Q', key) def _deserialize_key(self, data): return struct.unpack('>Q', data)[0] class _OidOidKeysMixin(object): @staticmethod def new_key(secure=False): return _OidKeysMixin.new_key(secure=secure), _OidKeysMixin.new_key(secure=secure) def _serialize_key(self, keys): assert type(keys) == tuple assert len(keys) == 2 key1, key2 = keys assert type(key1) == int assert key1 >= 0 and key1 <= _OidKeysMixin.MAX_OID assert type(key2) == int assert key2 >= 0 and key2 <= _OidKeysMixin.MAX_OID return struct.pack('>QQ', key1, key2) def _deserialize_key(self, data): assert len(data) == 16 return struct.unpack('>QQ', data) class _Oid3KeysMixin(object): @staticmethod def new_key(secure=False): return _OidKeysMixin.new_key(secure=secure), _OidKeysMixin.new_key(secure=secure), _OidKeysMixin.new_key( secure=secure) def _serialize_key(self, keys): assert type(keys) == tuple assert len(keys) == 3 key1, key2, key3 = keys assert type(key1) == int assert key1 >= 0 and key1 <= _OidKeysMixin.MAX_OID assert type(key2) == int assert key2 >= 0 and key2 <= _OidKeysMixin.MAX_OID assert type(key3) == int assert key3 >= 0 and key3 <= _OidKeysMixin.MAX_OID return struct.pack('>QQQ', key1, key2, key3) def _deserialize_key(self, data): assert len(data) == 24 return struct.unpack('>QQQ', data) class _OidTimestampKeysMixin(object): @staticmethod def new_key(secure=False): return _OidKeysMixin.new_key(secure=secure), np.datetime64(0, 'ns') def _serialize_key(self, keys): assert type(keys) == tuple assert len(keys) == 2 key1, key2 = keys assert type(key1) == int assert key1 >= 0 and key1 <= _OidKeysMixin.MAX_OID assert isinstance(key2, np.datetime64) return struct.pack('>Q', key1) + dt_to_bytes(key2) def _deserialize_key(self, data): assert len(data) == 16 key1 = struct.unpack('>Q', data[0:8])[0] key2 = bytes_to_dt(data[8:16]) return key1, key2 class _OidTimestampStringKeysMixin(object): @staticmethod def new_key(secure=False):
return _OidKeysMixin.new_key(secure=secure), np.datetime64(0, 'ns'), '' def _serialize_key(self, keys): assert type(keys) == tuple assert len(keys) == 3 key1, key2, key3 = keys assert type(key1) == int assert key1 >= 0 and key1 <= _OidKeysMixin.MAX_OID assert isinstance(key2, np.datetime64) assert type(key3) == str return struct.pack('>Q', key1) + dt_to_bytes(key2) + key3.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 16 oid = struct.unpack('>Q', data[:8])[0] ts = bytes_to_dt(data[8:16]) s = data[16:].decode('utf8') return oid, ts, s class _OidStringKeysMixin(object): @staticmethod def new_key(secure=False): return _OidKeysMixin.new_key(secure=secure), '' def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == int assert type(key2) == str return struct.pack('>Q', key1) + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 8 oid = struct.unpack('>Q', data[:8])[0] data = data[8:] s = data.decode('utf8') return oid, s class _StringOidKeysMixin(object): @staticmethod def new_key(secure=False): return _random_string(), _OidKeysMixin.new_key(secure=secure) def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == str assert type(key2) == int return key1.encode('utf8') + struct.pack('>Q', key2) def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 8 oid = struct.unpack('>Q', data[-8:])[0] data = data[:-8] s = data.decode('utf8') return s, oid class _StringKeysMixin(object): @staticmethod def new_key(): return _random_string() def _serialize_key(self, key): assert type(key) == str return key.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes return data.decode('utf8') class _StringStringKeysMixin(object): @staticmethod def new_key(): return _random_string(), _random_string() def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == str assert type(key2) == str return key1.encode('utf8') + b'\x00' + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 0 d = data.split(b'\x00') assert len(d) == 2 return d[0].decode('utf8'), d[1].decode('utf8') class _StringStringStringKeysMixin(object): @staticmethod def new_key(): return _random_string(), _random_string(), _random_string() def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 assert type(key1) == str assert type(key2) == str assert type(key3) == str return key1.encode('utf8') + b'\x00' + key2.encode('utf8') + b'\x00' + key3.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 0 d = data.split(b'\x00') assert len(d) == 3 return d[0].decode('utf8'), d[1].decode('utf8'), d[2].decode('utf8') class _UuidKeysMixin(object): @staticmethod def new_key(): # https://docs.python.org/3/library/uuid.html#uuid.uuid4 # return uuid.UUID(bytes=os.urandom(16)) return uuid.uuid4() def _serialize_key(self, key): assert isinstance(key, uuid.UUID), 'key must be a UUID, but was "{}"'.format(key) # The UUID as a 16-byte string (containing the six integer fields in big-endian byte order).
# https://docs.python.org/3/library/uuid.html#uuid.UUID.bytes return key.bytes def _deserialize_key(self, data): assert type(data) == bytes return uuid.UUID(bytes=data) class _UuidUuidKeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert isinstance(key2, uuid.UUID) return key1.bytes + key2.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 32 data1, data2 = data[0:16], data[16:32] return uuid.UUID(bytes=data1), uuid.UUID(bytes=data2) class _UuidUuidUuidKeysMixin(object): def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) if key3 is None: key3 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert isinstance(key2, uuid.UUID) assert isinstance(key3, uuid.UUID) return key1.bytes + key2.bytes + key3.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 48 data1, data2, data3 = data[0:16], data[16:32], data[32:48] return uuid.UUID(bytes=data1), uuid.UUID(bytes=data2), uuid.UUID(bytes=data3) class _UuidUuidUuidUuidKeysMixin(object): def _serialize_key(self, key1_key2_key3_key4): assert type(key1_key2_key3_key4) == tuple and len(key1_key2_key3_key4) == 4 key1, key2, key3, key4 = key1_key2_key3_key4 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) if key3 is None: key3 = uuid.UUID(bytes=b'\x00' * 16) if key4 is None: key4 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert isinstance(key2, uuid.UUID) assert isinstance(key3, uuid.UUID) assert isinstance(key4, uuid.UUID) return key1.bytes + key2.bytes + key3.bytes + key4.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 64 data1, data2, data3, data4 = data[0:16], data[16:32], data[32:48], data[48:64] return uuid.UUID(bytes=data1), uuid.UUID(bytes=data2), uuid.UUID(bytes=data3), uuid.UUID(bytes=data4) class _Uint16UuidTimestampKeysMixin(object): @staticmethod def new_key(): return random.randint(0, 2**16 - 1), uuid.uuid4(), np.datetime64(time_ns(), 'ns') def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 if key1 is None: key1 = 0 if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) if key3 is None: key3 = np.datetime64(0, 'ns') assert type(key1) == int and key1 >= 0 and key1 < 2**16 assert isinstance(key2, uuid.UUID) assert isinstance(key3, np.datetime64) return struct.pack('>H', key1) + key2.bytes + dt_to_bytes(key3) def _deserialize_key(self, data): assert type(data) == bytes and len(data) == (2 + 16 + 8) data1, data2, data3 = data[0:2], data[2:18], data[18:26] return struct.unpack('>H', data1)[0], uuid.UUID(bytes=data2), bytes_to_dt(data3) class _UuidBytes20Uint8KeysMixin(object): @staticmethod def new_key(): return uuid.uuid4(), os.urandom(20), random.randint(0, 255) def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = b'\x00' * 20 if key3 is None: key3 = 0 assert
isinstance(key1, uuid.UUID) assert type(key2) == bytes and len(key2) == 20 # FIXME: workaround to process eg (3,) if type(key3) == tuple and len(key3) == 1: key3 = key3[0] assert type(key3) == int and key3 >= 0 and key3 < 256 return key1.bytes + key2 + struct.pack('B', key3) def _deserialize_key(self, data): assert type(data) == bytes and len(data) == (16 + 20 + 1) data1, data2, data3 = data[0:16], data[16:36], data[36:37] return uuid.UUID(bytes=data1), data2, struct.unpack('B', data3)[0] class _UuidBytes20Uint8UuidKeysMixin(object): @staticmethod def new_key(): return uuid.uuid4(), os.urandom(20), random.randint(0, 255), uuid.uuid4() def _serialize_key(self, key1_key2_key3_key4): assert type(key1_key2_key3_key4) == tuple and len(key1_key2_key3_key4) == 4 key1, key2, key3, key4 = key1_key2_key3_key4 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = b'\x00' * 20 if key3 is None: key3 = 0 if key4 is None: key4 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert type(key2) == bytes and len(key2) == 20 assert type(key3) == int and key3 >= 0 and key3 < 256 assert isinstance(key4, uuid.UUID) return key1.bytes + key2 + struct.pack('B', key3) + key4.bytes def _deserialize_key(self, data): assert type(data) == bytes and len(data) == (16 + 20 + 1 + 16) data1, data2, data3, data4 = data[0:16], data[16:36], data[36:37], data[37:53] return uuid.UUID(bytes=data1), data2, struct.unpack('B', data3)[0], uuid.UUID(bytes=data4) class _UuidBytes20Bytes20Uint8UuidKeysMixin(object): @staticmethod def new_key(): return uuid.uuid4(), os.urandom(20), os.urandom(20), random.randint(0, 255), uuid.uuid4() def _serialize_key(self, key1_key2_key3_key4_key5): assert type(key1_key2_key3_key4_key5) == tuple and len(key1_key2_key3_key4_key5) == 5 key1, key2, key3, key4, key5 = key1_key2_key3_key4_key5 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = b'\x00' * 20 if key3 is None: key3 = b'\x00' * 20 if key4 is None: key4 = 0 if key5 is None: key5 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert type(key2) == bytes and len(key2) == 20 assert type(key3) == bytes and len(key3) == 20 assert type(key4) == int and key4 >= 0 and key4 < 256 assert isinstance(key5, uuid.UUID) return key1.bytes + key2 + key3 + struct.pack('B', key4) + key5.bytes def _deserialize_key(self, data): assert type(data) == bytes and len(data) == (16 + 20 + 20 + 1 + 16) data1, data2, data3, data4, data5 = data[0:16], data[16:36], data[36:56], data[56:57], data[57:73] return uuid.UUID(bytes=data1), data2, data3, struct.unpack('B', data4)[0], uuid.UUID(bytes=data5) class _TimestampKeysMixin(object): @staticmethod def new_key(): return np.datetime64(time_ns(), 'ns') def _serialize_key(self, key1): assert isinstance(key1, np.datetime64) return dt_to_bytes(key1) def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 8 return bytes_to_dt(data[0:8]) class _TimestampUuidKeysMixin(object): @staticmethod def new_key(): return np.datetime64(time_ns(), 'ns'), uuid.uuid4() def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = np.datetime64(0, 'ns') if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, np.datetime64) assert isinstance(key2, uuid.UUID) return dt_to_bytes(key1) + key2.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 24 data1, data2 = data[0:8], data[8:24] key1 = bytes_to_dt(data1) key2 =
uuid.UUID(bytes=data2) return key1, key2 class _UuidTimestampUuidKeysMixin(object): @staticmethod def new_key(): return uuid.uuid4(), np.datetime64(time_ns(), 'ns'), uuid.uuid4() def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = np.datetime64(0, 'ns') if key3 is None: key3 = uuid.UUID(bytes=b'\x00' * 16) assert isinstance(key1, uuid.UUID) assert isinstance(key2, np.datetime64) assert isinstance(key3, uuid.UUID) return key1.bytes + dt_to_bytes(key2) + key3.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 32 data1, data2, data3 = data[0:16], data[16:24], data[24:32] key1 = uuid.UUID(bytes=data1) key2 = bytes_to_dt(data2) key3 = uuid.UUID(bytes=data3) return key1, key2, key3 class _TimestampUuidStringKeysMixin(object): @staticmethod def new_key(): return np.datetime64(time_ns(), 'ns'), uuid.uuid4(), '' def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 if key1 is None: key1 = np.datetime64(0, 'ns') if key2 is None: key2 = uuid.UUID(bytes=b'\x00' * 16) if key3 is None: key3 = u'' assert isinstance(key1, np.datetime64) assert isinstance(key2, uuid.UUID) assert type(key3) == str return dt_to_bytes(key1) + key2.bytes + key3.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) >= 24 data1, data2, data3 = data[0:8], data[8:24], data[24:] key1 = bytes_to_dt(data1) key2 = uuid.UUID(bytes=data2) key3 = data3.decode('utf8') if data3 else u'' return key1, key2, key3 class _TimestampBytes32KeysMixin(object): @staticmethod def new_key(): return np.datetime64(time_ns(), 'ns'), os.urandom(32) def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = np.datetime64(0, 'ns') if key2 is None: key2 = b'\x00' * 32 assert isinstance(key1, np.datetime64) assert isinstance(key2, bytes) and len(key2) == 32 return dt_to_bytes(key1) + key2 def _deserialize_key(self, data): assert type(data) == bytes, 'data must be binary, but got {}'.format(type(data)) assert len(data) == 40, 'data must have len 40, but got {}'.format(len(data)) data1, data2 = data[0:8], data[8:40] key1 = bytes_to_dt(data1) key2 = data2 return key1, key2 class _TimestampStringKeysMixin(object): @staticmethod def new_key(): return np.datetime64(time_ns(), 'ns'), _StringKeysMixin.new_key() def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = np.datetime64(0, 'ns') if key2 is None: key2 = u'' assert isinstance(key1, np.datetime64) assert type(key2) == str return dt_to_bytes(key1) + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 8 data1, data2 = data[0:8], data[8:] key1 = bytes_to_dt(data1) key2 = data2.decode('utf8') return key1, key2 class _StringTimestampKeysMixin(object): @staticmethod def new_key(): return _StringKeysMixin.new_key(), np.datetime64(time_ns(), 'ns') def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = u'' if key2 is None: key2 = np.datetime64(0, 'ns') assert type(key1) == str assert isinstance(key2, np.datetime64) return key1.encode('utf8') + dt_to_bytes(key2) def
_deserialize_key(self, data): assert type(data) == bytes assert len(data) > 8 slen = len(data) - 8 data1, data2 = data[0:slen], data[slen:] key1 = data1.decode('utf8') key2 = bytes_to_dt(data2) return key1, key2 class _UuidTimestampKeysMixin(object): @staticmethod def new_key(): return uuid.uuid4(), np.datetime64(time_ns(), 'ns') def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = np.datetime64(0, 'ns') assert isinstance(key1, uuid.UUID) assert isinstance(key2, np.datetime64) return key1.bytes + dt_to_bytes(key2) def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 24 data1, data2 = data[0:16], data[16:24] key1 = uuid.UUID(bytes=data1) key2 = bytes_to_dt(data2) return key1, key2 class _Uint64TimestampKeysMixin(object): @staticmethod def new_key(): return random.randint(1, 2**64 - 1), np.datetime64(time_ns(), 'ns') def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = 0 if key2 is None: key2 = np.datetime64(0, 'ns') assert type(key1) == int, 'key1 must be int, but was {}'.format(type(key1)) assert isinstance(key2, np.datetime64), 'key2 must be np.datetime64, but was {}'.format(type(key2)) return struct.pack('>Q', key1) + dt_to_bytes(key2) def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 16 data1, data2 = data[0:8], data[8:16] key1 = struct.unpack('>Q', data1)[0] key2 = bytes_to_dt(data2) return key1, key2 class _UuidStringKeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 if key1 is None: key1 = uuid.UUID(bytes=b'\x00' * 16) if key2 is None: key2 = u'' assert isinstance(key1, uuid.UUID), 'key1 must be of type UUID, but was {}'.format(type(key1)) assert type(key2) == str, 'key2 must be of type string, but was {}'.format(type(key2)) # The UUID as a 16-byte string (containing the six integer fields in big-endian byte order). 
# https://docs.python.org/3/library/uuid.html#uuid.UUID.bytes return key1.bytes + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) >= 16 data1 = data[:16] if len(data) > 16: data2 = data[16:] else: data2 = b'' return uuid.UUID(bytes=data1), data2.decode('utf8') class _SlotUuidKeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == int assert key1 >= 0 and key1 < 2**16 assert isinstance(key2, uuid.UUID) return struct.pack('>H', key1) + key2.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == (2 + 16) data1, data2 = data[:2], data[2:] return struct.unpack('>H', data1)[0], uuid.UUID(bytes=data2) class _Bytes32KeysMixin(object): @staticmethod def new_key(): return os.urandom(32) def _serialize_key(self, key): assert type(key) == bytes, 'key must be bytes[32], was "{}"'.format(key) assert len(key) == 32 return key def _deserialize_key(self, data): assert type(data) == bytes, 'data must be bytes[32], was "{}"'.format(data) assert len(data) == 32 return data class _Bytes32Bytes32KeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == bytes assert len(key1) == 32 assert type(key2) == bytes assert len(key2) == 32 return key1 + key2 def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 64 data1, data2 = data[0:32], data[32:64] return data1, data2 class _Bytes32UuidKeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == bytes assert len(key1) == 32 assert isinstance(key2, uuid.UUID) return key1 + key2.bytes def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 48 data1, data2 = data[0:32], data[32:48] return data1, uuid.UUID(bytes=data2) class _UuidBytes32KeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert isinstance(key1, uuid.UUID) assert type(key2) == bytes assert len(key2) == 32 return key1.bytes + key2 def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 48 data1, data2 = data[0:16], data[16:48] return uuid.UUID(bytes=data1), data2 class _Bytes32StringKeysMixin(object): def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == bytes assert len(key1) == 32 assert type(key2) == str assert len(key2) > 0 return key1 + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 32 data1, data2 = data[:32], data[32:] return data1, data2.decode('utf8') class _UuidUuidStringKeysMixin(object): def _serialize_key(self, key1_key2_key3): assert type(key1_key2_key3) == tuple and len(key1_key2_key3) == 3 key1, key2, key3 = key1_key2_key3 assert isinstance(key1, uuid.UUID) assert isinstance(key2, uuid.UUID) assert type(key3) == str return key1.bytes + key2.bytes + key3.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 32 data1, data2, data3 = data[:16], data[16:32], data[32:] return uuid.UUID(bytes=data1), uuid.UUID(bytes=data2), data3.decode('utf8') class _UuidUuidUuidStringKeysMixin(object): def _serialize_key(self, key1_key2_key3_key4): assert type(key1_key2_key3_key4) == tuple and
len(key1_key2_key3_key4) == 4 key1, key2, key3, key4 = key1_key2_key3_key4 assert isinstance(key1, uuid.UUID), 'key1 must be a UUID, was {}: {}'.format(type(key1), key1) assert isinstance(key2, uuid.UUID), 'key2 must be a UUID, was {}: {}'.format(type(key2), key2) assert isinstance(key3, uuid.UUID), 'key3 must be a UUID, was {}: {}'.format(type(key3), key3) assert type(key4) == str, 'key4 must be a str, was {}: {}'.format(type(key4), key4) return key1.bytes + key2.bytes + key3.bytes + key4.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) >= 48 data1, data2, data3, data4 = data[:16], data[16:32], data[32:48], data[48:] return uuid.UUID(bytes=data1), uuid.UUID(bytes=data2), uuid.UUID(bytes=data3), data4.decode('utf8') class _Bytes20KeysMixin(object): @staticmethod def new_key(): return os.urandom(20) def _serialize_key(self, key): assert type(key) == bytes and len(key) == 20, 'key must be bytes[20], was "{}"'.format(key) return key def _deserialize_key(self, data): assert type(data) == bytes and len(data) == 20, 'data must be bytes[20], was "{}"'.format(data) return data class _Bytes16KeysMixin(object): @staticmethod def new_key(): return os.urandom(16) def _serialize_key(self, key): assert type(key) == bytes and len(key) == 16, 'key must be bytes[16], was "{}"'.format(key) return key def _deserialize_key(self, data): assert type(data) == bytes and len(data) == 16, 'data must be bytes[16], was "{}"'.format(data) return data class _Bytes20Bytes20KeysMixin(object): @staticmethod def new_key(): return os.urandom(20), os.urandom(20) def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == bytes assert len(key1) == 20 assert type(key2) == bytes assert len(key2) == 20 return key1 + key2 def _deserialize_key(self, data): assert type(data) == bytes assert len(data) == 40 data1, data2 = data[0:20], data[20:40] return data1, data2 class _Bytes20StringKeysMixin(object): @staticmethod def new_key(): return os.urandom(20), binascii.b2a_base64(os.urandom(8)).decode().strip() def _serialize_key(self, key1_key2): assert type(key1_key2) == tuple and len(key1_key2) == 2 key1, key2 = key1_key2 assert type(key1) == bytes assert len(key1) == 20 assert type(key2) == str assert len(key2) > 0 return key1 + key2.encode('utf8') def _deserialize_key(self, data): assert type(data) == bytes assert len(data) > 20 data1, data2 = data[:20], data[20:] return data1, data2.decode('utf8') class _Bytes20TimestampKeysMixin(object): @staticmethod def new_key(): return os.urandom(20), np.datetime64(time_ns(), 'ns') def _serialize_key(self, keys): assert type(keys) == tuple, 'keys in {}._serialize_key must be a tuple, was: "{}"'.format( self.__class__.__name__, keys) assert len(keys) == 2 key1, key2 = keys if not key1: key1 = b'\x00' * 20 assert key1 is None or (type(key1) == bytes and len(key1) == 20) assert isinstance(key2, np.datetime64) return key1 + dt_to_bytes(key2) def _deserialize_key(self, data): assert data is None or (type(data) == bytes and len(data) == 28) if data: key1 = data[:20] key2 = bytes_to_dt(data[20:]) else: key1 = b'\x00' * 20 key2 = np.datetime64(0, 'ns') return key1, key2 class _Bytes16TimestampKeysMixin(object): @staticmethod def new_key(): return os.urandom(16), np.datetime64(time_ns(), 'ns') def _serialize_key(self, keys): assert type(keys) == tuple, 'keys in {}._serialize_key must be a tuple, was: "{}"'.format( self.__class__.__name__, keys) assert len(keys) == 2 key1, key2 = keys
if not key1: key1 = b'\x00' * 16 assert key1 is None or (type(key1) == bytes and len(key1) == 16) assert isinstance(key2, np.datetime64) return key1 + dt_to_bytes(key2) def _deserialize_key(self, data): assert data is None or (type(data) == bytes and len(data) == 24) if data: key1 = data[:16] key2 = bytes_to_dt(data[16:]) else: key1 = b'\x00' * 16 key2 = np.datetime64(0, 'ns') return key1, key2 class _Bytes16TimestampUuidKeysMixin(object): @staticmethod def new_key(): return os.urandom(16), np.datetime64(time_ns(), 'ns'), uuid.uuid4() def _serialize_key(self, keys): assert type(keys) == tuple, 'keys in {}._serialize_key must be a tuple, was: "{}"'.format( self.__class__.__name__, keys) assert len(keys) == 3 key1, key2, key3 = keys if not key1: key1 = b'\x00' * 16 assert key1 is None or (type(key1) == bytes and len(key1) == 16) assert isinstance(key2, np.datetime64) assert isinstance(key3, uuid.UUID) return key1 + dt_to_bytes(key2) + key3.bytes def _deserialize_key(self, data): assert data is None or (type(data) == bytes and len(data) == 40) if data: key1 = data[:16] key2 = bytes_to_dt(data[16:24]) key3 = uuid.UUID(bytes=data[24:]) else: key1 = b'\x00' * 16 key2 = np.datetime64(0, 'ns') key3 = uuid.UUID(bytes=b'\x00' * 16) return key1, key2, key3 # # Value Types # class _StringValuesMixin(object): def _serialize_value(self, value): assert value is None or type(value) == str if value is not None: return value.encode('utf8') else: return b'' def _deserialize_value(self, data): if data: return data.decode('utf8') else: return None class _StringSetValuesMixin(object): def _serialize_value(self, value_set): assert type(value_set) == set for v in value_set: assert v is None or type(v) == str return b'\0'.join([(value.encode('utf8') if value else b'') for value in value_set]) def _deserialize_value(self, data): assert type(data) == bytes return set([(d.decode('utf8') if d else None) for d in data.split(b'\0')]) class _OidValuesMixin(object): def _serialize_value(self, value): assert type(value) == int assert value >= 0 and value <= _OidKeysMixin.MAX_OID return struct.pack('>Q', value) def _deserialize_value(self, data): return struct.unpack('>Q', data)[0] class _OidSetValuesMixin(object): def _serialize_value(self, value_set): assert type(value_set) == set for value in value_set: assert value >= 0 and value <= _OidKeysMixin.MAX_OID return b''.join([struct.pack('>Q', value) for value in value_set]) def _deserialize_value(self, data): VLEN = 8 assert len(data) % VLEN == 0 cnt = len(data) // VLEN return set([struct.unpack('>Q', data[i:i + VLEN])[0] for i in range(0, cnt * VLEN, VLEN)]) class _UuidValuesMixin(object): def _serialize_value(self, value): assert value is None or isinstance(value, uuid.UUID), 'not a UUID - value "{}"'.format(value) # The UUID as a 16-byte string (containing the six integer fields in big-endian byte order).
# https://docs.python.org/3/library/uuid.html#uuid.UUID.bytes if value: return value.bytes else: return b'\x00' * 16 def _deserialize_value(self, data): assert data is None or type(data) == bytes if data: return uuid.UUID(bytes=data) else: return uuid.UUID(bytes=b'\x00' * 16) class _TimestampValuesMixin(object): def _serialize_value(self, value): assert value is None or isinstance(value, np.datetime64) if value: return dt_to_bytes(value) else: return b'\x00' * 8 def _deserialize_value(self, data): assert data is None or type(data) == bytes and len(data) == 8 if data: return bytes_to_dt(data) else: return None class _Bytes32ValuesMixin(object): def _serialize_value(self, value): assert value is None or (type(value) == bytes and len(value) == 32) if value: return value else: return b'\x00' * 32 def _deserialize_value(self, data): assert data is None or (type(data) == bytes and len(data) == 32) if data: return data else: return None class _Bytes20ValuesMixin(object): def _serialize_value(self, value): assert value is None or (type(value) == bytes and len(value) == 20) if value: return value else: return b'\x00' * 20 def _deserialize_value(self, data): assert data is None or (type(data) == bytes and len(data) == 20) if data: return data else: return None class _Bytes20TimestampValuesMixin(object): def _serialize_value(self, values): assert type(values) == tuple assert len(values) == 2 value1, value2 = values if not value1: value1 = b'\x00' * 20 assert value1 is None or (type(value1) == bytes and len(value1) == 20) assert isinstance(value2, np.datetime64) return value1 + dt_to_bytes(value2) def _deserialize_value(self, data): assert data is None or (type(data) == bytes and len(data) == 28) if data: value1 = data[:20] value2 = bytes_to_dt(data[20:]) else: value1 = b'\x00' * 20 value2 = np.datetime64(0, 'ns') return value1, value2 class _Bytes16ValuesMixin(object): def _serialize_value(self, value): assert value is None or (type(value) == bytes and len(value) == 16) if value: return value else: return b'\x00' * 16 def _deserialize_value(self, data): assert data is None or (type(data) == bytes and len(data) == 16) if data: return data else: return None class _UuidSetValuesMixin(object): def _serialize_value(self, value_set): assert type(value_set) == set return b''.join([value.bytes for value in value_set]) def _deserialize_value(self, data): VLEN = 16 assert len(data) % VLEN == 0 cnt = len(data) // VLEN return set([uuid.UUID(bytes=data[i:i + VLEN]) for i in range(0, cnt * VLEN, VLEN)]) class _JsonValuesMixin(object): def __init__(self, marshal=None, unmarshal=None): self._marshal = None if marshal: self._marshal = marshal else: if hasattr(self, '_zlmdb_marshal'): self._marshal = self._zlmdb_marshal assert self._marshal self._unmarshal = None if unmarshal: self._unmarshal = unmarshal else: if hasattr(self, '_zlmdb_unmarshal'): self._unmarshal = self._zlmdb_unmarshal assert self._unmarshal def _serialize_value(self, value): return json.dumps(self._marshal(value), separators=(',', ':'), ensure_ascii=False, sort_keys=False).encode('utf8') def _deserialize_value(self, data): return self._unmarshal(json.loads(data.decode('utf8'))) class _CborValuesMixin(object): def __init__(self, marshal=None, unmarshal=None): self._marshal = None if marshal: self._marshal = marshal else: if hasattr(self, '_zlmdb_marshal'): self._marshal = self._zlmdb_marshal assert self._marshal self._unmarshal = None if unmarshal: self._unmarshal = unmarshal else: if hasattr(self, '_zlmdb_unmarshal'): self._unmarshal =
self._zlmdb_unmarshal assert self._unmarshal def _serialize_value(self, value): return cbor2.dumps(self._marshal(value)) def _deserialize_value(self, data): return self._unmarshal(cbor2.loads(data)) class _PickleValuesMixin(object): # PROTOCOL = _NATIVE_PICKLE_PROTOCOL PROTOCOL = 2 def _serialize_value(self, value): return pickle.dumps(value, protocol=self.PROTOCOL) def _deserialize_value(self, data): return pickle.loads(data) class _FlatBuffersValuesMixin(object): def __init__(self, build, cast): self._build = build or self._zlmdb_build self._cast = cast or self._zlmdb_cast def _serialize_value(self, value): builder = flatbuffers.Builder(0) obj = self._build(value, builder) builder.Finish(obj) buf = builder.Output() return bytes(buf) def _deserialize_value(self, data): return self._cast(data) class _Pickle5ValuesMixin(object): """ Arbitrary Python object values, serialized using Pickle protocol version 5. Protocol version 5 was added in Python 3.8. It adds support for out-of-band data and speedup for in-band data. .. seealso:: * https://docs.python.org/3/library/pickle.html#data-stream-format * https://www.python.org/dev/peps/pep-0574/ """ PROTOCOL = 5 def _serialize_value(self, value): obj_buffers = [] obj_data = pickle.dumps(value, protocol=self.PROTOCOL, buffer_callback=obj_buffers.append) data = [] data.append(struct.pack('>I', len(obj_data))) data.append(obj_data) for d in obj_buffers: data.append(struct.pack('>I', len(d.raw()))) data.append(d) return b''.join(data) def _deserialize_value(self, data): data = memoryview(data) obj_buffers = [] obj_len = struct.unpack('>I', data[0:4])[0] obj_data = data[4:obj_len + 4] i = obj_len + 4 while i < len(data): buffer_len = struct.unpack('>I', data[i:i + 4])[0] buffer_data = data[i + 4:i + 4 + buffer_len] obj_buffers.append(buffer_data) i += 4 + buffer_len return pickle.loads(obj_data, buffers=obj_buffers) zlmdb-22.6.1/zlmdb/_version.py000066400000000000000000000024451426100523600162240ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### __version__ = '22.6.1' zlmdb-22.6.1/zlmdb/cli.py000066400000000000000000000031751426100523600151500ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### """Console script for zlmdb.""" import sys import click @click.command() def main(args=None): """Console script for zlmdb.""" click.echo("Replace this message by putting your code into " "zlmdb.cli.main") click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover zlmdb-22.6.1/zlmdb/flatbuffers/000077500000000000000000000000001426100523600163245ustar00rootroot00000000000000zlmdb-22.6.1/zlmdb/flatbuffers/__init__.py000066400000000000000000000004151426100523600204350ustar00rootroot00000000000000# Copyright (c) FlatBuffers Contributors, Apache License 2.0 # Copied from (master branch, 05/29/2022): # * https://github.com/google/flatbuffers/blob/master/reflection/reflection.fbs # * https://github.com/google/flatbuffers/tree/master/python/flatbuffers/reflection zlmdb-22.6.1/zlmdb/flatbuffers/reflection.fbs000066400000000000000000000101621426100523600211520ustar00rootroot00000000000000// This schema defines objects that represent a parsed schema, like // the binary version of a .fbs file. // This could be used to operate on unknown FlatBuffers at runtime. // It can even ... represent itself (!) namespace reflection; // These must correspond to the enum in idl.h. enum BaseType : byte { None, UType, Bool, Byte, UByte, Short, UShort, Int, UInt, Long, ULong, Float, Double, String, Vector, Obj, // Used for tables & structs. Union, Array, // Add any new type above this value. MaxBaseType } table Type { base_type:BaseType; element:BaseType = None; // Only if base_type == Vector // or base_type == Array. index:int = -1; // If base_type == Object, index into "objects" below. // If base_type == Union, UnionType, or integral derived // from an enum, index into "enums" below. // If base_type == Vector && element == Union or UnionType. fixed_length:uint16 = 0; // Only if base_type == Array. /// The size (octets) of the `base_type` field. base_size:uint = 4; // 4 Is a common size due to offsets being that size. 
/// The size (octets) of the `element` field, if present. element_size:uint = 0; } table KeyValue { key:string (required, key); value:string; } table EnumVal { name:string (required); value:long (key); object:Object (deprecated); union_type:Type; documentation:[string]; } table Enum { name:string (required, key); values:[EnumVal] (required); // In order of their values. is_union:bool = false; underlying_type:Type (required); attributes:[KeyValue]; documentation:[string]; /// File that this Enum is declared in. declaration_file: string; } table Field { name:string (required, key); type:Type (required); id:ushort; offset:ushort; // Offset into the vtable for tables, or into the struct. default_integer:long = 0; default_real:double = 0.0; deprecated:bool = false; required:bool = false; key:bool = false; attributes:[KeyValue]; documentation:[string]; optional:bool = false; /// Number of padding octets to always add after this field. Structs only. padding:uint16 = 0; } table Object { // Used for both tables and structs. name:string (required, key); fields:[Field] (required); // Sorted. is_struct:bool = false; minalign:int; bytesize:int; // For structs. attributes:[KeyValue]; documentation:[string]; /// File that this Object is declared in. declaration_file: string; } table RPCCall { name:string (required, key); request:Object (required); // must be a table (not a struct) response:Object (required); // must be a table (not a struct) attributes:[KeyValue]; documentation:[string]; } table Service { name:string (required, key); calls:[RPCCall]; attributes:[KeyValue]; documentation:[string]; /// File that this Service is declared in. declaration_file: string; } /// New schema language features that are not supported by old code generators. enum AdvancedFeatures : ulong (bit_flags) { AdvancedArrayFeatures, AdvancedUnionFeatures, OptionalScalars, DefaultVectorsAndStrings, } /// File specific information. /// Symbols declared within a file may be recovered by iterating over all /// symbols and examining the `declaration_file` field. table SchemaFile { /// Filename, relative to project root. filename:string (required, key); /// Names of included files, relative to project root. included_filenames:[string]; } table Schema { objects:[Object] (required); // Sorted. enums:[Enum] (required); // Sorted. file_ident:string; file_ext:string; root_table:Object; services:[Service]; // Sorted. advanced_features:AdvancedFeatures; /// All the files used in this compilation. Files are relative to where /// flatc was invoked. fbs_files:[SchemaFile]; // Sorted. } root_type Schema; file_identifier "BFBS"; file_extension "bfbs"; zlmdb-22.6.1/zlmdb/flatbuffers/reflection/000077500000000000000000000000001426100523600204565ustar00rootroot00000000000000zlmdb-22.6.1/zlmdb/flatbuffers/reflection/AdvancedFeatures.py000066400000000000000000000005021426100523600242310ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection # New schema language features that are not supported by old code generators. 
class AdvancedFeatures(object): AdvancedArrayFeatures = 1 AdvancedUnionFeatures = 2 OptionalScalars = 4 DefaultVectorsAndStrings = 8 zlmdb-22.6.1/zlmdb/flatbuffers/reflection/BaseType.py000066400000000000000000000006141426100523600225450ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection class BaseType(object): None_ = 0 UType = 1 Bool = 2 Byte = 3 UByte = 4 Short = 5 UShort = 6 Int = 7 UInt = 8 Long = 9 ULong = 10 Float = 11 Double = 12 String = 13 Vector = 14 Obj = 15 Union = 16 Array = 17 MaxBaseType = 18 zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Enum.py000066400000000000000000000147231426100523600217430ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class Enum(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Enum() x.Init(buf, n + offset) return x @classmethod def GetRootAsEnum(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def EnumBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Enum def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Enum def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Enum def Values(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.EnumVal import EnumVal obj = EnumVal() obj.Init(self._tab.Bytes, x) return obj return None # Enum def ValuesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # Enum def ValuesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 # Enum def IsUnion(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Enum def UnderlyingType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Type import Type obj = Type() obj.Init(self._tab.Bytes, x) return obj return None # Enum def Attributes(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.KeyValue import KeyValue obj = KeyValue() obj.Init(self._tab.Bytes, x) return obj return None # Enum def AttributesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.VectorLen(o) return 0 # Enum def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) return o == 0 # Enum def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: a = self._tab.Vector(o) return 
self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # Enum def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.VectorLen(o) return 0 # Enum def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 # File that this Enum is declared in. # Enum def DeclarationFile(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.String(o + self._tab.Pos) return None def EnumStart(builder): builder.StartObject(7) def Start(builder): return EnumStart(builder) def EnumAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return EnumAddName(builder, name) def EnumAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) def AddValues(builder, values): return EnumAddValues(builder, values) def EnumStartValuesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartValuesVector(builder, numElems): return EnumStartValuesVector(builder, numElems) def EnumAddIsUnion(builder, isUnion): builder.PrependBoolSlot(2, isUnion, 0) def AddIsUnion(builder, isUnion): return EnumAddIsUnion(builder, isUnion) def EnumAddUnderlyingType(builder, underlyingType): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(underlyingType), 0) def AddUnderlyingType(builder, underlyingType): return EnumAddUnderlyingType(builder, underlyingType) def EnumAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) def AddAttributes(builder, attributes): return EnumAddAttributes(builder, attributes) def EnumStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartAttributesVector(builder, numElems): return EnumStartAttributesVector(builder, numElems) def EnumAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 0) def AddDocumentation(builder, documentation): return EnumAddDocumentation(builder, documentation) def EnumStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return EnumStartDocumentationVector(builder, numElems) def EnumAddDeclarationFile(builder, declarationFile): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(declarationFile), 0) def AddDeclarationFile(builder, declarationFile): return EnumAddDeclarationFile(builder, declarationFile) def EnumEnd(builder): return builder.EndObject() def End(builder): return EnumEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/EnumVal.py000066400000000000000000000070771426100523600224120ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class EnumVal(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = EnumVal() x.Init(buf, n + offset) return x @classmethod def GetRootAsEnumVal(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def EnumValBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # EnumVal def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # EnumVal def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # EnumVal def Value(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 # EnumVal def UnionType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Type import Type obj = Type() obj.Init(self._tab.Bytes, x) return obj return None # EnumVal def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # EnumVal def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.VectorLen(o) return 0 # EnumVal def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) return o == 0 def EnumValStart(builder): builder.StartObject(5) def Start(builder): return EnumValStart(builder) def EnumValAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return EnumValAddName(builder, name) def EnumValAddValue(builder, value): builder.PrependInt64Slot(1, value, 0) def AddValue(builder, value): return EnumValAddValue(builder, value) def EnumValAddUnionType(builder, unionType): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(unionType), 0) def AddUnionType(builder, unionType): return EnumValAddUnionType(builder, unionType) def EnumValAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 0) def AddDocumentation(builder, documentation): return EnumValAddDocumentation(builder, documentation) def EnumValStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return EnumValStartDocumentationVector(builder, numElems) def EnumValEnd(builder): return builder.EndObject() def End(builder): return EnumValEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Field.py000066400000000000000000000200151426100523600220510ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class Field(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Field() x.Init(buf, n + offset) return x @classmethod def GetRootAsField(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def FieldBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Field def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Field def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Field def Type(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Type import Type obj = Type() obj.Init(self._tab.Bytes, x) return obj return None # Field def Id(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # Field def Offset(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # Field def DefaultInteger(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 # Field def DefaultReal(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) return 0.0 # Field def Deprecated(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Field def Required(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Field def Key(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Field def Attributes(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.KeyValue import KeyValue obj = KeyValue() obj.Init(self._tab.Bytes, x) return obj return None # Field def AttributesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: return self._tab.VectorLen(o) return 0 # Field def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) return o == 0 # Field def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # Field def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: return self._tab.VectorLen(o) return 0 # Field def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) return o == 0 # Field def Optional(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Number 
of padding octets to always add after this field. Structs only. # Field def Padding(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 def FieldStart(builder): builder.StartObject(13) def Start(builder): return FieldStart(builder) def FieldAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return FieldAddName(builder, name) def FieldAddType(builder, type): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(type), 0) def AddType(builder, type): return FieldAddType(builder, type) def FieldAddId(builder, id): builder.PrependUint16Slot(2, id, 0) def AddId(builder, id): return FieldAddId(builder, id) def FieldAddOffset(builder, offset): builder.PrependUint16Slot(3, offset, 0) def AddOffset(builder, offset): return FieldAddOffset(builder, offset) def FieldAddDefaultInteger(builder, defaultInteger): builder.PrependInt64Slot(4, defaultInteger, 0) def AddDefaultInteger(builder, defaultInteger): return FieldAddDefaultInteger(builder, defaultInteger) def FieldAddDefaultReal(builder, defaultReal): builder.PrependFloat64Slot(5, defaultReal, 0.0) def AddDefaultReal(builder, defaultReal): return FieldAddDefaultReal(builder, defaultReal) def FieldAddDeprecated(builder, deprecated): builder.PrependBoolSlot(6, deprecated, 0) def AddDeprecated(builder, deprecated): return FieldAddDeprecated(builder, deprecated) def FieldAddRequired(builder, required): builder.PrependBoolSlot(7, required, 0) def AddRequired(builder, required): return FieldAddRequired(builder, required) def FieldAddKey(builder, key): builder.PrependBoolSlot(8, key, 0) def AddKey(builder, key): return FieldAddKey(builder, key) def FieldAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) def AddAttributes(builder, attributes): return FieldAddAttributes(builder, attributes) def FieldStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartAttributesVector(builder, numElems): return FieldStartAttributesVector(builder, numElems) def FieldAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 0) def AddDocumentation(builder, documentation): return FieldAddDocumentation(builder, documentation) def FieldStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return FieldStartDocumentationVector(builder, numElems) def FieldAddOptional(builder, optional): builder.PrependBoolSlot(11, optional, 0) def AddOptional(builder, optional): return FieldAddOptional(builder, optional) def FieldAddPadding(builder, padding): builder.PrependUint16Slot(12, padding, 0) def AddPadding(builder, padding): return FieldAddPadding(builder, padding) def FieldEnd(builder): return builder.EndObject() def End(builder): return FieldEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/KeyValue.py000066400000000000000000000036351426100523600225640ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class KeyValue(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): 
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = KeyValue()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsKeyValue(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)

    @classmethod
    def KeyValueBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed)

    # KeyValue
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # KeyValue
    def Key(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # KeyValue
    def Value(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None


def KeyValueStart(builder):
    builder.StartObject(2)

def Start(builder):
    return KeyValueStart(builder)

def KeyValueAddKey(builder, key):
    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0)

def AddKey(builder, key):
    return KeyValueAddKey(builder, key)

def KeyValueAddValue(builder, value):
    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0)

def AddValue(builder, value):
    return KeyValueAddValue(builder, value)

def KeyValueEnd(builder):
    return builder.EndObject()

def End(builder):
    return KeyValueEnd(builder)

zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Object.py

# automatically generated by the FlatBuffers compiler, do not modify
# namespace: reflection

import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()


class Object(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Object()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsObject(cls, buf, offset=0):
        """This method is deprecated.
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def ObjectBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Object def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Object def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Object def Fields(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.Field import Field obj = Field() obj.Init(self._tab.Bytes, x) return obj return None # Object def FieldsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # Object def FieldsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 # Object def IsStruct(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False # Object def Minalign(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 # Object def Bytesize(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 # Object def Attributes(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.KeyValue import KeyValue obj = KeyValue() obj.Init(self._tab.Bytes, x) return obj return None # Object def AttributesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.VectorLen(o) return 0 # Object def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 # Object def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # Object def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.VectorLen(o) return 0 # Object def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) return o == 0 # File that this Object is declared in. 
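    # --- Usage sketch (not generated code; illustrative only). The reflection
    # tables above can be walked after loading a binary schema. "example.bfbs"
    # is a hypothetical file produced with `flatc --binary --schema example.fbs`;
    # its root table is reflection.Schema (defined later in this package):
    #
    #     from zlmdb.flatbuffers.reflection.Schema import Schema
    #
    #     with open('example.bfbs', 'rb') as f:
    #         schema = Schema.GetRootAs(f.read(), 0)
    #     for i in range(schema.ObjectsLength()):
    #         obj = schema.Objects(i)
    #         print(obj.Name().decode(), 'struct' if obj.IsStruct() else 'table')
    #         for j in range(obj.FieldsLength()):
    #             field = obj.Fields(j)
    #             # Type().BaseType() is an integer code from reflection.BaseType
    #             print('  field', field.Id(), field.Name().decode())
    # --- (the DeclarationFile accessor documented above follows)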
# Object def DeclarationFile(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.String(o + self._tab.Pos) return None def ObjectStart(builder): builder.StartObject(8) def Start(builder): return ObjectStart(builder) def ObjectAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return ObjectAddName(builder, name) def ObjectAddFields(builder, fields): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(fields), 0) def AddFields(builder, fields): return ObjectAddFields(builder, fields) def ObjectStartFieldsVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartFieldsVector(builder, numElems): return ObjectStartFieldsVector(builder, numElems) def ObjectAddIsStruct(builder, isStruct): builder.PrependBoolSlot(2, isStruct, 0) def AddIsStruct(builder, isStruct): return ObjectAddIsStruct(builder, isStruct) def ObjectAddMinalign(builder, minalign): builder.PrependInt32Slot(3, minalign, 0) def AddMinalign(builder, minalign): return ObjectAddMinalign(builder, minalign) def ObjectAddBytesize(builder, bytesize): builder.PrependInt32Slot(4, bytesize, 0) def AddBytesize(builder, bytesize): return ObjectAddBytesize(builder, bytesize) def ObjectAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) def AddAttributes(builder, attributes): return ObjectAddAttributes(builder, attributes) def ObjectStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartAttributesVector(builder, numElems): return ObjectStartAttributesVector(builder, numElems) def ObjectAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 0) def AddDocumentation(builder, documentation): return ObjectAddDocumentation(builder, documentation) def ObjectStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return ObjectStartDocumentationVector(builder, numElems) def ObjectAddDeclarationFile(builder, declarationFile): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(declarationFile), 0) def AddDeclarationFile(builder, declarationFile): return ObjectAddDeclarationFile(builder, declarationFile) def ObjectEnd(builder): return builder.EndObject() def End(builder): return ObjectEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/RPCCall.py000066400000000000000000000117751426100523600222630ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class RPCCall(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = RPCCall() x.Init(buf, n + offset) return x @classmethod def GetRootAsRPCCall(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def RPCCallBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # RPCCall def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # RPCCall def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # RPCCall def Request(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Object import Object obj = Object() obj.Init(self._tab.Bytes, x) return obj return None # RPCCall def Response(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Object import Object obj = Object() obj.Init(self._tab.Bytes, x) return obj return None # RPCCall def Attributes(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.KeyValue import KeyValue obj = KeyValue() obj.Init(self._tab.Bytes, x) return obj return None # RPCCall def AttributesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.VectorLen(o) return 0 # RPCCall def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) return o == 0 # RPCCall def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # RPCCall def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.VectorLen(o) return 0 # RPCCall def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) return o == 0 def RPCCallStart(builder): builder.StartObject(5) def Start(builder): return RPCCallStart(builder) def RPCCallAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return RPCCallAddName(builder, name) def RPCCallAddRequest(builder, request): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(request), 0) def AddRequest(builder, request): return RPCCallAddRequest(builder, request) def RPCCallAddResponse(builder, response): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(response), 0) def AddResponse(builder, response): return RPCCallAddResponse(builder, response) def RPCCallAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) def AddAttributes(builder, attributes): return RPCCallAddAttributes(builder, attributes) def RPCCallStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartAttributesVector(builder, numElems): return RPCCallStartAttributesVector(builder, numElems) def RPCCallAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 
0) def AddDocumentation(builder, documentation): return RPCCallAddDocumentation(builder, documentation) def RPCCallStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return RPCCallStartDocumentationVector(builder, numElems) def RPCCallEnd(builder): return builder.EndObject() def End(builder): return RPCCallEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Schema.py000066400000000000000000000175601426100523600222410ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class Schema(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Schema() x.Init(buf, n + offset) return x @classmethod def GetRootAsSchema(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def SchemaBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Schema def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Schema def Objects(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.Object import Object obj = Object() obj.Init(self._tab.Bytes, x) return obj return None # Schema def ObjectsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.VectorLen(o) return 0 # Schema def ObjectsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 # Schema def Enums(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.Enum import Enum obj = Enum() obj.Init(self._tab.Bytes, x) return obj return None # Schema def EnumsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # Schema def EnumsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 # Schema def FileIdent(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Schema def FileExt(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Schema def RootTable(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: x = self._tab.Indirect(o + self._tab.Pos) from zlmdb.flatbuffers.reflection.Object import Object obj = Object() obj.Init(self._tab.Bytes, x) return obj return None # Schema def Services(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.Service import Service obj = Service() obj.Init(self._tab.Bytes, x) return obj return None # Schema def 
ServicesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.VectorLen(o) return 0 # Schema def ServicesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) return o == 0 # Schema def AdvancedFeatures(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # All the files used in this compilation. Files are relative to where # flatc was invoked. # Schema def FbsFiles(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.SchemaFile import SchemaFile obj = SchemaFile() obj.Init(self._tab.Bytes, x) return obj return None # Schema def FbsFilesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.VectorLen(o) return 0 # Schema def FbsFilesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) return o == 0 def SchemaStart(builder): builder.StartObject(8) def Start(builder): return SchemaStart(builder) def SchemaAddObjects(builder, objects): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(objects), 0) def AddObjects(builder, objects): return SchemaAddObjects(builder, objects) def SchemaStartObjectsVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartObjectsVector(builder, numElems): return SchemaStartObjectsVector(builder, numElems) def SchemaAddEnums(builder, enums): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(enums), 0) def AddEnums(builder, enums): return SchemaAddEnums(builder, enums) def SchemaStartEnumsVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartEnumsVector(builder, numElems): return SchemaStartEnumsVector(builder, numElems) def SchemaAddFileIdent(builder, fileIdent): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(fileIdent), 0) def AddFileIdent(builder, fileIdent): return SchemaAddFileIdent(builder, fileIdent) def SchemaAddFileExt(builder, fileExt): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(fileExt), 0) def AddFileExt(builder, fileExt): return SchemaAddFileExt(builder, fileExt) def SchemaAddRootTable(builder, rootTable): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(rootTable), 0) def AddRootTable(builder, rootTable): return SchemaAddRootTable(builder, rootTable) def SchemaAddServices(builder, services): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(services), 0) def AddServices(builder, services): return SchemaAddServices(builder, services) def SchemaStartServicesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartServicesVector(builder, numElems): return SchemaStartServicesVector(builder, numElems) def SchemaAddAdvancedFeatures(builder, advancedFeatures): builder.PrependUint64Slot(6, advancedFeatures, 0) def AddAdvancedFeatures(builder, advancedFeatures): return SchemaAddAdvancedFeatures(builder, advancedFeatures) def SchemaAddFbsFiles(builder, fbsFiles): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(fbsFiles), 0) def AddFbsFiles(builder, 
fbsFiles): return SchemaAddFbsFiles(builder, fbsFiles) def SchemaStartFbsFilesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartFbsFilesVector(builder, numElems): return SchemaStartFbsFilesVector(builder, numElems) def SchemaEnd(builder): return builder.EndObject() def End(builder): return SchemaEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/SchemaFile.py000066400000000000000000000057501426100523600230370ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() # File specific information. # Symbols declared within a file may be recovered by iterating over all # symbols and examining the `declaration_file` field. class SchemaFile(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = SchemaFile() x.Init(buf, n + offset) return x @classmethod def GetRootAsSchemaFile(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def SchemaFileBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # SchemaFile def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Filename, relative to project root. # SchemaFile def Filename(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Names of included files, relative to project root. # SchemaFile def IncludedFilenames(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # SchemaFile def IncludedFilenamesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # SchemaFile def IncludedFilenamesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 def SchemaFileStart(builder): builder.StartObject(2) def Start(builder): return SchemaFileStart(builder) def SchemaFileAddFilename(builder, filename): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0) def AddFilename(builder, filename): return SchemaFileAddFilename(builder, filename) def SchemaFileAddIncludedFilenames(builder, includedFilenames): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(includedFilenames), 0) def AddIncludedFilenames(builder, includedFilenames): return SchemaFileAddIncludedFilenames(builder, includedFilenames) def SchemaFileStartIncludedFilenamesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartIncludedFilenamesVector(builder, numElems): return SchemaFileStartIncludedFilenamesVector(builder, numElems) def SchemaFileEnd(builder): return builder.EndObject() def End(builder): return SchemaFileEnd(builder)zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Service.py000066400000000000000000000130461426100523600224340ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class Service(object): 
__slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Service() x.Init(buf, n + offset) return x @classmethod def GetRootAsService(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def ServiceBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Service def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Service def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None # Service def Calls(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.RPCCall import RPCCall obj = RPCCall() obj.Init(self._tab.Bytes, x) return obj return None # Service def CallsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # Service def CallsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 # Service def Attributes(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: x = self._tab.Vector(o) x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 x = self._tab.Indirect(x) from zlmdb.flatbuffers.reflection.KeyValue import KeyValue obj = KeyValue() obj.Init(self._tab.Bytes, x) return obj return None # Service def AttributesLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.VectorLen(o) return 0 # Service def AttributesIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 # Service def Documentation(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: a = self._tab.Vector(o) return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return "" # Service def DocumentationLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.VectorLen(o) return 0 # Service def DocumentationIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) return o == 0 # File that this Service is declared in. 
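    # --- Usage sketch (not generated code; illustrative only): enumerate RPC
    # services and their calls from a loaded schema ("example.bfbs" is a
    # hypothetical compiled schema file):
    #
    #     from zlmdb.flatbuffers.reflection.Schema import Schema
    #
    #     with open('example.bfbs', 'rb') as f:
    #         schema = Schema.GetRootAs(f.read(), 0)
    #     for i in range(schema.ServicesLength()):
    #         svc = schema.Services(i)
    #         for j in range(svc.CallsLength()):
    #             call = svc.Calls(j)
    #             print(svc.Name().decode(), call.Name().decode(),
    #                   call.Request().Name().decode(), '->',
    #                   call.Response().Name().decode())
    # --- (the DeclarationFile accessor documented above follows)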
# Service def DeclarationFile(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.String(o + self._tab.Pos) return None def ServiceStart(builder): builder.StartObject(5) def Start(builder): return ServiceStart(builder) def ServiceAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) def AddName(builder, name): return ServiceAddName(builder, name) def ServiceAddCalls(builder, calls): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(calls), 0) def AddCalls(builder, calls): return ServiceAddCalls(builder, calls) def ServiceStartCallsVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartCallsVector(builder, numElems): return ServiceStartCallsVector(builder, numElems) def ServiceAddAttributes(builder, attributes): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0) def AddAttributes(builder, attributes): return ServiceAddAttributes(builder, attributes) def ServiceStartAttributesVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartAttributesVector(builder, numElems): return ServiceStartAttributesVector(builder, numElems) def ServiceAddDocumentation(builder, documentation): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(documentation), 0) def AddDocumentation(builder, documentation): return ServiceAddDocumentation(builder, documentation) def ServiceStartDocumentationVector(builder, numElems): return builder.StartVector(4, numElems, 4) def StartDocumentationVector(builder, numElems): return ServiceStartDocumentationVector(builder, numElems) def ServiceAddDeclarationFile(builder, declarationFile): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(declarationFile), 0) def AddDeclarationFile(builder, declarationFile): return ServiceAddDeclarationFile(builder, declarationFile) def ServiceEnd(builder): return builder.EndObject() def End(builder): return ServiceEnd(builder) zlmdb-22.6.1/zlmdb/flatbuffers/reflection/Type.py000066400000000000000000000070761426100523600217630ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: reflection import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class Type(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = Type() x.Init(buf, n + offset) return x @classmethod def GetRootAsType(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod def TypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x42\x46\x42\x53", size_prefixed=size_prefixed) # Type def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # Type def BaseType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 # Type def Element(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 # Type def Index(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return -1 # Type def FixedLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # The size (octets) of the `base_type` field. # Type def BaseSize(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 4 # The size (octets) of the `element` field, if present. # Type def ElementSize(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 def TypeStart(builder): builder.StartObject(6) def Start(builder): return TypeStart(builder) def TypeAddBaseType(builder, baseType): builder.PrependInt8Slot(0, baseType, 0) def AddBaseType(builder, baseType): return TypeAddBaseType(builder, baseType) def TypeAddElement(builder, element): builder.PrependInt8Slot(1, element, 0) def AddElement(builder, element): return TypeAddElement(builder, element) def TypeAddIndex(builder, index): builder.PrependInt32Slot(2, index, -1) def AddIndex(builder, index): return TypeAddIndex(builder, index) def TypeAddFixedLength(builder, fixedLength): builder.PrependUint16Slot(3, fixedLength, 0) def AddFixedLength(builder, fixedLength): return TypeAddFixedLength(builder, fixedLength) def TypeAddBaseSize(builder, baseSize): builder.PrependUint32Slot(4, baseSize, 4) def AddBaseSize(builder, baseSize): return TypeAddBaseSize(builder, baseSize) def TypeAddElementSize(builder, elementSize): builder.PrependUint32Slot(5, elementSize, 0) def AddElementSize(builder, elementSize): return TypeAddElementSize(builder, elementSize) def TypeEnd(builder): return builder.EndObject() def End(builder): return TypeEnd(builder)zlmdb-22.6.1/zlmdb/flatbuffers/reflection/__init__.py000066400000000000000000000000001426100523600225550ustar00rootroot00000000000000zlmdb-22.6.1/zlmdb/tests/000077500000000000000000000000001426100523600151635ustar00rootroot00000000000000zlmdb-22.6.1/zlmdb/tests/MNodeLog.py000066400000000000000000000561301426100523600172060ustar00rootroot00000000000000# automatically generated by the FlatBuffers compiler, do not modify # namespace: log import flatbuffers # /// Logs of runs (from node start to end) of managed CF nodes . 
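# --- Usage sketch (not part of the generated code; field values are made up
# for illustration): build a minimal MNodeLog record with a flatbuffers.Builder
# and read it back via GetRootAsMNodeLog. The MNodeLog class itself follows below.
def _example_mnodelog_roundtrip():
    builder = flatbuffers.Builder(256)
    MNodeLogStart(builder)
    MNodeLogAddTimestamp(builder, 1656000000000000000)  # Unix time in ns (example)
    MNodeLogAddSeq(builder, 1)
    MNodeLogAddRouters(builder, 2)
    log = MNodeLogEnd(builder)
    builder.Finish(log)
    buf = builder.Output()

    obj = MNodeLog.GetRootAsMNodeLog(buf, 0)
    assert obj.Timestamp() == 1656000000000000000
    assert obj.Seq() == 1
    assert obj.Routers() == 2
    return obj
# ---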
class MNodeLog(object): __slots__ = ['_tab'] @classmethod def GetRootAsMNodeLog(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = MNodeLog() x.Init(buf, n + offset) return x # MNodeLog def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # /// Unix time in ns when this log record was received (from CFC node clock). # MNodeLog def Timestamp(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # /// CF node ID. # MNodeLog def NodeId(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # MNodeLog def NodeIdAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 # MNodeLog def NodeIdLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # /// CFC run ID (this is unique over all start-stop cycles of CFC, and constant per run). # MNodeLog def RunId(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # MNodeLog def RunIdAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 # MNodeLog def RunIdLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.VectorLen(o) return 0 # /// Current state of CF node. # MNodeLog def State(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 # /// When the state is ENDED, the end timestamp (Unix time in ns). # MNodeLog def Ended(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # /// WAMP session ID of the CF node uplink management session to this CFC instance. # MNodeLog def Session(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # /// Unix time in ns. This timestamp is from the original received event payload (from CF node clock). # MNodeLog def Sent(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # /// Sequence number as sent in the log record by the CF node (started at 0 for CF start and incremented by one on each heartbeat). # MNodeLog def Seq(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # /// Number of router workers currently running in the CF node. 
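    # --- Note (not generated code): each worker-count getter below returns the
    # FlatBuffers default (0) when the field is absent from the buffer, so a
    # total can be computed without presence checks, e.g.:
    #
    #     log = MNodeLog.GetRootAsMNodeLog(buf, 0)
    #     workers = (log.Routers() + log.Containers() + log.Guests() +
    #                log.Proxies() + log.Marketmakers())
    # ---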
# MNodeLog def Routers(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # /// Number of container workers currently running in the CF node. # MNodeLog def Containers(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # /// Number of guest workers currently running in the CF node. # MNodeLog def Guests(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # /// Number of proxy workers currently running in the CF node. # MNodeLog def Proxies(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # /// Number of XBR market maker workers currently running in the CF node. # MNodeLog def Marketmakers(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) return 0 # /// CF node system statistics. # MNodeLog def CpuCtxSwitches(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def CpuFreq(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuGuest(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuGuestNice(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuIdle(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuInterrupts(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(40)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def CpuIowait(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(42)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuIrq(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuNice(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(46)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuSoftInterrupts(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(48)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def CpuSoftirq(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(50)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, 
o + self._tab.Pos) return 0.0 # MNodeLog def CpuSteal(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(52)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuSystem(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(54)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def CpuUser(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(56)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def DiskBusyTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(58)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskReadBytes(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(60)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskReadCount(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(62)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskReadMergedCount(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(64)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskReadTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(66)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskWriteBytes(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(68)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskWriteCount(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(70)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskWriteMergedCount(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(72)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def DiskWriteTime(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(74)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryActive(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(76)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryAvailable(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(78)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryBuffers(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(80)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryCached(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(82)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryFree(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(84)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # 
MNodeLog def MemoryInactive(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(86)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryPercent(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(88)) if o != 0: return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) return 0.0 # MNodeLog def MemoryShared(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(90)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemorySlab(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(92)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryTotal(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(94)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def MemoryUsed(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(96)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkBytesRecv(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(98)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkBytesSent(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(100)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkConnectionAfInet(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(102)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkConnectionAfInet6(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(104)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkConnectionAfUnix(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(106)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkDropin(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(108)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkDropout(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(110)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkErrin(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(112)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkErrout(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(114)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkPacketsRecv(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(116)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) return 0 # MNodeLog def NetworkPacketsSent(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(118)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + 
self._tab.Pos) return 0 def MNodeLogStart(builder): builder.StartObject(58) def MNodeLogAddTimestamp(builder, timestamp): builder.PrependUint64Slot(0, timestamp, 0) def MNodeLogAddNodeId(builder, nodeId): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodeId), 0) def MNodeLogStartNodeIdVector(builder, numElems): return builder.StartVector(1, numElems, 1) def MNodeLogAddRunId(builder, runId): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(runId), 0) def MNodeLogStartRunIdVector(builder, numElems): return builder.StartVector(1, numElems, 1) def MNodeLogAddState(builder, state): builder.PrependUint8Slot(3, state, 0) def MNodeLogAddEnded(builder, ended): builder.PrependUint64Slot(4, ended, 0) def MNodeLogAddSession(builder, session): builder.PrependUint64Slot(5, session, 0) def MNodeLogAddSent(builder, sent): builder.PrependUint64Slot(6, sent, 0) def MNodeLogAddSeq(builder, seq): builder.PrependUint64Slot(7, seq, 0) def MNodeLogAddRouters(builder, routers): builder.PrependUint16Slot(8, routers, 0) def MNodeLogAddContainers(builder, containers): builder.PrependUint16Slot(9, containers, 0) def MNodeLogAddGuests(builder, guests): builder.PrependUint16Slot(10, guests, 0) def MNodeLogAddProxies(builder, proxies): builder.PrependUint16Slot(11, proxies, 0) def MNodeLogAddMarketmakers(builder, marketmakers): builder.PrependUint16Slot(12, marketmakers, 0) def MNodeLogAddCpuCtxSwitches(builder, cpuCtxSwitches): builder.PrependUint64Slot(13, cpuCtxSwitches, 0) def MNodeLogAddCpuFreq(builder, cpuFreq): builder.PrependFloat32Slot(14, cpuFreq, 0.0) def MNodeLogAddCpuGuest(builder, cpuGuest): builder.PrependFloat32Slot(15, cpuGuest, 0.0) def MNodeLogAddCpuGuestNice(builder, cpuGuestNice): builder.PrependFloat32Slot(16, cpuGuestNice, 0.0) def MNodeLogAddCpuIdle(builder, cpuIdle): builder.PrependFloat32Slot(17, cpuIdle, 0.0) def MNodeLogAddCpuInterrupts(builder, cpuInterrupts): builder.PrependUint64Slot(18, cpuInterrupts, 0) def MNodeLogAddCpuIowait(builder, cpuIowait): builder.PrependFloat32Slot(19, cpuIowait, 0.0) def MNodeLogAddCpuIrq(builder, cpuIrq): builder.PrependFloat32Slot(20, cpuIrq, 0.0) def MNodeLogAddCpuNice(builder, cpuNice): builder.PrependFloat32Slot(21, cpuNice, 0.0) def MNodeLogAddCpuSoftInterrupts(builder, cpuSoftInterrupts): builder.PrependUint64Slot(22, cpuSoftInterrupts, 0) def MNodeLogAddCpuSoftirq(builder, cpuSoftirq): builder.PrependFloat32Slot(23, cpuSoftirq, 0.0) def MNodeLogAddCpuSteal(builder, cpuSteal): builder.PrependFloat32Slot(24, cpuSteal, 0.0) def MNodeLogAddCpuSystem(builder, cpuSystem): builder.PrependFloat32Slot(25, cpuSystem, 0.0) def MNodeLogAddCpuUser(builder, cpuUser): builder.PrependFloat32Slot(26, cpuUser, 0.0) def MNodeLogAddDiskBusyTime(builder, diskBusyTime): builder.PrependUint64Slot(27, diskBusyTime, 0) def MNodeLogAddDiskReadBytes(builder, diskReadBytes): builder.PrependUint64Slot(28, diskReadBytes, 0) def MNodeLogAddDiskReadCount(builder, diskReadCount): builder.PrependUint64Slot(29, diskReadCount, 0) def MNodeLogAddDiskReadMergedCount(builder, diskReadMergedCount): builder.PrependUint64Slot(30, diskReadMergedCount, 0) def MNodeLogAddDiskReadTime(builder, diskReadTime): builder.PrependUint64Slot(31, diskReadTime, 0) def MNodeLogAddDiskWriteBytes(builder, diskWriteBytes): builder.PrependUint64Slot(32, diskWriteBytes, 0) def MNodeLogAddDiskWriteCount(builder, diskWriteCount): builder.PrependUint64Slot(33, diskWriteCount, 0) def MNodeLogAddDiskWriteMergedCount(builder, 
diskWriteMergedCount): builder.PrependUint64Slot(34, diskWriteMergedCount, 0) def MNodeLogAddDiskWriteTime(builder, diskWriteTime): builder.PrependUint64Slot(35, diskWriteTime, 0) def MNodeLogAddMemoryActive(builder, memoryActive): builder.PrependUint64Slot(36, memoryActive, 0) def MNodeLogAddMemoryAvailable(builder, memoryAvailable): builder.PrependUint64Slot(37, memoryAvailable, 0) def MNodeLogAddMemoryBuffers(builder, memoryBuffers): builder.PrependUint64Slot(38, memoryBuffers, 0) def MNodeLogAddMemoryCached(builder, memoryCached): builder.PrependUint64Slot(39, memoryCached, 0) def MNodeLogAddMemoryFree(builder, memoryFree): builder.PrependUint64Slot(40, memoryFree, 0) def MNodeLogAddMemoryInactive(builder, memoryInactive): builder.PrependUint64Slot(41, memoryInactive, 0) def MNodeLogAddMemoryPercent(builder, memoryPercent): builder.PrependFloat32Slot(42, memoryPercent, 0.0) def MNodeLogAddMemoryShared(builder, memoryShared): builder.PrependUint64Slot(43, memoryShared, 0) def MNodeLogAddMemorySlab(builder, memorySlab): builder.PrependUint64Slot(44, memorySlab, 0) def MNodeLogAddMemoryTotal(builder, memoryTotal): builder.PrependUint64Slot(45, memoryTotal, 0) def MNodeLogAddMemoryUsed(builder, memoryUsed): builder.PrependUint64Slot(46, memoryUsed, 0) def MNodeLogAddNetworkBytesRecv(builder, networkBytesRecv): builder.PrependUint64Slot(47, networkBytesRecv, 0) def MNodeLogAddNetworkBytesSent(builder, networkBytesSent): builder.PrependUint64Slot(48, networkBytesSent, 0) def MNodeLogAddNetworkConnectionAfInet(builder, networkConnectionAfInet): builder.PrependUint32Slot(49, networkConnectionAfInet, 0) def MNodeLogAddNetworkConnectionAfInet6(builder, networkConnectionAfInet6): builder.PrependUint32Slot(50, networkConnectionAfInet6, 0) def MNodeLogAddNetworkConnectionAfUnix(builder, networkConnectionAfUnix): builder.PrependUint32Slot(51, networkConnectionAfUnix, 0) def MNodeLogAddNetworkDropin(builder, networkDropin): builder.PrependUint32Slot(52, networkDropin, 0) def MNodeLogAddNetworkDropout(builder, networkDropout): builder.PrependUint32Slot(53, networkDropout, 0) def MNodeLogAddNetworkErrin(builder, networkErrin): builder.PrependUint32Slot(54, networkErrin, 0) def MNodeLogAddNetworkErrout(builder, networkErrout): builder.PrependUint32Slot(55, networkErrout, 0) def MNodeLogAddNetworkPacketsRecv(builder, networkPacketsRecv): builder.PrependUint64Slot(56, networkPacketsRecv, 0) def MNodeLogAddNetworkPacketsSent(builder, networkPacketsSent): builder.PrependUint64Slot(57, networkPacketsSent, 0) def MNodeLogEnd(builder): return builder.EndObject() zlmdb-22.6.1/zlmdb/tests/_schema_fbs.py000066400000000000000000000147011426100523600177710ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################

import random
import uuid
import datetime

from zlmdb.flatbuffers.demo import User as _user
from zlmdb.flatbuffers.demo import Date as _date


class User(object):
    def __init__(self, from_fbs=None):
        self._from_fbs = from_fbs
        self._name = None
        self._authid = None
        self._uuid = None
        self._email = None
        self._birthday = None
        self._is_friendly = None
        self._tags = None
        self._ratings = None
        self._ratings_cached = None
        self._friends = None
        self._friends_cached = None
        self._referred_by = None

    @property
    def name(self):
        return self._name or self._from_fbs.Name()

    @name.setter
    def name(self, value):
        self._name = value

    @property
    def authid(self):
        return self._authid or self._from_fbs.Authid()

    @authid.setter
    def authid(self, value):
        self._authid = value

    @property
    def uuid(self):
        return self._uuid or self._from_fbs.Uuid()

    @uuid.setter
    def uuid(self, value):
        self._uuid = value

    @property
    def email(self):
        return self._email or self._from_fbs.Email()

    @email.setter
    def email(self, value):
        self._email = value

    @property
    def birthday(self):
        return self._birthday or self._from_fbs.Birthday()

    @birthday.setter
    def birthday(self, value):
        self._birthday = value

    @property
    def is_friendly(self):
        return self._is_friendly or self._from_fbs.IsFriendly()

    @is_friendly.setter
    def is_friendly(self, value):
        self._is_friendly = value

    @property
    def ratings(self):
        # lazily decode and cache the ratings map from the FlatBuffers record
        if self._ratings is not None:
            return self._ratings
        if self._ratings_cached is None:
            self._ratings_cached = {}
            if self._from_fbs:
                for i in range(self._from_fbs.RatingsLength()):
                    rat = self._from_fbs.Ratings(i)
                    self._ratings_cached[rat.Name()] = rat.Rating()
        return self._ratings_cached

    @ratings.setter
    def ratings(self, value):
        self._ratings = value

    @property
    def friends(self):
        # lazily decode and cache the friends vector from the FlatBuffers record
        if self._friends is not None:
            return self._friends
        if self._friends_cached is None:
            self._friends_cached = []
            if self._from_fbs:
                for i in range(self._from_fbs.FriendsLength()):
                    friend_oid = self._from_fbs.Friends(i)
                    self._friends_cached.append(friend_oid)
        return self._friends_cached

    @friends.setter
    def friends(self, value):
        self._friends = value

    @property
    def referred_by(self):
        return self._referred_by or self._from_fbs.ReferredBy()

    @referred_by.setter
    def referred_by(self, value):
        self._referred_by = value

    def build(self, builder):
        if self._name is not None:
            name = builder.CreateString(self._name)
        else:
            name = builder.CreateString(self._from_fbs.Name())

        if self._authid is not None:
            authid = builder.CreateString(self._authid)
        else:
            authid = builder.CreateString(self._from_fbs.Authid())

        if self._email is not None:
            email = builder.CreateString(self._email)
        else:
            email = builder.CreateString(self._from_fbs.Email())

        _user.UserStart(builder)
        _user.UserAddName(builder, name)
        _user.UserAddAuthid(builder, authid)
        _user.UserAddEmail(builder, email)

        if self._birthday is not None:
            _user.UserAddBirthday(
                builder, _date.CreateDate(builder, self._birthday.year, self._birthday.month, self._birthday.day))
        else:
            bd = self._from_fbs.Birthday()
            _user.UserAddBirthday(builder, _date.CreateDate(builder, bd.Year(), bd.Month(), bd.Day()))

        # FIXME: tags
        # FIXME: ratings
        # FIXME: friends

        if self._is_friendly is not None:
            _user.UserAddIsFriendly(builder, self._is_friendly)
        else:
            _user.UserAddIsFriendly(builder, self._from_fbs.IsFriendly())

        if self._referred_by is not None:
            _user.UserAddReferredBy(builder, self._referred_by)
        else:
            _user.UserAddReferredBy(builder, self._from_fbs.ReferredBy())

        return _user.UserEnd(builder)

    @staticmethod
    def cast(buf):
        return User(_user.User.GetRootAsUser(buf, 0))

    @staticmethod
    def create_test_user(oid=None):
        user = User()
        if oid is not None:
            user.oid = oid
        else:
            user.oid = random.randint(0, 9007199254740992)
        user.name = 'Test {}'.format(user.oid)
        user.authid = 'test-{}'.format(user.oid)
        user.uuid = uuid.uuid4()
        user.email = '{}@example.com'.format(user.authid)
        user.birthday = datetime.date(1950, 12, 24)
        user.is_friendly = True
        user.tags = ['geek', 'sudoko', 'yellow']
        for j in range(10):
            user.ratings['test-rating-{}'.format(j)] = random.random()
        user.friends = [random.randint(0, 9007199254740992) for _ in range(10)]
        user.referred_by = random.randint(0, 9007199254740992)
        return user
zlmdb-22.6.1/zlmdb/tests/_schema_mnode_log.py000066400000000000000000001166031426100523600211660ustar00rootroot00000000000000###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################

import pprint
import uuid

import numpy as np
import flatbuffers

from zlmdb import table, MapTimestampUuidFlatBuffers
from txaio import time_ns

import MNodeLog as MNodeLogGen


class _MNodeLogGen(MNodeLogGen.MNodeLog):
    """
    Expand methods on the class code generated by flatc.

    FIXME: come up with a PR for flatc to generate this stuff automatically.
""" @classmethod def GetRootAsMNodeLog(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = _MNodeLogGen() x.Init(buf, n + offset) return x def NodeIdAsBytes(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: _off = self._tab.Vector(o) _len = self._tab.VectorLen(o) return memoryview(self._tab.Bytes)[_off:_off + _len] return None def RunIdAsBytes(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: _off = self._tab.Vector(o) _len = self._tab.VectorLen(o) return memoryview(self._tab.Bytes)[_off:_off + _len] return None class MNodeLog(object): def __init__(self, from_fbs=None): self._from_fbs = from_fbs # uint64 self._timestamp = None # node_id: [uint8] (uuid); self._node_id = None # run_id: [uint8] (uuid); self._run_id = None # state: MNodeState; self._state = None # ended: uint64; self._ended = None # session: uint64; self._session = None # sent: uint64; self._sent = None # seq: uint64; self._seq = None # routers: uint16; self._routers = None # containers: uint16; self._containers = None # guests: uint16; self._guests = None # proxies: uint16; self._proxies = None # xbr_marketmakers: uint16; self._marketmakers = None # CF node system statistics self._cpu_ctx_switches = None self._cpu_freq = None self._cpu_guest = None self._cpu_guest_nice = None self._cpu_idle = None self._cpu_interrupts = None self._cpu_iotwait = None self._cpu_irq = None self._cpu_nice = None self._cpu_soft_interrupts = None self._cpu_softirq = None self._cpu_steal = None self._cpu_system = None self._cpu_user = None self._disk_busy_time = None self._disk_read_bytes = None self._disk_read_count = None self._disk_read_merged_count = None self._disk_read_time = None self._disk_write_bytes = None self._disk_write_count = None self._disk_write_merged_count = None self._disk_write_time = None self._memory_active = None self._memory_available = None self._memory_buffers = None self._memory_cached = None self._memory_free = None self._memory_inactive = None self._memory_percent = None self._memory_shared = None self._memory_slab = None self._memory_total = None self._memory_used = None self._network_bytes_recv = None self._network_bytes_sent = None self._network_connection_af_inet = None self._network_connection_af_inet6 = None self._network_connection_af_unix = None self._network_dropin = None self._network_dropout = None self._network_errin = None self._network_errout = None self._network_packets_recv = None self._network_packets_sent = None @staticmethod def parse(node_id, heartbeat): assert isinstance(node_id, uuid.UUID) assert type(heartbeat) == dict assert 'timestamp' in heartbeat and type(heartbeat['timestamp']) == int obj = MNodeLog() obj._timestamp = np.datetime64(time_ns(), 'ns') obj._node_id = node_id obj._run_id = uuid.UUID(bytes=b'\0' * 16) obj._state = heartbeat.get('state', None) obj._ended = np.datetime64(heartbeat['ended'], 'ns') if heartbeat.get('ended', None) else None obj._session = heartbeat.get('session', None) obj._sent = np.datetime64(heartbeat['timestamp'], 'ns') if heartbeat.get('timestamp', None) else None obj._seq = heartbeat.get('seq', None) workers = heartbeat.get('workers', {}) obj._routers = workers.get('router', None) obj._containers = workers.get('container', None) obj._guests = workers.get('guest', None) obj._proxies = workers.get('proxy', None) obj._marketmakers = workers.get('xbrmm', None) system = heartbeat.get('system', {}) system_cpu = system.get('cpu', {}) system_net = 
system.get('network', {}) system_mem = system.get('memory', {}) system_dsk = system.get('disk', {}) obj._cpu_ctx_switches = system_cpu.get('ctx_switches', None) obj._cpu_freq = system_cpu.get('freq', None) obj._cpu_guest = system_cpu.get('guest', None) obj._cpu_guest_nice = system_cpu.get('guest_nice', None) obj._cpu_idle = system_cpu.get('idle', None) obj._cpu_interrupts = system_cpu.get('interrupts', None) obj._cpu_iotwait = system_cpu.get('iotwait', None) obj._cpu_irq = system_cpu.get('irq', None) obj._cpu_nice = system_cpu.get('nice', None) obj._cpu_soft_interrupts = system_cpu.get('soft_interrupts', None) obj._cpu_softirq = system_cpu.get('softirq', None) obj._cpu_steal = system_cpu.get('steal', None) obj._cpu_system = system_cpu.get('system', None) obj._cpu_user = system_cpu.get('user', None) obj._network_bytes_recv = system_net.get('bytes_recv', None) obj._network_bytes_sent = system_net.get('bytes_sent', None) obj._network_packets_recv = system_net.get('packets_recv', None) obj._network_packets_sent = system_net.get('packets_sent', None) obj._network_dropin = system_net.get('dropin', None) obj._network_dropout = system_net.get('dropout', None) obj._network_errin = system_net.get('errin', None) obj._network_errout = system_net.get('errout', None) connection = system_net.get('connection', {}) obj._network_connection_af_inet = connection.get('AF_INET', None) obj._network_connection_af_inet6 = connection.get('AF_INET6', None) obj._network_connection_af_unix = connection.get('AF_UNIX', None) obj._memory_active = system_mem.get('active', None) obj._memory_available = system_mem.get('available', None) obj._memory_buffers = system_mem.get('buffers', None) obj._memory_cached = system_mem.get('cached', None) obj._memory_free = system_mem.get('free', None) obj._memory_inactive = system_mem.get('inactive', None) obj._memory_percent = system_mem.get('percent', None) obj._memory_shared = system_mem.get('shared', None) obj._memory_slab = system_mem.get('slab', None) obj._memory_total = system_mem.get('total', None) obj._memory_used = system_mem.get('used', None) obj._disk_busy_time = system_dsk.get('busy_time', None) obj._disk_read_bytes = system_dsk.get('read_bytes', None) obj._disk_read_count = system_dsk.get('read_count', None) obj._disk_read_merged_count = system_dsk.get('read_merged_count', None) obj._disk_read_time = system_dsk.get('read_time', None) obj._disk_write_bytes = system_dsk.get('write_bytes', None) obj._disk_write_count = system_dsk.get('write_count', None) obj._disk_write_merged_count = system_dsk.get('write_merged_count', None) obj._disk_write_time = system_dsk.get('write_time', None) return obj def marshal(self): obj = { 'timestamp': self.timestamp, 'node_id': str(self.node_id), 'run_id': str(self.run_id), 'state': self.state, 'ended': self.ended, 'session': self.session, 'sent': self.sent, 'seq': self.seq, 'workers': { 'router': self.routers, 'container': self.containers, 'guest': self.guests, 'proxy': self.proxies, 'xbrmm': self.marketmakers, }, 'cpu': { 'ctx_switches': self.cpu_ctx_switches, 'freq': self.cpu_freq, 'guest': self.cpu_guest, 'guest_nice': self.cpu_guest_nice, 'idle': self.cpu_idle, 'interrupts': self.cpu_interrupts, 'iotwait': self.cpu_iotwait, 'irq': self.cpu_irq, 'nice': self.cpu_nice, 'soft_interrupts': self.cpu_soft_interrupts, 'softirq': self.cpu_softirq, 'steal': self.cpu_steal, 'system': self.cpu_system, 'user': self.cpu_user, }, 'memory': { 'active': self.memory_active, 'available': self.memory_available, 'buffers': self.memory_buffers, 'cached': 
self.memory_cached, 'free': self.memory_free, 'inactive': self.memory_inactive, 'percent': self.memory_percent, 'shared': self.memory_shared, 'slab': self.memory_slab, 'total': self.memory_total, 'used': self.memory_used, }, 'disk': { 'busy_time': self.disk_busy_time, 'read_bytes': self.disk_read_bytes, 'read_count': self.disk_read_count, 'read_merged_count': self.disk_read_merged_count, 'read_time': self.disk_read_time, 'write_bytes': self.disk_write_bytes, 'write_count': self.disk_write_count, 'write_merged_count': self.disk_write_merged_count, 'write_time': self.disk_write_time, }, 'network': { 'bytes_recv': self.network_bytes_recv, 'bytes_sent': self.network_bytes_sent, 'connection': { 'AF_INET': self.network_connection_af_inet, 'AF_INET6': self.network_connection_af_inet6, 'UNIX': self.network_connection_af_unix, }, 'dropin': self.network_dropin, 'dropout': self.network_dropout, 'errin': self.network_errin, 'errout': self.network_errout, 'packets_recv': self.network_packets_recv, 'packets_sent': self.network_packets_sent, }, } return obj def __str__(self): return '\n{}\n'.format(pprint.pformat(self.marshal())) @property def timestamp(self): if self._timestamp is None and self._from_fbs: self._timestamp = np.datetime64(self._from_fbs.Timestamp(), 'ns') return self._timestamp @timestamp.setter def timestamp(self, value): assert value is None or isinstance(value, np.datetime64) self._timestamp = value @property def node_id(self): if self._node_id is None and self._from_fbs: if self._from_fbs.NodeIdLength(): _node_id = self._from_fbs.NodeIdAsBytes() self._node_id = uuid.UUID(bytes=_node_id.tobytes()) return self._node_id @node_id.setter def node_id(self, value): assert value is None or isinstance(value, uuid.UUID) self._node_id = value @property def run_id(self): if self._run_id is None and self._from_fbs: if self._from_fbs.RunIdLength(): _run_id = self._from_fbs.RunIdAsBytes() self._run_id = uuid.UUID(bytes=_run_id.tobytes()) return self._run_id @run_id.setter def run_id(self, value): assert value is None or isinstance(value, uuid.UUID) self._run_id = value @property def state(self): if self._state is None and self._from_fbs: self._state = self._from_fbs.State() return self._state @state.setter def state(self, value): assert value is None or type(value) == int self._state = value @property def ended(self): if self._ended is None and self._from_fbs: self._ended = np.datetime64(self._from_fbs.Ended(), 'ns') return self._ended @ended.setter def ended(self, value): assert value is None or isinstance(value, np.datetime64) self._ended = value @property def session(self): if self._session is None and self._from_fbs: self._session = self._from_fbs.Session() return self._session @session.setter def session(self, value): assert value is None or type(value) == int self._session = value @property def sent(self): if self._sent is None and self._from_fbs: self._sent = np.datetime64(self._from_fbs.Sent(), 'ns') return self._sent @sent.setter def sent(self, value): assert value is None or isinstance(value, np.datetime64) self._sent = value @property def seq(self): if self._seq is None and self._from_fbs: self._seq = self._from_fbs.Seq() return self._seq @seq.setter def seq(self, value): assert value is None or type(value) == int self._seq = value @property def routers(self): if self._routers is None and self._from_fbs: self._routers = self._from_fbs.Routers() return self._routers @routers.setter def routers(self, value): assert value is None or type(value) == int self._routers = value @property def 
containers(self): if self._containers is None and self._from_fbs: self._containers = self._from_fbs.Containers() return self._containers @containers.setter def containers(self, value): assert value is None or type(value) == int self._containers = value @property def guests(self): if self._guests is None and self._from_fbs: self._guests = self._from_fbs.Guests() return self._guests @guests.setter def guests(self, value): assert value is None or type(value) == int self._guests = value @property def proxies(self): if self._proxies is None and self._from_fbs: self._proxies = self._from_fbs.Proxies() return self._proxies @proxies.setter def proxies(self, value): assert value is None or type(value) == int self._proxies = value @property def marketmakers(self): if self._marketmakers is None and self._from_fbs: self._marketmakers = self._from_fbs.Marketmakers() return self._marketmakers @marketmakers.setter def marketmakers(self, value): assert value is None or type(value) == int self._marketmakers = value @property def cpu_ctx_switches(self): if self._cpu_ctx_switches is None and self._from_fbs: self._cpu_ctx_switches = self._from_fbs.CpuCtxSwitches() return self._cpu_ctx_switches @cpu_ctx_switches.setter def cpu_ctx_switches(self, value): assert value is None or type(value) == int self._cpu_ctx_switches = value @property def cpu_freq(self): if self._cpu_freq is None and self._from_fbs: self._cpu_freq = self._from_fbs.CpuFreq() return self._cpu_freq @cpu_freq.setter def cpu_freq(self, value): assert value is None or type(value) == float self._cpu_freq = value @property def cpu_guest(self): if self._cpu_guest is None and self._from_fbs: self._cpu_guest = self._from_fbs.CpuGuest() return self._cpu_guest @cpu_guest.setter def cpu_guest(self, value): assert value is None or type(value) == float self._cpu_guest = value @property def cpu_guest_nice(self): if self._cpu_guest_nice is None and self._from_fbs: self._cpu_guest_nice = self._from_fbs.CpuGuestNice() return self._cpu_guest_nice @cpu_guest_nice.setter def cpu_guest_nice(self, value): assert value is None or type(value) == float self._cpu_guest_nice = value @property def cpu_idle(self): if self._cpu_idle is None and self._from_fbs: self._cpu_idle = self._from_fbs.CpuIdle() return self._cpu_idle @cpu_idle.setter def cpu_idle(self, value): assert value is None or type(value) == float self._cpu_idle = value @property def cpu_interrupts(self): if self._cpu_interrupts is None and self._from_fbs: self._cpu_interrupts = self._from_fbs.CpuInterrupts() return self._cpu_interrupts @cpu_interrupts.setter def cpu_interrupts(self, value): assert value is None or type(value) == int self._cpu_interrupts = value @property def cpu_iotwait(self): if self._cpu_iotwait is None and self._from_fbs: self._cpu_iotwait = self._from_fbs.CpuIowait() return self._cpu_iotwait @cpu_iotwait.setter def cpu_iotwait(self, value): assert value is None or type(value) == float self._cpu_iotwait = value @property def cpu_irq(self): if self._cpu_irq is None and self._from_fbs: self._cpu_irq = self._from_fbs.CpuIrq() return self._cpu_irq @cpu_irq.setter def cpu_irq(self, value): assert value is None or type(value) == float self._cpu_irq = value @property def cpu_nice(self): if self._cpu_nice is None and self._from_fbs: self._cpu_nice = self._from_fbs.CpuNice() return self._cpu_nice @cpu_nice.setter def cpu_nice(self, value): assert value is None or type(value) == float self._cpu_nice = value @property def cpu_soft_interrupts(self): if self._cpu_soft_interrupts is None and 
self._from_fbs: self._cpu_soft_interrupts = self._from_fbs.CpuSoftInterrupts() return self._cpu_soft_interrupts @cpu_soft_interrupts.setter def cpu_soft_interrupts(self, value): assert value is None or type(value) == int self._cpu_soft_interrupts = value @property def cpu_softirq(self): if self._cpu_softirq is None and self._from_fbs: self._cpu_softirq = self._from_fbs.CpuSoftirq() return self._cpu_softirq @cpu_softirq.setter def cpu_softirq(self, value): assert value is None or type(value) == float self._cpu_softirq = value @property def cpu_steal(self): if self._cpu_steal is None and self._from_fbs: self._cpu_steal = self._from_fbs.CpuSteal() return self._cpu_steal @cpu_steal.setter def cpu_steal(self, value): assert value is None or type(value) == float self._cpu_steal = value @property def cpu_system(self): if self._cpu_system is None and self._from_fbs: self._cpu_system = self._from_fbs.CpuSystem() return self._cpu_system @cpu_system.setter def cpu_system(self, value): assert value is None or type(value) == float self._cpu_system = value @property def cpu_user(self): if self._cpu_user is None and self._from_fbs: self._cpu_user = self._from_fbs.CpuUser() return self._cpu_user @cpu_user.setter def cpu_user(self, value): assert value is None or type(value) == float self._cpu_user = value @property def network_bytes_recv(self): if self._network_bytes_recv is None and self._from_fbs: self._network_bytes_recv = self._from_fbs.NetworkBytesRecv() return self._network_bytes_recv @network_bytes_recv.setter def network_bytes_recv(self, value): assert value is None or type(value) == int self._network_bytes_recv = value @property def network_bytes_sent(self): if self._network_bytes_sent is None and self._from_fbs: self._network_bytes_sent = self._from_fbs.NetworkBytesSent() return self._network_bytes_sent @network_bytes_sent.setter def network_bytes_sent(self, value): assert value is None or type(value) == int self._network_bytes_sent = value @property def network_connection_af_inet(self): if self._network_connection_af_inet is None and self._from_fbs: self._network_connection_af_inet = self._from_fbs.NetworkConnectionAfInet() return self._network_connection_af_inet @network_connection_af_inet.setter def network_connection_af_inet(self, value): assert value is None or type(value) == int self._network_connection_af_inet = value @property def network_connection_af_inet6(self): if self._network_connection_af_inet6 is None and self._from_fbs: self._network_connection_af_inet6 = self._from_fbs.NetworkConnectionAfInet6() return self._network_connection_af_inet6 @network_connection_af_inet6.setter def network_connection_af_inet6(self, value): assert value is None or type(value) == int self._network_connection_af_inet6 = value @property def network_connection_af_unix(self): if self._network_connection_af_unix is None and self._from_fbs: self._network_connection_af_unix = self._from_fbs.NetworkConnectionAfUnix() return self._network_connection_af_unix @network_connection_af_unix.setter def network_connection_af_unix(self, value): assert value is None or type(value) == int self._network_connection_af_unix = value @property def network_dropin(self): if self._network_dropin is None and self._from_fbs: self._network_dropin = self._from_fbs.NetworkDropin() return self._network_dropin @network_dropin.setter def network_dropin(self, value): assert value is None or type(value) == int self._network_dropin = value @property def network_dropout(self): if self._network_dropout is None and self._from_fbs: 
self._network_dropout = self._from_fbs.NetworkDropout() return self._network_dropout @network_dropout.setter def network_dropout(self, value): assert value is None or type(value) == int self._network_dropout = value @property def network_errin(self): if self._network_errin is None and self._from_fbs: self._network_errin = self._from_fbs.NetworkErrin() return self._network_errin @network_errin.setter def network_errin(self, value): assert value is None or type(value) == int self._network_errin = value @property def network_errout(self): if self._network_errout is None and self._from_fbs: self._network_errout = self._from_fbs.NetworkErrout() return self._network_errout @network_errout.setter def network_errout(self, value): assert value is None or type(value) == int self._network_errout = value @property def network_packets_recv(self): if self._network_packets_recv is None and self._from_fbs: self._network_packets_recv = self._from_fbs.NetworkPacketsRecv() return self._network_packets_recv @network_packets_recv.setter def network_packets_recv(self, value): assert value is None or type(value) == int self._network_packets_recv = value @property def network_packets_sent(self): if self._network_packets_sent is None and self._from_fbs: self._network_packets_sent = self._from_fbs.NetworkPacketsSent() return self._network_packets_sent @network_packets_sent.setter def network_packets_sent(self, value): assert value is None or type(value) == int self._network_packets_sent = value @property def memory_active(self): if self._memory_active is None and self._from_fbs: self._memory_active = self._from_fbs.MemoryActive() return self._memory_active @memory_active.setter def memory_active(self, value): assert value is None or type(value) == int self._memory_active = value @property def memory_available(self): if self._memory_available is None and self._from_fbs: self._memory_available = self._from_fbs.MemoryAvailable() return self._memory_available @memory_available.setter def memory_available(self, value): assert value is None or type(value) == int self._memory_available = value @property def memory_buffers(self): if self._memory_buffers is None and self._from_fbs: self._memory_buffers = self._from_fbs.MemoryBuffers() return self._memory_buffers @memory_buffers.setter def memory_buffers(self, value): assert value is None or type(value) == int self._memory_buffers = value @property def memory_cached(self): if self._memory_cached is None and self._from_fbs: self._memory_cached = self._from_fbs.MemoryCached() return self._memory_cached @memory_cached.setter def memory_cached(self, value): assert value is None or type(value) == int self._memory_cached = value @property def memory_free(self): if self._memory_free is None and self._from_fbs: self._memory_free = self._from_fbs.MemoryFree() return self._memory_free @memory_free.setter def memory_free(self, value): assert value is None or type(value) == int self._memory_free = value @property def memory_inactive(self): if self._memory_inactive is None and self._from_fbs: self._memory_inactive = self._from_fbs.MemoryInactive() return self._memory_inactive @memory_inactive.setter def memory_inactive(self, value): assert value is None or type(value) == int self._memory_inactive = value @property def memory_percent(self): if self._memory_percent is None and self._from_fbs: self._memory_percent = self._from_fbs.MemoryPercent() return self._memory_percent @memory_percent.setter def memory_percent(self, value): assert value is None or type(value) == float 
self._memory_percent = value @property def memory_shared(self): if self._memory_shared is None and self._from_fbs: self._memory_shared = self._from_fbs.MemoryShared() return self._memory_shared @memory_shared.setter def memory_shared(self, value): assert value is None or type(value) == int self._memory_shared = value @property def memory_slab(self): if self._memory_slab is None and self._from_fbs: self._memory_slab = self._from_fbs.MemorySlab() return self._memory_slab @memory_slab.setter def memory_slab(self, value): assert value is None or type(value) == int self._memory_slab = value @property def memory_total(self): if self._memory_total is None and self._from_fbs: self._memory_total = self._from_fbs.MemoryTotal() return self._memory_total @memory_total.setter def memory_total(self, value): assert value is None or type(value) == int self._memory_total = value @property def memory_used(self): if self._memory_used is None and self._from_fbs: self._memory_used = self._from_fbs.MemoryUsed() return self._memory_used @memory_used.setter def memory_used(self, value): assert value is None or type(value) == int self._memory_used = value @property def disk_busy_time(self): if self._disk_busy_time is None and self._from_fbs: self._disk_busy_time = self._from_fbs.DiskBusyTime() return self._disk_busy_time @disk_busy_time.setter def disk_busy_time(self, value): assert value is None or type(value) == int self._disk_busy_time = value @property def disk_read_bytes(self): if self._disk_read_bytes is None and self._from_fbs: self._disk_read_bytes = self._from_fbs.DiskReadBytes() return self._disk_read_bytes @disk_read_bytes.setter def disk_read_bytes(self, value): assert value is None or type(value) == int self._disk_read_bytes = value @property def disk_read_count(self): if self._disk_read_count is None and self._from_fbs: self._disk_read_count = self._from_fbs.DiskReadCount() return self._disk_read_count @disk_read_count.setter def disk_read_count(self, value): assert value is None or type(value) == int self._disk_read_count = value @property def disk_read_merged_count(self): if self._disk_read_merged_count is None and self._from_fbs: self._disk_read_merged_count = self._from_fbs.DiskReadMergedCount() return self._disk_read_merged_count @disk_read_merged_count.setter def disk_read_merged_count(self, value): assert value is None or type(value) == int self._disk_read_merged_count = value @property def disk_read_time(self): if self._disk_read_time is None and self._from_fbs: self._disk_read_time = self._from_fbs.DiskReadTime() return self._disk_read_time @disk_read_time.setter def disk_read_time(self, value): assert value is None or type(value) == int self._disk_read_time = value @property def disk_write_bytes(self): if self._disk_write_bytes is None and self._from_fbs: self._disk_write_bytes = self._from_fbs.DiskWriteBytes() return self._disk_write_bytes @disk_write_bytes.setter def disk_write_bytes(self, value): assert value is None or type(value) == int self._disk_write_bytes = value @property def disk_write_count(self): if self._disk_write_count is None and self._from_fbs: self._disk_write_count = self._from_fbs.DiskWriteCount() return self._disk_write_count @disk_write_count.setter def disk_write_count(self, value): assert value is None or type(value) == int self._disk_write_count = value @property def disk_write_merged_count(self): if self._disk_write_merged_count is None and self._from_fbs: self._disk_write_merged_count = self._from_fbs.DiskWriteMergedCount() return self._disk_write_merged_count 
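    # NOTE: this getter/setter pattern repeats for every MNodeLog field: the
    # getter lazily decodes the value from the underlying FlatBuffers record
    # (self._from_fbs) on first access and caches it in the matching private
    # attribute, while the setter (like the one following) overrides that
    # cached value after asserting the expected Python type.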
@disk_write_merged_count.setter def disk_write_merged_count(self, value): assert value is None or type(value) == int self._disk_write_merged_count = value @property def disk_write_time(self): if self._disk_write_time is None and self._from_fbs: self._disk_write_time = self._from_fbs.DiskWriteTime() return self._disk_write_time @disk_write_time.setter def disk_write_time(self, value): assert value is None or type(value) == int self._disk_write_time = value @staticmethod def cast(buf): assert type(buf) in [bytes, bytearray], 'bytes expected, got {}'.format(type(buf)) return MNodeLog(_MNodeLogGen.GetRootAsMNodeLog(buf, 0)) def build(self, builder): node_id = self.node_id.bytes if self.node_id else None if node_id: node_id = builder.CreateString(node_id) run_id = self.run_id.bytes if self.run_id else None if run_id: run_id = builder.CreateString(run_id) MNodeLogGen.MNodeLogStart(builder) if self.timestamp: MNodeLogGen.MNodeLogAddTimestamp(builder, int(self.timestamp)) if node_id: MNodeLogGen.MNodeLogAddNodeId(builder, node_id) if run_id: MNodeLogGen.MNodeLogAddRunId(builder, run_id) if self.state: MNodeLogGen.MNodeLogAddState(builder, self.state) if self.ended: MNodeLogGen.MNodeLogAddEnded(builder, int(self.ended)) if self.session: MNodeLogGen.MNodeLogAddSession(builder, self.session) if self.sent: MNodeLogGen.MNodeLogAddSent(builder, int(self.sent)) if self.seq: MNodeLogGen.MNodeLogAddSeq(builder, self.seq) if self.routers: MNodeLogGen.MNodeLogAddRouters(builder, self.routers) if self.containers: MNodeLogGen.MNodeLogAddContainers(builder, self.containers) if self.guests: MNodeLogGen.MNodeLogAddGuests(builder, self.guests) if self.proxies: MNodeLogGen.MNodeLogAddProxies(builder, self.proxies) if self.marketmakers: MNodeLogGen.MNodeLogAddMarketmakers(builder, self.marketmakers) if self.cpu_ctx_switches: MNodeLogGen.MNodeLogAddCpuCtxSwitches(builder, self.cpu_ctx_switches) if self.cpu_freq: MNodeLogGen.MNodeLogAddCpuFreq(builder, self.cpu_freq) if self.cpu_guest: MNodeLogGen.MNodeLogAddCpuGuest(builder, self.cpu_guest) if self.cpu_guest_nice: MNodeLogGen.MNodeLogAddCpuGuestNice(builder, self.cpu_guest_nice) if self.cpu_idle: MNodeLogGen.MNodeLogAddCpuIdle(builder, self.cpu_idle) if self.cpu_interrupts: MNodeLogGen.MNodeLogAddCpuInterrupts(builder, self.cpu_interrupts) if self.cpu_iotwait: MNodeLogGen.MNodeLogAddCpuIowait(builder, self.cpu_iotwait) if self.cpu_irq: MNodeLogGen.MNodeLogAddCpuIrq(builder, self.cpu_irq) if self.cpu_nice: MNodeLogGen.MNodeLogAddCpuNice(builder, self.cpu_nice) if self.cpu_soft_interrupts: MNodeLogGen.MNodeLogAddCpuSoftInterrupts(builder, self.cpu_soft_interrupts) if self.cpu_softirq: MNodeLogGen.MNodeLogAddCpuSoftirq(builder, self.cpu_softirq) if self.cpu_steal: MNodeLogGen.MNodeLogAddCpuSteal(builder, self.cpu_steal) if self.cpu_system: MNodeLogGen.MNodeLogAddCpuSystem(builder, self.cpu_system) if self.cpu_user: MNodeLogGen.MNodeLogAddCpuUser(builder, self.cpu_user) if self.network_bytes_recv: MNodeLogGen.MNodeLogAddNetworkBytesRecv(builder, self.network_bytes_recv) if self.network_bytes_sent: MNodeLogGen.MNodeLogAddNetworkBytesSent(builder, self.network_bytes_sent) if self.network_connection_af_inet: MNodeLogGen.MNodeLogAddNetworkConnectionAfInet(builder, self.network_connection_af_inet) if self.network_connection_af_inet6: MNodeLogGen.MNodeLogAddNetworkConnectionAfInet6(builder, self.network_connection_af_inet6) if self.network_connection_af_unix: MNodeLogGen.MNodeLogAddNetworkConnectionAfUnix(builder, self.network_connection_af_unix) if self.network_dropin: 
MNodeLogGen.MNodeLogAddNetworkDropin(builder, self.network_dropin) if self.network_dropout: MNodeLogGen.MNodeLogAddNetworkDropout(builder, self.network_dropout) if self.network_errin: MNodeLogGen.MNodeLogAddNetworkErrin(builder, self.network_errin) if self.network_errout: MNodeLogGen.MNodeLogAddNetworkErrout(builder, self.network_errout) if self.network_packets_recv: MNodeLogGen.MNodeLogAddNetworkPacketsRecv(builder, self.network_packets_recv) if self.network_packets_sent: MNodeLogGen.MNodeLogAddNetworkPacketsSent(builder, self.network_packets_sent) if self.memory_active: MNodeLogGen.MNodeLogAddMemoryActive(builder, self.memory_active) if self.memory_available: MNodeLogGen.MNodeLogAddMemoryAvailable(builder, self.memory_available) if self.memory_buffers: MNodeLogGen.MNodeLogAddMemoryBuffers(builder, self.memory_buffers) if self.memory_cached: MNodeLogGen.MNodeLogAddMemoryCached(builder, self.memory_cached) if self.memory_free: MNodeLogGen.MNodeLogAddMemoryFree(builder, self.memory_free) if self.memory_inactive: MNodeLogGen.MNodeLogAddMemoryInactive(builder, self.memory_inactive) if self.memory_percent: MNodeLogGen.MNodeLogAddMemoryPercent(builder, self.memory_percent) if self.memory_shared: MNodeLogGen.MNodeLogAddMemoryShared(builder, self.memory_shared) if self.memory_slab: MNodeLogGen.MNodeLogAddMemorySlab(builder, self.memory_slab) if self.memory_total: MNodeLogGen.MNodeLogAddMemoryTotal(builder, self.memory_total) if self.memory_used: MNodeLogGen.MNodeLogAddMemoryUsed(builder, self.memory_used) if self.disk_busy_time: MNodeLogGen.MNodeLogAddDiskBusyTime(builder, self.disk_busy_time) if self.disk_read_bytes: MNodeLogGen.MNodeLogAddDiskReadBytes(builder, self.disk_read_bytes) if self.disk_read_count: MNodeLogGen.MNodeLogAddDiskReadCount(builder, self.disk_read_count) if self.disk_read_merged_count: MNodeLogGen.MNodeLogAddDiskReadMergedCount(builder, self.disk_read_merged_count) if self.disk_read_time: MNodeLogGen.MNodeLogAddDiskReadTime(builder, self.disk_read_time) if self.disk_write_bytes: MNodeLogGen.MNodeLogAddDiskWriteBytes(builder, self.disk_write_bytes) if self.disk_write_count: MNodeLogGen.MNodeLogAddDiskWriteCount(builder, self.disk_write_count) if self.disk_write_merged_count: MNodeLogGen.MNodeLogAddDiskWriteMergedCount(builder, self.disk_write_merged_count) if self.disk_write_time: MNodeLogGen.MNodeLogAddDiskWriteTime(builder, self.disk_write_time) final = MNodeLogGen.MNodeLogEnd(builder) return final @table('256a071f-5aeb-47f3-8786-97cd8281bdb7', build=MNodeLog.build, cast=MNodeLog.cast) class MNodeLogs(MapTimestampUuidFlatBuffers): pass class Schema(object): mnode_logs = None def __init__(self, db): self.db = db @staticmethod def attach(db): schema = Schema(db) schema.mnode_logs = db.attach_table(MNodeLogs) return schema zlmdb-22.6.1/zlmdb/tests/_schema_py2.py000066400000000000000000000217651426100523600177410ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this 
permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import random import uuid import datetime import zlmdb class User(object): def __init__(self): self.oid = None self.name = None self.authid = None self.uuid = None self.email = None self.birthday = None self.is_friendly = None self.tags = None self.ratings = {} self.friends = [] self.referred_by = None self.realm_oid = None self.icecream = None self.mrealm = None self.mrealm_notnull = None def __eq__(self, other): if not isinstance(other, self.__class__): return False if other.oid != self.oid: return False if other.name != self.name: return False if other.authid != self.authid: return False if other.uuid != self.uuid: return False if other.email != self.email: return False if other.birthday != self.birthday: return False if other.is_friendly != self.is_friendly: return False if (self.tags and not other.tags) or (not self.tags and other.tags): return False if other.realm_oid != self.realm_oid: return False if other.icecream != self.icecream: return False if other.mrealm != self.mrealm: return False if other.mrealm_notnull != self.mrealm_notnull: return False return True def __ne__(self, other): return not self.__eq__(other) def marshal(self): obj = { u'oid': self.oid, u'name': self.name, u'authid': self.authid, u'uuid': self.uuid.hex if self.uuid else None, u'email': self.email, u'birthday': { u'year': self.birthday.year if self.birthday else None, u'month': self.birthday.month if self.birthday else None, u'day': self.birthday.day if self.birthday else None, }, u'is_friendly': self.is_friendly, u'tags': self.tags, u'ratings': self.ratings, u'friends': self.friends, u'referred_by': self.referred_by, u'realm_oid': self.realm_oid, u'icecream': self.icecream, u'mrealm': self.mrealm.hex if self.mrealm else None, u'mrealm_notnull': self.mrealm_notnull.hex if self.mrealm_notnull else None, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get(u'oid', None) user.name = obj.get(u'name', None) user.authid = obj.get(u'authid', None) if u'uuid' in obj: user.uuid = uuid.UUID(hex=obj[u'uuid']) user.email = obj.get(u'email', None) if u'birthday' in obj: b = obj[u'birthday'] user.birthday = datetime.date(b.year, b.month, b.day) user.is_friendly = obj.get(u'is_friendly', None) user.tags = obj.get(u'tags', None) user.ratings = obj.get(u'ratings', {}) user.friends = obj.get(u'friends', []) user.referred_by = obj.get(u'referred_by', None) user.realm_oid = obj.get(u'realm_oid', None) user.icecream = obj.get(u'icecream', None) if 'mrealm' in obj and obj['mrealm']: user.mrealm = uuid.UUID(hex=obj['mrealm']) if 'mrealm_notnull' in obj and obj['mrealm_notnull']: user.mrealm_notnull = uuid.UUID(hex=obj['mrealm_notnull']) return user @staticmethod def create_test_user(oid=None, realm_oid=None): user = User() if oid is not None: user.oid = oid else: user.oid = random.randint(0, 9007199254740992) user.name = u'Test {}'.format(user.oid) user.authid = 
u'test-{}'.format(user.oid) user.uuid = uuid.uuid4() user.email = u'{}@example.com'.format(user.authid) user.birthday = datetime.date(1950, 12, 24) user.is_friendly = True user.tags = [u'geek', u'sudoko', u'yellow'] for j in range(10): user.ratings[u'test-rating-{}'.format(j)] = random.random() user.friends = [random.randint(0, 9007199254740992) for _ in range(10)] user.referred_by = random.randint(0, 9007199254740992) if realm_oid is not None: user.realm_oid = realm_oid else: user.realm_oid = random.randint(0, 9007199254740992) user.icecream = random.choice([u'vanilla', u'lemon', u'strawberry']) user.mrealm = uuid.uuid4() user.mrealm_notnull = uuid.uuid4() return user class Schema1(zlmdb.Schema): def __init__(self): super(Schema1, self).__init__() self.tab_uuid_str = zlmdb.MapUuidString(slot=1) self.tab_uuid_oid = zlmdb.MapUuidOid(slot=2) self.tab_uuid_uuid = zlmdb.MapUuidUuid(slot=3) self.tab_str_str = zlmdb.MapStringString(slot=4) self.tab_str_oid = zlmdb.MapStringOid(slot=5) self.tab_str_uuid = zlmdb.MapStringUuid(slot=6) self.tab_oid_str = zlmdb.MapOidString(slot=7) self.tab_oid_oid = zlmdb.MapOidOid(slot=8) self.tab_oid_uuid = zlmdb.MapOidUuid(slot=9) self.tab_uuid_json = zlmdb.MapUuidJson(slot=10, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_uuid_cbor = zlmdb.MapUuidCbor(slot=11, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_uuid_pickle = zlmdb.MapUuidPickle(slot=12) self.tab_str_json = zlmdb.MapStringJson(slot=20, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_str_cbor = zlmdb.MapStringCbor(slot=21, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_str_pickle = zlmdb.MapStringPickle(slot=22) self.tab_oid_json = zlmdb.MapOidJson(slot=30, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_oid_cbor = zlmdb.MapOidCbor(slot=31, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_oid_pickle = zlmdb.MapOidPickle(slot=32) class Schema2(zlmdb.Schema): def __init__(self): super(Schema2, self).__init__() self.users = zlmdb.MapOidPickle(1) class Schema3(zlmdb.Schema): def __init__(self): super(Schema3, self).__init__() self.users = zlmdb.MapStringPickle(1) class Schema4(zlmdb.Schema): def __init__(self): super(Schema4, self).__init__() self.users = zlmdb.MapOidPickle(1) self.idx_users_by_authid = zlmdb.MapStringOid(2) self.users.attach_index('idx1', self.idx_users_by_authid, lambda user: user.authid, nullable=False) self.idx_users_by_email = zlmdb.MapStringOid(3) self.users.attach_index('idx2', self.idx_users_by_email, lambda user: user.email, nullable=True) self.idx_users_by_realm = zlmdb.MapOidOidOid(4) self.users.attach_index('idx3', self.idx_users_by_realm, lambda user: (user.realm_oid, user.oid)) self.idx_users_by_icecream = zlmdb.MapStringOidOid(5) self.users.attach_index('idx4', self.idx_users_by_icecream, lambda user: (user.icecream, user.oid), nullable=False) self.idx_users_by_mrealm_authid = zlmdb.MapUuidStringOid(6) self.users.attach_index('idx5', self.idx_users_by_mrealm_authid, lambda user: (user.mrealm, user.authid), nullable=True) self.idx_users_by_mrealm_notnull_authid = zlmdb.MapUuidStringOid(7) self.users.attach_index('idx6', self.idx_users_by_mrealm_notnull_authid, lambda user: (user.mrealm_notnull, user.authid), nullable=False) zlmdb-22.6.1/zlmdb/tests/_schema_py3.py000066400000000000000000000246251426100523600177400ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io 
Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import random import uuid import datetime from typing import Optional, List, Dict import zlmdb class User(object): oid: int name: str authid: str uuid: uuid.UUID email: str birthday: datetime.date is_friendly: bool tags: Optional[List[str]] ratings: Dict[str, float] friends: List[int] referred_by: int realm_oid: int icecream: str mrealm: uuid.UUID # type:ignore mrealm_notnull: uuid.UUID # type:ignore def __init__(self): self.oid = None self.name = None self.authid = None self.uuid = None self.email = None self.birthday = None self.is_friendly = None self.tags = None self.ratings = {} self.friends = [] self.referred_by = None self.realm_oid = None self.icecream = None self.mrealm = None self.mrealm_notnull = None def __eq__(self, other): if not isinstance(other, self.__class__): return False if other.oid != self.oid: return False if other.name != self.name: return False if other.authid != self.authid: return False if other.uuid != self.uuid: return False if other.email != self.email: return False if other.birthday != self.birthday: return False if other.is_friendly != self.is_friendly: return False if (self.tags and not other.tags) or (not self.tags and other.tags): return False if other.realm_oid != self.realm_oid: return False if other.icecream != self.icecream: return False if other.mrealm != self.mrealm: return False if other.mrealm_notnull != self.mrealm_notnull: return False return True def __ne__(self, other): return not self.__eq__(other) def marshal(self): obj = { 'oid': self.oid, 'name': self.name, 'authid': self.authid, 'uuid': self.uuid.hex if self.uuid else None, 'email': self.email, 'birthday': { 'year': self.birthday.year if self.birthday else None, 'month': self.birthday.month if self.birthday else None, 'day': self.birthday.day if self.birthday else None, }, 'is_friendly': self.is_friendly, 'tags': self.tags, 'ratings': self.ratings, 'friends': self.friends, 'referred_by': self.referred_by, 'realm_oid': self.realm_oid, 'icecream': self.icecream, 'mrealm': self.mrealm.hex if self.mrealm else None, 'mrealm_notnull': self.mrealm_notnull.hex if self.mrealm_notnull else None, } return obj @staticmethod def parse(obj): user = User() user.oid = obj.get('oid', None) user.name = obj.get('name', None) user.authid = obj.get('authid', None) if 'uuid' in obj: user.uuid = uuid.UUID(hex=obj['uuid']) user.email = obj.get('email', None) if 'birthday' in 
obj: b = obj['birthday'] user.birthday = datetime.date(b.year, b.month, b.day) user.is_friendly = obj.get('is_friendly', None) user.tags = obj.get('tags', None) user.ratings = obj.get('ratings', {}) user.friends = obj.get('friends', []) user.referred_by = obj.get('referred_by', None) user.realm_oid = obj.get('realm_oid', None) user.icecream = obj.get('icecream', None) if 'mrealm' in obj and obj['mrealm']: user.mrealm = uuid.UUID(hex=obj['mrealm']) if 'mrealm_notnull' in obj and obj['mrealm_notnull']: user.mrealm_notnull = uuid.UUID(hex=obj['mrealm_notnull']) return user @staticmethod def create_test_user(oid=None, realm_oid=None): user = User() if oid is not None: user.oid = oid else: user.oid = random.randint(0, 9007199254740992) user.name = 'Test {}'.format(user.oid) user.authid = 'test-{}'.format(user.oid) user.uuid = uuid.uuid4() user.email = '{}@example.com'.format(user.authid) user.birthday = datetime.date(1950, 12, 24) user.is_friendly = True user.tags = ['geek', 'sudoko', 'yellow'] for j in range(10): user.ratings['test-rating-{}'.format(j)] = 1 / (j + 1) # round(random.random(), 3) user.friends = [random.randint(0, 9007199254740992) for _ in range(10)] user.referred_by = random.randint(0, 9007199254740992) if realm_oid is not None: user.realm_oid = realm_oid else: user.realm_oid = random.randint(0, 9007199254740992) user.icecream = random.choice(['vanilla', 'lemon', 'strawberry']) user.mrealm = uuid.uuid4() user.mrealm_notnull = uuid.uuid4() return user class Schema1(zlmdb.Schema): tab_uuid_str: zlmdb.MapUuidString tab_uuid_oid: zlmdb.MapUuidOid tab_uuid_uuid: zlmdb.MapUuidUuid tab_str_str: zlmdb.MapStringString tab_str_oid: zlmdb.MapStringOid tab_str_uuid: zlmdb.MapStringUuid tab_oid_str: zlmdb.MapOidString tab_oid_oid: zlmdb.MapOidOid tab_oid_uuid: zlmdb.MapOidUuid tab_uuid_json: zlmdb.MapUuidJson tab_uuid_cbor: zlmdb.MapUuidCbor tab_uuid_pickle: zlmdb.MapUuidPickle tab_str_json: zlmdb.MapStringJson tab_str_cbor: zlmdb.MapStringCbor tab_str_pickle: zlmdb.MapStringPickle tab_oid_json: zlmdb.MapOidJson tab_oid_cbor: zlmdb.MapOidCbor tab_oid_pickle: zlmdb.MapOidPickle def __init__(self): self.tab_uuid_str = zlmdb.MapUuidString(slot=1) self.tab_uuid_oid = zlmdb.MapUuidOid(slot=2) self.tab_uuid_uuid = zlmdb.MapUuidUuid(slot=3) self.tab_str_str = zlmdb.MapStringString(slot=4) self.tab_str_oid = zlmdb.MapStringOid(slot=5) self.tab_str_uuid = zlmdb.MapStringUuid(slot=6) self.tab_oid_str = zlmdb.MapOidString(slot=7) self.tab_oid_oid = zlmdb.MapOidOid(slot=8) self.tab_oid_uuid = zlmdb.MapOidUuid(slot=9) self.tab_uuid_json = zlmdb.MapUuidJson(slot=10, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_uuid_cbor = zlmdb.MapUuidCbor(slot=11, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_uuid_pickle = zlmdb.MapUuidPickle(slot=12) self.tab_str_json = zlmdb.MapStringJson(slot=20, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_str_cbor = zlmdb.MapStringCbor(slot=21, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_str_pickle = zlmdb.MapStringPickle(slot=22) self.tab_oid_json = zlmdb.MapOidJson(slot=30, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_oid_cbor = zlmdb.MapOidCbor(slot=31, marshal=(lambda o: o.marshal()), unmarshal=User.parse) self.tab_oid_pickle = zlmdb.MapOidPickle(slot=32) class Schema2(zlmdb.Schema): users: zlmdb.MapOidPickle def __init__(self): self.users = zlmdb.MapOidPickle(1) class Schema3(zlmdb.Schema): users: zlmdb.MapStringPickle def __init__(self): self.users = zlmdb.MapStringPickle(1) class 
Schema4(zlmdb.Schema): users: zlmdb.MapOidPickle idx_users_by_authid: zlmdb.MapStringOid idx_users_by_email: zlmdb.MapStringOid idx_users_by_realm: zlmdb.MapOidOidOid idx_users_by_icecream: zlmdb.MapStringOidOid idx_users_by_mrealm_authid: zlmdb.MapUuidStringOid idx_users_by_mrealm_authid_notnull: zlmdb.MapUuidStringOid def __init__(self): super(Schema4, self).__init__() self.users = zlmdb.MapOidPickle(1) self.idx_users_by_authid = zlmdb.MapStringOid(2) self.users.attach_index('idx1', self.idx_users_by_authid, lambda user: user.authid, nullable=False) self.idx_users_by_email = zlmdb.MapStringOid(3) self.users.attach_index('idx2', self.idx_users_by_email, lambda user: user.email, nullable=True) self.idx_users_by_realm = zlmdb.MapOidOidOid(4) self.users.attach_index('idx3', self.idx_users_by_realm, lambda user: (user.realm_oid, user.oid), nullable=False) self.idx_users_by_icecream = zlmdb.MapStringOidOid(5) self.users.attach_index('idx4', self.idx_users_by_icecream, lambda user: (user.icecream, user.oid), nullable=False) self.idx_users_by_mrealm_authid = zlmdb.MapUuidStringOid(6) self.users.attach_index('idx5', self.idx_users_by_mrealm_authid, lambda user: (user.mrealm, user.authid), nullable=True) self.idx_users_by_mrealm_notnull_authid = zlmdb.MapUuidStringOid(7) self.users.attach_index('idx6', self.idx_users_by_mrealm_notnull_authid, lambda user: (user.mrealm_notnull, user.authid), nullable=False) zlmdb-22.6.1/zlmdb/tests/_test_flatbuffers.py000066400000000000000000000113421426100523600212370ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import os import sys import random import txaio txaio.use_twisted() try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore import zlmdb # noqa sys.path.append(os.path.dirname(os.path.abspath(__file__))) from _schema_fbs import User # noqa class UsersSchema(zlmdb.Schema): def __init__(self): self.tab_oid_fbs = zlmdb.MapOidFlatBuffers(1, build=User.build, cast=User.cast) def test_pmap_flatbuffers_values(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = UsersSchema() with zlmdb.Database(dbpath) as db: N = 100 stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for i in range(N): user = User.create_test_user() schema.tab_oid_fbs[txn, user.oid] = user assert stats.puts == N assert stats.dels == 0 stats.reset() with db.begin() as txn: cnt = schema.tab_oid_fbs.count(txn) assert cnt == N def test_pmap_flatbuffers_count(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = UsersSchema() # max DB size is 100 MB with zlmdb.Database(dbpath, maxsize=100 * (2**20)) as db: oids = set() oid_to_referred_by = {} stats = zlmdb.TransactionStats() # number of transactions M = 5 # number of insert rows per transaction N = 10000 for j in range(M): with db.begin(write=True, stats=stats) as txn: for i in range(N): user = User.create_test_user() schema.tab_oid_fbs[txn, user.oid] = user oids.add(user.oid) oid_to_referred_by[user.oid] = user.referred_by assert stats.puts == N assert stats.dels == 0 duration_ns = stats.duration duration_ms = int(duration_ns / 1000000.) rows_per_sec = int(round(float(stats.puts + stats.dels) * 1000. / float(duration_ms))) print('Transaction ended: puts={} / dels={} rows in {} ms, {} rows/sec'.format( stats.puts, stats.dels, duration_ms, rows_per_sec)) stats.reset() # count all rows with db.begin() as txn: cnt = schema.tab_oid_fbs.count(txn) assert cnt == N * M # retrieve with db.begin() as txn: for j in range(5): started = zlmdb.walltime() M = 100 for i in range(M): for oid in random.sample(oids, N): user = schema.tab_oid_fbs[txn, oid] assert user assert user.referred_by == oid_to_referred_by.get(oid, None) duration_ns = zlmdb.walltime() - started duration_ms = int(duration_ns / 1000000.) rows_per_sec = int(round(float(M * N) * 1000. / float(duration_ms))) print('Transaction ended: {} rows read in {} ms, {} rows/sec'.format( M * N, duration_ms, rows_per_sec)) zlmdb-22.6.1/zlmdb/tests/_test_serialization.py000066400000000000000000000106271426100523600216160ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import os import sys import timeit import uuid import platform import humanize import txaio txaio.use_twisted() from zlmdb import _types # noqa from _schema_fbs import User as UserFbs # noqa sys.path.append(os.path.dirname(os.path.abspath(__file__))) if sys.version_info >= (3, 6): from _schema_py3 import User else: from _schema_py2 import User _TEST = {'oid': 0, 'uuid': None, 'bytes': 0, 'serializer': None} def _serializer_run_fbs(): serialize = _TEST['serializer']._serialize_value user = UserFbs.create_test_user() data = serialize(user) _TEST['bytes'] += len(data) def _serializer_run(): serialize = _TEST['serializer']._serialize_value user = User.create_test_user() data = serialize(user) _TEST['bytes'] += len(data) def _serialization_speed(serializer, testfun): N = 10 M = 10000 samples = [] print('running on:') print(sys.version) print(platform.uname()) _TEST['oid'] = 0 _TEST['uuid'] = uuid.uuid4() _TEST['bytes'] = 0 _TEST['bytes'] = 0 _TEST['serializer'] = serializer for i in range(N): secs = timeit.timeit(testfun, number=M) ops = round(float(M) / secs, 1) samples.append(ops) print('{} objects/sec {}'.format(ops, humanize.naturalsize(_TEST['bytes']))) ops_max = max(samples) bytes_per_obj = float(_TEST['bytes']) / float(N * M) print('{} objects/sec max, {} bytes total, {} bytes/obj'.format(ops_max, humanize.naturalsize(_TEST['bytes']), humanize.naturalsize(bytes_per_obj))) return ops_max, _TEST['bytes'] def test_json_serialization_speed(): ser = _types._JsonValuesMixin(marshal=User.marshal, unmarshal=User.parse) ops_max, total = _serialization_speed(ser, _serializer_run) # cpy36: 19564.6 objects/sec max, 135456153 bytes total assert ops_max > 1000 assert total > 1000000 def test_cbor_serialization_speed(): ser = _types._CborValuesMixin(marshal=User.marshal, unmarshal=User.parse) ops_max, total = _serialization_speed(ser, _serializer_run) # cpy36: 7787.4 objects/sec max, 97815364 bytes total assert ops_max > 1000 assert total > 1000000 def test_pickle_serialization_speed(): ser = _types._PickleValuesMixin() ops_max, total = _serialization_speed(ser, _serializer_run) # cpy36: 33586.0 objects/sec max, 137738869 bytes total assert ops_max > 1000 assert total > 1000000 def test_flatbuffer_serialization_speed(): ser = _types._FlatBuffersValuesMixin(build=UserFbs.build, cast=UserFbs.cast) ops_max, total = _serialization_speed(ser, _serializer_run_fbs) assert ops_max > 1000 assert total > 1000000 if __name__ == '__main__': from typing import List sers: List[object] = [] sers.append(_types._JsonValuesMixin(marshal=User.marshal, unmarshal=User.parse)) sers.append(_types._CborValuesMixin(marshal=User.marshal, unmarshal=User.parse)) sers.append(_types._PickleValuesMixin()) sers.append(_types._FlatBuffersValuesMixin(build=UserFbs.build, cast=UserFbs.cast)) for ser in sers: print(_serialization_speed(ser, _serializer_run)) 
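
# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the measurement idea implemented by
# _serialization_speed() above: serialize M freshly created test objects,
# time the loop with timeit, and derive objects/sec plus total bytes. The
# helper name measure_ops_per_sec is hypothetical (not part of zlmdb); the
# only serializer API assumed is _serialize_value, which _serializer_run
# above already uses.

import timeit


def measure_ops_per_sec(serializer, make_obj, M=10000):
    # accumulate total serialized bytes across the timed loop
    total = {'bytes': 0}

    def _run():
        data = serializer._serialize_value(make_obj())
        total['bytes'] += len(data)

    secs = timeit.timeit(_run, number=M)
    return round(float(M) / secs, 1), total['bytes']

# Example usage, mirroring test_cbor_serialization_speed() above:
#
#   ser = _types._CborValuesMixin(marshal=User.marshal, unmarshal=User.parse)
#   ops, nbytes = measure_ops_per_sec(ser, User.create_test_user)
# ---------------------------------------------------------------------------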
zlmdb-22.6.1/zlmdb/tests/test_basic.py000066400000000000000000000153661426100523600176660ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import sys import os import pytest import txaio txaio.use_twisted() import zlmdb # noqa try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore sys.path.append(os.path.dirname(os.path.abspath(__file__))) if sys.version_info >= (3, 6): from _schema_py3 import User, Schema2 else: from _schema_py2 import User, Schema2 @pytest.fixture(scope='module') def testset1(): users = [] for i in range(1000): user = User.create_test_user(i) users.append(user) return users def test_open1(): with TemporaryDirectory() as dbpath: with zlmdb.Database(dbpath) as db: assert db.maxsize == 10485760 assert db.is_sync assert db.is_open assert not db.is_readonly assert not db.is_writemap def test_open2(): with TemporaryDirectory() as dbpath: with zlmdb.Database(dbpath) as db: assert db.is_open with zlmdb.Database(dbpath) as db: assert db.is_open def test_open3(): with TemporaryDirectory() as dbpath: db = zlmdb.Database(dbpath) assert db.is_open try: zlmdb.Database(dbpath) assert False, 'opening same dbpath twice in same process did not throw an exception' except RuntimeError as e: # RuntimeError: tried to open same dbpath "/tmp/tmpwc1dw5c8" twice within same process assert 'twice within same process' in str(e), 'exception did not contain text we expected: {}'.format(e) except Exception as e: assert False, 'unexpected exception {} raised'.format(e) def test_open4(): with TemporaryDirectory() as dbpath: db1 = zlmdb.Database.open(dbpath) assert db1.is_open db2 = zlmdb.Database.open(dbpath) assert db2.is_open assert db1 == db2 def test_transaction(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) with zlmdb.Database(dbpath) as db: with db.begin() as txn: print('transaction open', txn.id()) print('transaction committed') print('database closed') def test_save_load(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema2() user = User.create_test_user() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: schema.users[txn,
user.oid] = user print('user saved') _user = schema.users[txn, user.oid] assert _user assert user == _user print('user loaded') print('transaction committed') print('database closed') def test_save_load_many_1(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema2() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user cnt = schema.users.count(txn) print('user saved:', cnt) assert cnt == len(testset1) with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) with zlmdb.Database(dbpath) as db: with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) def test_save_load_many_2(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema2() oids = [] with zlmdb.Database(dbpath) as db: # write records in a 1st transaction with db.begin(write=True) as txn: c = 0 for user in testset1: schema.users[txn, user.oid] = user oids.append(user.oid) c += 1 assert c == len(testset1) print('[1] successfully stored {} records'.format(c)) # in the same transaction, read back records c = 0 for oid in oids: user = schema.users[txn, oid] if user: c += 1 assert c == len(testset1) print('[1] successfully loaded {} records'.format(c)) # in a new transaction, read back records c = 0 with db.begin() as txn: for oid in oids: user = schema.users[txn, oid] if user: c += 1 assert c == len(testset1) print('[2] successfully loaded {} records'.format(c)) # in a new database environment (and hence new transaction), read back records with zlmdb.Database(dbpath) as db: with db.begin() as txn: count = schema.users.count(txn) assert count == len(testset1) print('total records:', count) c = 0 for oid in oids: user = schema.users[txn, oid] if user: c += 1 assert c == len(testset1) print('[3] successfully loaded {} records'.format(c)) zlmdb-22.6.1/zlmdb/tests/test_etcd.py000066400000000000000000000207301426100523600175150ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import os import sys import pytest import txaio txaio.use_twisted() import zlmdb # noqa try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore sys.path.append(os.path.dirname(os.path.abspath(__file__))) if sys.version_info >= (3, 6): from _schema_py3 import User, Schema1, Schema3, Schema4 else: from _schema_py2 import User, Schema1, Schema3, Schema4 @pytest.fixture(scope='module') def testset1(): users = [] for i in range(1000): user = User.create_test_user(i) users.append(user) return users def test_truncate_table(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema1() stats = zlmdb.TransactionStats() tabs = [ schema.tab_oid_json, schema.tab_str_json, schema.tab_uuid_json, schema.tab_oid_cbor, schema.tab_str_cbor, schema.tab_uuid_cbor, schema.tab_oid_pickle, schema.tab_str_pickle, schema.tab_uuid_pickle, ] with zlmdb.Database(dbpath) as db: with db.begin(write=True, stats=stats) as txn: for tab in tabs: tab.truncate(txn) print(stats.puts) print(stats.dels) def test_fill_check(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user with zlmdb.Database(dbpath) as db: with db.begin() as txn: for user in testset1: _user = schema.users[txn, user.authid] assert _user assert _user == user def test_select(testset1): testset1_keys = set([user.authid for user in testset1]) with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user with zlmdb.Database(dbpath) as db: with db.begin() as txn: i = 0 for authid, user in schema.users.select(txn): i += 1 assert user assert authid == user.authid assert authid in testset1_keys def test_count_all(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: # count on empty table with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == 0 # fill (and count on each insert) with db.begin(write=True) as txn: i = 0 for user in testset1: schema.users[txn, user.authid] = user i += 1 # table count within filling transaction cnt = schema.users.count(txn) assert cnt == i # table count within transaction cnt = schema.users.count(txn) assert cnt == len(testset1) # table count in new transaction with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) # table count in new connection with zlmdb.Database(dbpath) as db: with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) def test_count_prefix(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user n = len(testset1) tests = [ (None, n), (u'', n), (u'test-', n), (u'test-1', 111), (u'test-11', 11), (u'test-111', 1), ] with zlmdb.Database(dbpath) as db: with db.begin() as txn: for prefix, num in tests: cnt = schema.users.count(txn, prefix) assert 
cnt == num def test_fill_with_indexes(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user # check indexes have been written to (in addition to the table itself) num_indexes = len(schema.users.indexes()) assert stats.puts == len(testset1) * (1 + num_indexes) def test_truncate_table_with_index(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user stats = zlmdb.TransactionStats() with zlmdb.Database(dbpath) as db: with db.begin(write=True, stats=stats) as txn: records = schema.users.truncate(txn) print('table truncated:', records) print(stats.puts) print(stats.dels) def test_rebuild_index(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: records = schema.users.rebuild_index(txn, 'idx1') print('\nrebuilt specific index "idx1" on "users": {} records affected'.format(records)) def test_rebuild_all_indexes(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: records = schema.users.rebuild_indexes(txn) print('\nrebuilt all indexes on "users": {} records affected'.format(records)) zlmdb-22.6.1/zlmdb/tests/test_lmdb.py000066400000000000000000000131271426100523600175160ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.
# ############################################################################### import random import os import lmdb import struct import platform import pytest try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore def test_lmdb_create(): """ Test creation of LMDB database. """ with TemporaryDirectory() as dbpath: env = lmdb.open(dbpath) with env.begin() as txn: assert txn.id() == 0 def test_lmdb_insert_empty_key_raises(): """ Test that LMDB raises on inserting record with empty (bytes) key. """ with TemporaryDirectory() as dbpath: env = lmdb.open(dbpath) with env.begin(write=True) as txn: key = b'' value = random.randint(0, 2**32 - 1) data = struct.pack('= (3, 6): print('Using _schema_py3 !') from _schema_py3 import User, Schema4 else: print('Using _schema_py2 !') from _schema_py2 import User, Schema4 @pytest.fixture(scope='function') def testset1(N=10, M=100): users = [] for j in range(N): for i in range(M): user = User.create_test_user(oid=j * M + i, realm_oid=j) users.append(user) return users def test_fill_indexes(testset1): """ Fill a table with multiple indexes with data records that have all columns filled with NON-NULL values. """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() # fill table, which also triggers inserts into the index pmaps with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user # check indexes have been written to (in addition to the table itself) num_indexes = len(schema.users.indexes()) assert stats.puts == len(testset1) * (1 + num_indexes) # check saved objects with db.begin() as txn: for user in testset1: obj = schema.users[txn, user.oid] assert user == obj # check unique indexes with db.begin() as txn: for user in testset1: user_oid = schema.idx_users_by_authid[txn, user.authid] assert user.oid == user_oid user_oid = schema.idx_users_by_email[txn, user.email] assert user.oid == user_oid user_oid = schema.idx_users_by_icecream[txn, (user.icecream, user.oid)] assert user.oid == user_oid user_oid = schema.idx_users_by_mrealm_authid[txn, (user.mrealm, user.authid)] assert user.oid == user_oid user_oid = schema.idx_users_by_mrealm_notnull_authid[txn, (user.mrealm_notnull, user.authid)] assert user.oid == user_oid # check non-unique index users_by_icecream = {} for user in testset1: if user.icecream not in users_by_icecream: users_by_icecream[user.icecream] = set() users_by_icecream[user.icecream].add(user.oid) MAX_OID = 9007199254740992 with db.begin() as txn: for icecream in users_by_icecream: for _, user_oid in schema.idx_users_by_icecream.select(txn, from_key=(icecream, 0), to_key=(icecream, MAX_OID + 1), return_values=False): assert user_oid in users_by_icecream[icecream] def test_fill_indexes_nullable(testset1): """ Test filling a table with multiple indexes, some of which are on NULLable columns, with records whose values in those columns are actually NULL.
""" with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: _user = deepcopy(user) # "user.email" is an indexed column that is nullable _user.email = None # "user.mrealm" is an indexed (composite) column that is nullable _user.mrealm = None schema.users[txn, _user.oid] = _user # check indexes has been written to (in addition to the table itself) num_indexes = len(schema.users.indexes()) # because we have set 2 indexed columns to NULL, we need to subtract those 2 # from the total number of indexes assert stats.puts == len(testset1) * (1 + num_indexes - 2) # check saved objects with db.begin() as txn: for user in testset1: _user = deepcopy(user) _user.email = None _user.mrealm = None obj = schema.users[txn, _user.oid] assert _user == obj # check unique indexes with db.begin() as txn: for user in testset1: # check one of the indexes that was indeed filled user_oid = schema.idx_users_by_authid[txn, user.authid] assert user.oid == user_oid # check indexes that have NOT been filled user_oid = schema.idx_users_by_email[txn, user.email] assert user_oid is None user_oid = schema.idx_users_by_mrealm_authid[txn, (user.mrealm, user.authid)] assert user_oid is None def test_fill_index_non_nullable_raises(testset1): """ Insert records into a table with a unique-non-nullable index with the record having a NULL value in the indexed column raises an exception. """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: # "user.authid" is an indexed column that is non-nullable _user = deepcopy(user) _user.authid = None with pytest.raises(zlmdb.NullValueConstraint): schema.users[txn, _user.oid] = _user # "user.mrealm_notnull" is an indexed (composite) column that is non-nullable _user = deepcopy(user) _user.mrealm_notnull = None with pytest.raises(zlmdb.NullValueConstraint): schema.users[txn, _user.oid] = _user def test_fill_non_unique_indexes(testset1): """ Insert records into a table with a non-unique, non-nullable indexed column. """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user # check non-unique indexes with db.begin() as txn: for j in range(10): user_oids = list( schema.idx_users_by_realm.select(txn, return_keys=False, from_key=(j, 0), to_key=(j + 1, 0))) assert list(range(j * 100, (j + 1) * 100)) == user_oids def test_delete_indexes(testset1): """ Insert records into a table with indexes, delete data records and check that index records have been deleted as a consequence too. 
""" with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() # insert data records with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user # check that all index records have been deleted as well with db.begin(write=True) as txn: for user in testset1: del schema.users[txn, user.oid] user_oid = schema.idx_users_by_authid[txn, user.authid] assert user_oid is None user_oid = schema.idx_users_by_email[txn, user.email] assert user_oid is None user_oid = schema.idx_users_by_realm[txn, (user.realm_oid, user.oid)] assert user_oid is None user_oid = schema.idx_users_by_icecream[txn, (user.icecream, user.oid)] assert user_oid is None user_oid = schema.idx_users_by_mrealm_authid[txn, (user.mrealm, user.authid)] assert user_oid is None user_oid = schema.idx_users_by_mrealm_notnull_authid[txn, (user.mrealm_notnull, user.authid)] assert user_oid is None def test_delete_nonunique_indexes(testset1): """ Insert records into a table with a non-unique index, delete data records and check that index records have been deleted as a consequence too. """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user with db.begin(write=True) as txn: for user in testset1: del schema.users[txn, user.oid] with db.begin() as txn: for j in range(10): user_oids = list( schema.idx_users_by_realm.select(txn, return_keys=False, from_key=(j, 0), to_key=(j + 1, 0))) assert [] == user_oids def test_delete_nonindexes2(testset1): """ WARNING: quadratic run-time (in testset size) """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user with db.begin(write=True) as txn: for j in range(10): fullset = set(range(j * 100, (j + 1) * 100)) for i in range(100): user_oid = j * 100 + i del schema.users[txn, user_oid] fullset.discard(user_oid) user_oids = set( schema.idx_users_by_realm.select(txn, return_keys=False, from_key=(j, 0), to_key=(j + 1, 0))) assert fullset == user_oids def test_set_null_indexes_nullable(testset1): """ Fill table with indexed column (unique-nullable) with indexed column values NULL, then (in a 2nd transaction) set the indexed column to NON-NULL value and check that index records are deleted. 
""" with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() # fill table with NON-NULLs in indexed column with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user # now update table with NULLs in indexed column with db.begin(write=True, stats=stats) as txn: for user in testset1: _user = schema.users[txn, user.oid] _user.email = None schema.users[txn, _user.oid] = _user # check that the table records have their indexed # column values updated to NULLs with db.begin() as txn: for user in testset1: _user = deepcopy(user) _user.email = None obj = schema.users[txn, user.oid] assert _user == obj # check that the index records that previously existed # have been deleted (as the indexed column values have been # set to NULLs) with db.begin() as txn: for user in testset1: user_oid = schema.idx_users_by_authid[txn, user.authid] assert user.oid == user_oid user_oid = schema.idx_users_by_email[txn, user.email] assert user_oid is None def test_set_notnull_indexes_nullable(testset1): """ Fill table with indexed column (unique-nullable) with indexed column values NON-NULL, then (in a 2nd transaction) set the indexed column to NULL value and check that index records are created. """ with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() # fill table with NULLs in indexed column with db.begin(write=True) as txn: for user in testset1: _user = deepcopy(user) _user.email = None schema.users[txn, _user.oid] = _user # now update table with NON-NULLs in indexed column with db.begin(write=True, stats=stats) as txn: for user in testset1: _user = schema.users[txn, user.oid] _user.email = user.email schema.users[txn, _user.oid] = _user # check that the table records have their indexed # column values updated to NON-NULLs with db.begin() as txn: for user in testset1: obj = schema.users[txn, user.oid] assert user == obj # check that the index records that previously not existed # have been created (as the indexed column values have been # set to NON-NULLs) with db.begin() as txn: for user in testset1: user_oid = schema.idx_users_by_authid[txn, user.authid] assert user.oid == user_oid user_oid = schema.idx_users_by_email[txn, user.email] assert user.oid == user_oid def test_truncate_table_with_index(testset1): """ Fill a table with records that has indexes, truncate the table and check that all index records have been deleted as well. 
""" with TemporaryDirectory() as dbpath: schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user stats = zlmdb.TransactionStats() with zlmdb.Database(dbpath) as db: with db.begin(write=True, stats=stats) as txn: records = schema.users.truncate(txn) assert records == len(testset1) * (len(schema.users.indexes()) + 1) assert stats.dels == records assert stats.puts == 0 zlmdb-22.6.1/zlmdb/tests/test_pmap_types.py000066400000000000000000000060301426100523600207540ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import os import sys try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore import txaio txaio.use_twisted() import zlmdb # noqa sys.path.append(os.path.dirname(os.path.abspath(__file__))) if sys.version_info >= (3, 6): from _schema_py3 import User, Schema1 else: from _schema_py2 import User, Schema1 def test_pmap_value_types(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema1() n = 100 stats = zlmdb.TransactionStats() tabs = [ (schema.tab_oid_json, schema.tab_str_json, schema.tab_uuid_json), (schema.tab_oid_cbor, schema.tab_str_cbor, schema.tab_uuid_cbor), (schema.tab_oid_pickle, schema.tab_str_pickle, schema.tab_uuid_pickle), ] with zlmdb.Database(dbpath) as db: for tab_oid, tab_str, tab_uuid in tabs: with db.begin(write=True, stats=stats) as txn: for i in range(n): user = User.create_test_user(i) tab_oid[txn, user.oid] = user tab_str[txn, user.authid] = user tab_uuid[txn, user.uuid] = user print('transaction committed') assert stats.puts == n * 3 assert stats.dels == 0 stats.reset() with db.begin() as txn: cnt = tab_oid.count(txn) assert cnt == n cnt = tab_str.count(txn) assert cnt == n cnt = tab_uuid.count(txn) assert cnt == n print('database closed') zlmdb-22.6.1/zlmdb/tests/test_pmaps.py000066400000000000000000000221661426100523600177230ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ############################################################################### import os import sys import pytest import txaio txaio.use_twisted() import zlmdb # noqa try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore sys.path.append(os.path.dirname(os.path.abspath(__file__))) if sys.version_info >= (3, 6): from _schema_py3 import User, Schema1, Schema3, Schema4 else: from _schema_py2 import User, Schema1, Schema3, Schema4 @pytest.fixture(scope='module') def testset1(): users = [] for j in range(10): for i in range(100): user = User.create_test_user(oid=j * 100 + i, realm_oid=j) users.append(user) return users def test_truncate_table(): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema1() stats = zlmdb.TransactionStats() tabs = [ schema.tab_oid_json, schema.tab_str_json, schema.tab_uuid_json, schema.tab_oid_cbor, schema.tab_str_cbor, schema.tab_uuid_cbor, schema.tab_oid_pickle, schema.tab_str_pickle, schema.tab_uuid_pickle, ] with zlmdb.Database(dbpath) as db: with db.begin(write=True, stats=stats) as txn: for tab in tabs: tab.truncate(txn) print(stats.puts) print(stats.dels) def test_fill_check(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user with zlmdb.Database(dbpath) as db: with db.begin() as txn: for user in testset1: _user = schema.users[txn, user.authid] assert _user assert _user == user def test_fill_check2(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user with zlmdb.Database(dbpath) as db: with db.begin() as txn: for user in testset1: _user = schema.users[txn, user.oid] assert _user assert _user == user def test_select(testset1): testset1_keys = set([user.authid for user in testset1]) with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user with zlmdb.Database(dbpath) as db: with db.begin() as txn: i = 0 for authid, user in schema.users.select(txn): i += 1 assert user assert authid == user.authid assert authid in testset1_keys def test_count_all(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: # count on empty table with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == 0 # fill (and count on each insert) with db.begin(write=True) as txn: i = 0 for user in testset1: schema.users[txn, user.authid] = user i += 1 # table count within filling transaction cnt = schema.users.count(txn) assert cnt == i # table count within transaction cnt = schema.users.count(txn) assert cnt == len(testset1) # table count in new transaction with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) # table count in new connection with zlmdb.Database(dbpath) as db: with db.begin() as txn: cnt = schema.users.count(txn) assert cnt == len(testset1) def test_count_prefix(testset1): with 
TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema3() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.authid] = user n = len(testset1) tests = [ (None, n), (u'', n), (u'test-', n), (u'test-1', 111), (u'test-11', 11), (u'test-111', 1), ] with zlmdb.Database(dbpath) as db: with db.begin() as txn: for prefix, num in tests: cnt = schema.users.count(txn, prefix) assert cnt == num def test_fill_with_indexes(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: stats = zlmdb.TransactionStats() with db.begin(write=True, stats=stats) as txn: for user in testset1: schema.users[txn, user.oid] = user # check indexes have been written to (in addition to the table itself) num_indexes = len(schema.users.indexes()) assert stats.puts == len(testset1) * (1 + num_indexes) def test_truncate_table_with_index(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user stats = zlmdb.TransactionStats() with zlmdb.Database(dbpath) as db: with db.begin(write=True, stats=stats) as txn: records = schema.users.truncate(txn) print('table truncated:', records) print(stats.puts) print(stats.dels) def test_rebuild_index(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: records = schema.users.rebuild_index(txn, 'idx1') print('\nrebuilt specific index "idx1" on "users": {} records affected'.format(records)) def test_rebuild_all_indexes(testset1): with TemporaryDirectory() as dbpath: print('Using temporary directory {} for database'.format(dbpath)) schema = Schema4() with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: for user in testset1: schema.users[txn, user.oid] = user with zlmdb.Database(dbpath) as db: with db.begin(write=True) as txn: records = schema.users.rebuild_indexes(txn) print('\nrebuilt all indexes on "users": {} records affected'.format(records)) zlmdb-22.6.1/zlmdb/tests/test_select.py000066400000000000000000000553061426100523600200630ustar00rootroot00000000000000############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software.
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import os import uuid import random import struct import flatbuffers import numpy as np import pytest import zlmdb # noqa from _schema_mnode_log import Schema, MNodeLog import txaio txaio.use_twisted() from txaio import time_ns # noqa try: from tempfile import TemporaryDirectory except ImportError: from backports.tempfile import TemporaryDirectory # type:ignore if 'COVERAGE_PROCESS_START' in os.environ: COVERAGE = True else: COVERAGE = False @pytest.fixture(scope='function') def builder(): _builder = flatbuffers.Builder(0) return _builder def rfloat(): return struct.unpack('>f', struct.pack('>f', random.random() * 10**10))[0] def fill_mnodelog(obj): obj.timestamp = np.datetime64(time_ns(), 'ns') + np.timedelta64(random.randint(1, 120), 's') obj.node_id = uuid.uuid4() obj.run_id = uuid.uuid4() obj.state = random.randint(1, 2) obj.ended = obj.timestamp + np.timedelta64(random.randint(1, 120), 's') obj.session = random.randint(1, 9007199254740992) obj.sent = obj.timestamp obj.seq = random.randint(1, 10000) obj.routers = random.randint(1, 32) obj.containers = random.randint(1, 32) obj.guests = random.randint(1, 32) obj.proxies = random.randint(1, 32) obj.marketmakers = random.randint(1, 32) obj.cpu_ctx_switches = random.randint(1, 1000000) # we can't just use random() here, since it won't work for roundtrip # data checking (eg 33.42830630594208 != 33.428306579589844) # obj.cpu_freq = random.random() * 100. 
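    # rfloat() pre-quantizes the random value through a float32 ('>f')
    # pack/unpack round-trip, so that (assuming these fields are stored as
    # float32 in the FlatBuffers schema) the value read back from the
    # database compares equal to the value originally written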
obj.cpu_freq = rfloat() obj.cpu_guest = rfloat() obj.cpu_guest_nice = rfloat() obj.cpu_idle = rfloat() obj.cpu_interrupts = random.randint(1, 100000) obj.cpu_iotwait = rfloat() obj.cpu_irq = rfloat() obj.cpu_nice = rfloat() obj.cpu_soft_interrupts = random.randint(1, 100000) obj.cpu_softirq = rfloat() obj.cpu_steal = rfloat() obj.cpu_system = rfloat() obj.cpu_user = rfloat() obj.network_bytes_recv = random.randint(1, 2**32) obj.network_bytes_sent = random.randint(1, 2**32) obj.network_connection_af_inet = random.randint(1, 1000) obj.network_connection_af_inet6 = random.randint(1, 1000) obj.network_connection_af_unix = random.randint(1, 1000) obj.network_dropin = random.randint(1, 10000) obj.network_dropout = random.randint(1, 10000) obj.network_errin = random.randint(1, 10000) obj.network_errout = random.randint(1, 10000) obj.network_packets_recv = random.randint(1, 2**32) obj.network_packets_sent = random.randint(1, 2**32) M = 32 * 2**30 obj.memory_active = random.randint(1, M) obj.memory_available = random.randint(1, M) obj.memory_buffers = random.randint(1, M) obj.memory_cached = random.randint(1, M) obj.memory_free = random.randint(1, M) obj.memory_inactive = random.randint(1, M) obj.memory_percent = rfloat() obj.memory_shared = random.randint(1, M) obj.memory_slab = random.randint(1, M) obj.memory_total = random.randint(1, M) obj.memory_used = random.randint(1, M) M = 10 * 10 obj.disk_busy_time = random.randint(1, M) obj.disk_read_bytes = random.randint(1, M) obj.disk_read_count = random.randint(1, M) obj.disk_read_merged_count = random.randint(1, M) obj.disk_read_time = random.randint(1, M) obj.disk_write_bytes = random.randint(1, M) obj.disk_write_count = random.randint(1, M) obj.disk_write_merged_count = random.randint(1, M) obj.disk_write_time = random.randint(1, M) @pytest.fixture(scope='function') def mnodelog(): _mnodelog = MNodeLog() fill_mnodelog(_mnodelog) return _mnodelog def test_mnodelog_roundtrip(mnodelog, builder): # serialize to bytes (flatbuffers) from python object obj = mnodelog.build(builder) builder.Finish(obj) data = builder.Output() assert len(data) == 544 # create python object from bytes (flatbuffes) _mnodelog = MNodeLog.cast(data) assert mnodelog.timestamp == _mnodelog.timestamp assert mnodelog.node_id == _mnodelog.node_id assert mnodelog.run_id == _mnodelog.run_id assert mnodelog.state == _mnodelog.state assert mnodelog.ended == _mnodelog.ended assert mnodelog.session == _mnodelog.session assert mnodelog.sent == _mnodelog.sent assert mnodelog.seq == _mnodelog.seq assert mnodelog.routers == _mnodelog.routers assert mnodelog.containers == _mnodelog.containers assert mnodelog.guests == _mnodelog.guests assert mnodelog.proxies == _mnodelog.proxies assert mnodelog.marketmakers == _mnodelog.marketmakers assert mnodelog.cpu_ctx_switches == _mnodelog.cpu_ctx_switches assert mnodelog.cpu_freq == _mnodelog.cpu_freq assert mnodelog.cpu_guest == _mnodelog.cpu_guest assert mnodelog.cpu_guest_nice == _mnodelog.cpu_guest_nice assert mnodelog.cpu_idle == _mnodelog.cpu_idle assert mnodelog.cpu_interrupts == _mnodelog.cpu_interrupts assert mnodelog.cpu_iotwait == _mnodelog.cpu_iotwait assert mnodelog.cpu_irq == _mnodelog.cpu_irq assert mnodelog.cpu_nice == _mnodelog.cpu_nice assert mnodelog.cpu_soft_interrupts == _mnodelog.cpu_soft_interrupts assert mnodelog.cpu_softirq == _mnodelog.cpu_softirq assert mnodelog.cpu_steal == _mnodelog.cpu_steal assert mnodelog.cpu_system == _mnodelog.cpu_system assert mnodelog.cpu_user == _mnodelog.cpu_user assert mnodelog.network_bytes_recv == 
_mnodelog.network_bytes_recv assert mnodelog.network_bytes_sent == _mnodelog.network_bytes_sent assert mnodelog.network_connection_af_inet == _mnodelog.network_connection_af_inet assert mnodelog.network_connection_af_inet6 == _mnodelog.network_connection_af_inet6 assert mnodelog.network_connection_af_unix == _mnodelog.network_connection_af_unix assert mnodelog.network_dropin == _mnodelog.network_dropin assert mnodelog.network_dropout == _mnodelog.network_dropout assert mnodelog.network_errin == _mnodelog.network_errin assert mnodelog.network_errout == _mnodelog.network_errout assert mnodelog.network_packets_recv == _mnodelog.network_packets_recv assert mnodelog.network_packets_sent == _mnodelog.network_packets_sent assert mnodelog.memory_active == _mnodelog.memory_active assert mnodelog.memory_available == _mnodelog.memory_available assert mnodelog.memory_buffers == _mnodelog.memory_buffers assert mnodelog.memory_cached == _mnodelog.memory_cached assert mnodelog.memory_free == _mnodelog.memory_free assert mnodelog.memory_inactive == _mnodelog.memory_inactive assert mnodelog.memory_percent == _mnodelog.memory_percent assert mnodelog.memory_shared == _mnodelog.memory_shared assert mnodelog.memory_slab == _mnodelog.memory_slab assert mnodelog.memory_total == _mnodelog.memory_total assert mnodelog.memory_used == _mnodelog.memory_used assert mnodelog.disk_busy_time == _mnodelog.disk_busy_time assert mnodelog.disk_read_bytes == _mnodelog.disk_read_bytes assert mnodelog.disk_read_count == _mnodelog.disk_read_count assert mnodelog.disk_read_merged_count == _mnodelog.disk_read_merged_count assert mnodelog.disk_read_time == _mnodelog.disk_read_time assert mnodelog.disk_write_bytes == _mnodelog.disk_write_bytes assert mnodelog.disk_write_count == _mnodelog.disk_write_count assert mnodelog.disk_write_merged_count == _mnodelog.disk_write_merged_count assert mnodelog.disk_write_time == _mnodelog.disk_write_time def test_mnodelog_insert(N=1000): with TemporaryDirectory() as dbpath: with zlmdb.Database(dbpath) as db: schema = Schema.attach(db) data = {} # insert test data # with db.begin(write=True) as txn: for i in range(N): rec = MNodeLog() fill_mnodelog(rec) key = (rec.timestamp, rec.node_id) schema.mnode_logs[txn, key] = rec data[key] = rec # do test scans over inserted data # with db.begin() as txn: cnt = schema.mnode_logs.count(txn) assert cnt == N # do a simple full scan and compare to original data # for mnodelog in schema.mnode_logs.select(txn, return_keys=False): key = (mnodelog.timestamp, mnodelog.node_id) _mnodelog = data.get(key, None) # check that we have the record in the original data assert _mnodelog # check that the record data is equal to the original data assert mnodelog.timestamp == _mnodelog.timestamp assert mnodelog.node_id == _mnodelog.node_id assert mnodelog.run_id == _mnodelog.run_id assert mnodelog.state == _mnodelog.state assert mnodelog.ended == _mnodelog.ended assert mnodelog.session == _mnodelog.session assert mnodelog.sent == _mnodelog.sent assert mnodelog.seq == _mnodelog.seq assert mnodelog.routers == _mnodelog.routers assert mnodelog.containers == _mnodelog.containers assert mnodelog.guests == _mnodelog.guests assert mnodelog.proxies == _mnodelog.proxies assert mnodelog.marketmakers == _mnodelog.marketmakers assert mnodelog.cpu_ctx_switches == _mnodelog.cpu_ctx_switches assert mnodelog.cpu_freq == _mnodelog.cpu_freq assert mnodelog.cpu_guest == _mnodelog.cpu_guest assert mnodelog.cpu_guest_nice == _mnodelog.cpu_guest_nice assert mnodelog.cpu_idle == _mnodelog.cpu_idle 
assert mnodelog.cpu_interrupts == _mnodelog.cpu_interrupts assert mnodelog.cpu_iotwait == _mnodelog.cpu_iotwait assert mnodelog.cpu_irq == _mnodelog.cpu_irq assert mnodelog.cpu_nice == _mnodelog.cpu_nice assert mnodelog.cpu_soft_interrupts == _mnodelog.cpu_soft_interrupts assert mnodelog.cpu_softirq == _mnodelog.cpu_softirq assert mnodelog.cpu_steal == _mnodelog.cpu_steal assert mnodelog.cpu_system == _mnodelog.cpu_system assert mnodelog.cpu_user == _mnodelog.cpu_user assert mnodelog.network_bytes_recv == _mnodelog.network_bytes_recv assert mnodelog.network_bytes_sent == _mnodelog.network_bytes_sent assert mnodelog.network_connection_af_inet == _mnodelog.network_connection_af_inet assert mnodelog.network_connection_af_inet6 == _mnodelog.network_connection_af_inet6 assert mnodelog.network_connection_af_unix == _mnodelog.network_connection_af_unix assert mnodelog.network_dropin == _mnodelog.network_dropin assert mnodelog.network_dropout == _mnodelog.network_dropout assert mnodelog.network_errin == _mnodelog.network_errin assert mnodelog.network_errout == _mnodelog.network_errout assert mnodelog.network_packets_recv == _mnodelog.network_packets_recv assert mnodelog.network_packets_sent == _mnodelog.network_packets_sent assert mnodelog.memory_active == _mnodelog.memory_active assert mnodelog.memory_available == _mnodelog.memory_available assert mnodelog.memory_buffers == _mnodelog.memory_buffers assert mnodelog.memory_cached == _mnodelog.memory_cached assert mnodelog.memory_free == _mnodelog.memory_free assert mnodelog.memory_inactive == _mnodelog.memory_inactive assert mnodelog.memory_percent == _mnodelog.memory_percent assert mnodelog.memory_shared == _mnodelog.memory_shared assert mnodelog.memory_slab == _mnodelog.memory_slab assert mnodelog.memory_total == _mnodelog.memory_total assert mnodelog.memory_used == _mnodelog.memory_used assert mnodelog.disk_busy_time == _mnodelog.disk_busy_time assert mnodelog.disk_read_bytes == _mnodelog.disk_read_bytes assert mnodelog.disk_read_count == _mnodelog.disk_read_count assert mnodelog.disk_read_merged_count == _mnodelog.disk_read_merged_count assert mnodelog.disk_read_time == _mnodelog.disk_read_time assert mnodelog.disk_write_bytes == _mnodelog.disk_write_bytes assert mnodelog.disk_write_count == _mnodelog.disk_write_count assert mnodelog.disk_write_merged_count == _mnodelog.disk_write_merged_count assert mnodelog.disk_write_time == _mnodelog.disk_write_time def test_mnodelog_queries(N=1000): with TemporaryDirectory() as dbpath: with zlmdb.Database(dbpath) as db: schema = Schema.attach(db) data = {} # insert test data # with db.begin(write=True) as txn: for i in range(N): rec = MNodeLog() fill_mnodelog(rec) key = (rec.timestamp, rec.node_id) schema.mnode_logs[txn, key] = rec data[key] = rec # do test scans over inserted data # with db.begin() as txn: # do some record counting queries # skeys = sorted(data.keys()) for key in skeys: mnodelog = schema.mnode_logs[txn, key] assert mnodelog first_key = (np.datetime64(0, 'ns'), uuid.UUID(bytes=b'\0' * 16)) last_key = (np.datetime64(2**63 - 1, 'ns'), uuid.UUID(bytes=b'\xff' * 16)) cnt = schema.mnode_logs.count_range(txn, from_key=first_key, to_key=last_key) assert cnt == N cnt = schema.mnode_logs.count_range(txn, from_key=skeys[0], to_key=skeys[-1]) assert cnt == N - 1 from_key = skeys[0] to_key = (skeys[-1][0], uuid.UUID(bytes=b'\xff' * 16)) cnt = schema.mnode_logs.count_range(txn, from_key=from_key, to_key=to_key) assert cnt == N K = len(skeys) // 2 cnt = schema.mnode_logs.count_range(txn, 
from_key=skeys[0], to_key=skeys[K]) assert cnt == N - K K = 10 from_key = skeys[-K] to_key = (skeys[-1][0], uuid.UUID(bytes=b'\xff' * 16)) cnt = schema.mnode_logs.count_range(txn, from_key=from_key, to_key=to_key) assert cnt == K # do some scanning queries # # full scan keys1 = [] for key in schema.mnode_logs.select(txn, return_values=False, reverse=False): keys1.append(key) assert len(keys1) == N # full reverse scan keys2 = [] for key in schema.mnode_logs.select(txn, return_values=False, reverse=True): keys2.append(key) assert len(keys2) == N assert keys1 == list(reversed(keys2)) # scan [from_key, to_key[ keys1 = [] for key in schema.mnode_logs.select(txn, return_values=False, from_key=from_key, to_key=to_key, reverse=False): keys1.append(key) assert len(keys1) == K # reverse scan [from_key, to_key[ keys2 = [] for key in schema.mnode_logs.select(txn, return_values=False, from_key=from_key, to_key=to_key, reverse=True): keys2.append(key) assert len(keys2) == K assert keys1 == list(reversed(keys2)) K = len(skeys) // 2 anchor_key = skeys[K] # scan [from_key, .. keys1 = [] for key in schema.mnode_logs.select(txn, return_values=False, from_key=anchor_key, reverse=False): keys1.append(key) assert len(keys1) == K assert skeys[K:] == keys1 # reverse scan ..., to_key[ keys2 = [] for key in schema.mnode_logs.select(txn, return_values=False, to_key=anchor_key, reverse=True): keys2.append(key) assert len(keys2) == K assert skeys[:K] == list(reversed(keys2)) # scan a range with 2 boundaries # K = 10 from_key = skeys[K] to_key = skeys[-K] _skeys = skeys[K:-K] L = len(_skeys) cnt = schema.mnode_logs.count_range(txn, from_key=from_key, to_key=to_key) assert cnt == L # scan [from_key, to_key[ keys1 = [] for key in schema.mnode_logs.select(txn, return_values=False, from_key=from_key, to_key=to_key, reverse=False): keys1.append(key) assert len(keys1) == L assert _skeys == keys1 # reverse scan [from_key, to_key[ keys2 = [] for key in schema.mnode_logs.select(txn, return_values=False, from_key=from_key, to_key=to_key, reverse=True): keys2.append(key) assert len(keys2) == L assert _skeys == list(reversed(keys2)) def _test_mnodelog_bigtable(N, M, K): with TemporaryDirectory() as dbpath: with zlmdb.Database(dbpath, maxsize=(5 * 2**30)) as db: schema = Schema.attach(db) data = {} print() # fill table # started = time_ns() with db.begin(write=True) as txn: for i in range(N): rec = MNodeLog() fill_mnodelog(rec) key = (rec.timestamp, rec.node_id) schema.mnode_logs[txn, key] = rec data[key] = rec duration = (time_ns() - started) / 1000000000. rps = int(round(N / duration)) duration = int(round(duration)) print('Inserted {} records in {} seconds [{} records/sec]'.format(N, duration, rps)) skeys = sorted(data.keys()) # random single record selects # if True: started = time_ns() with db.begin() as txn: for i in range(M): key = random.choice(skeys) mnodelog = schema.mnode_logs[txn, key] assert mnodelog duration = (time_ns() - started) / 1000000000. rps = int(round(M / duration)) duration = int(round(duration)) print('Selected {} records in {} seconds [{} records/sec]'.format(M, duration, rps)) # random range counts # if True: started = time_ns() with db.begin() as txn: for i in range(K): # we select a fixed range of (max) 1000 elements: i1 = random.randint(0, len(skeys) - 1) i2 = random.randint(i1, min(len(skeys) - 1, i1 + 1000)) key1 = skeys[i1] key2 = skeys[i2] cnt = schema.mnode_logs.count_range(txn, from_key=key1, to_key=key2) assert cnt == len(skeys[i1:i2]) duration = (time_ns() - started) / 1000000000. 
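                # time_ns() returns integer nanoseconds, so dividing by 1e9
                # converts the measured duration to float seconds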
rps = int(round(K / duration)) duration = int(round(duration)) print('Performed {} range counts in {} seconds [{} queries/sec]'.format(K, duration, rps)) def test_mnodelog_bigtable_size10k(): _test_mnodelog_bigtable(N=10000, M=500000, K=10000) @pytest.mark.skipif(COVERAGE, reason="skipping on coverage") def test_mnodelog_bigtable_size20k(): _test_mnodelog_bigtable(N=20000, M=1000000, K=20000) @pytest.mark.skipif(COVERAGE, reason="skipping on coverage") def test_mnodelog_bigtable_size40k(): _test_mnodelog_bigtable(N=40000, M=2000000, K=40000) @pytest.mark.skipif(COVERAGE, reason="skipping on coverage") def test_mnodelog_bigtable_size80k(): _test_mnodelog_bigtable(N=80000, M=4000000, K=80000) @pytest.mark.skipif(COVERAGE, reason="skipping on coverage") def test_mnodelog_bigtable_size160k(): _test_mnodelog_bigtable(N=160000, M=8000000, K=160000)
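# A minimal convenience entry point (a sketch: the smoke-test sizes chosen
# here are arbitrary, and this block is illustrative rather than part of the
# pytest-driven benchmarks above): run a small ad-hoc benchmark directly,
# without pytest, reusing the _test_mnodelog_bigtable() helper defined above.
if __name__ == '__main__':
    # quick smoke benchmark: 1k inserts, 10k random selects, 1k range counts
    _test_mnodelog_bigtable(N=1000, M=10000, K=1000)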