diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml index 9e569e03..8a68d2ae 100644 --- a/.github/workflows/publish-to-pypi.yml +++ b/.github/workflows/publish-to-pypi.yml @@ -1,39 +1,28 @@ -name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI - +name: Publish Python 🐍 distributions 📦 to PyPI on: - # Trigger the workflow on push , PR or release publish, - # but only for the main branch - push: - branches: - - master - pull_request: - branches: - - master - release: - types: - - published - - released - + release: + types: + - published jobs: - build-n-publish: - name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@master - - name: Set up Python 3.9 - uses: actions/setup-python@v1 - with: - python-version: 3.9 - - name: Build package - run: python setup.py sdist - - name: Publish distribution 📦 to Test PyPI - if: github.event_name == 'push' - uses: pypa/gh-action-pypi-publish@master - with: - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - - name: Publish distribution 📦 to PyPI - if: github.event_name == 'release' - uses: pypa/gh-action-pypi-publish@master - with: - password: ${{ secrets.PYPI_API_TOKEN }} + publish: + name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + id-token: write + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.8 + uses: actions/setup-python@v5 + with: + python-version: 3.8 + - name: Set Version + id: version + run: echo "${GITHUB_REF#refs/tags/}" > polypheny-connector-version.txt + - name: Create MANIFEST.in + run: echo "include polypheny-connector-version.txt" > MANIFEST.in + - name: Build package + run: python setup.py sdist + - name: Publish distribution 📦 to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/test.yaml 
b/.github/workflows/test.yaml new file mode 100644 index 00000000..3039f1bb --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,171 @@ +name: Runs the tests + +on: + push: + branches: + - proto-without-grpc + +jobs: + build-polypheny: + timeout-minutes: 15 + runs-on: ubuntu-latest + steps: + - uses: polypheny/GitHub-Action-Build-Polypheny@main + with: + branch: master + + test-system-python: + timeout-minutes: 10 + strategy: + fail-fast: false + matrix: + version: ['3.8', '3.9', '3.10', '3.11', '3.12'] # TODO: When dropping 3.8, adjust type hints for polypheny.connect + os: [ubuntu-latest, macos-latest, windows-latest] + runs-on: ${{ matrix.os }} + needs: build-polypheny + steps: + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.version }} + + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + + - name: Checkout driver + uses: actions/checkout@v4 + + - name: Set Version + id: version + run: echo "v0.0.0" > polypheny-connector-version.txt + + - name: Create MANIFEST.in + run: echo "include polypheny-connector-version.txt" > MANIFEST.in + + - name: Install driver dependencies + run: pip install -r requirements.txt + + - name: Load Polypheny JAR + uses: actions/download-artifact@v4 + with: + name: polypheny-jar + + - name: Run tests + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: pytest --doctest-modules -v + jar: polypheny.jar + autodocker: ${{ matrix.os != 'windows-latest' }} + timeout-minutes: 10 + + - name: Install locally + run: pip install . 
+ + - name: Run tests in docs + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: make doctest + jar: ../polypheny.jar + autodocker: ${{ matrix.os != 'windows-latest' }} + working-directory: docs + + test-stores: + timeout-minutes: 10 + strategy: + fail-fast: false + matrix: + adapter: [ mongodb, hsqldb, monetdb, postgresql, file, cottontail, neo4j ] + runs-on: ubuntu-latest + needs: build-polypheny + steps: + - name: Set store env variable + run: | + echo "DEFAULT_STORE=${{ matrix.adapter }}" >> $GITHUB_ENV + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + + - name: Checkout driver + uses: actions/checkout@v4 + + - name: Set Version + id: version + run: echo "v0.0.0" > polypheny-connector-version.txt + + - name: Create MANIFEST.in + run: echo "include polypheny-connector-version.txt" > MANIFEST.in + + - name: Install driver dependencies + run: pip install -r requirements.txt + + - name: Load Polypheny JAR + uses: actions/download-artifact@v4 + with: + name: polypheny-jar + + - name: Run tests + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: pytest --doctest-modules -v + jar: polypheny.jar + default-store: ${{ matrix.adapter }} + timeout-minutes: 10 + + - name: Install locally + run: pip install . 
+ + - name: Run tests in docs + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: make doctest + jar: ../polypheny.jar + working-directory: docs + default-store: ${{ matrix.adapter }} + + build-docs: + timeout-minutes: 2 + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Checkout driver + uses: actions/checkout@v4 + + - name: Set Version + id: version + run: echo "v0.0.0" > polypheny-connector-version.txt + + - name: Create MANIFEST.in + run: echo "include polypheny-connector-version.txt" > MANIFEST.in + + - name: Install driver dependencies + run: pip install -r requirements.txt + + - name: Install markdown generator + run: pip install sphinx-markdown-builder + + - name: Install locally + run: pip install . + + - name: Generate documentation (markdown) + run: make markdown + working-directory: docs + + - name: Generate documentation (HTML) + run: make html + working-directory: docs + + - name: Store docs + uses: actions/upload-artifact@v4 + with: + name: docs + path: docs/_build + diff --git a/.gitignore b/.gitignore index a2b90e53..48600707 100644 --- a/.gitignore +++ b/.gitignore @@ -19,11 +19,11 @@ sdist/ var/ .installed.cfg /dist/ -/doc/_build/ +/docs/_build/ /doc/build/ *.pyc debug.py -*.proto +#*.proto polypheny-avatica-tmp/ # core dumps @@ -32,4 +32,10 @@ core.* # Editor specific .vscode -.DS_STORE \ No newline at end of file +.DS_STORE + +venv/ + +.idea + +.coverage \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 5c67347a..00000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,43 +0,0 @@ -This package includes the Polyheny-DB Connector for Python, which conforms to the Python DB API 2.0 specification: -https://www.python.org/dev/peps/pep-0249/ - -Polypheny Documentation is available at: -https://polypheny.org/documentation/ - -Source code is also available at: https://github.com/polypheny/Polypheny-Connector-Python - -# Release Notes - 
-## 0.2.0 -### 24 January 2022 - -IMPROVEMENTS: - -* core: Added initial support for basic operations DQL, DML and DDL as well as transaction handling. - -BUG FIXES: - -* N/A - - -## 0.0.1 -### 1 November 2021 - -IMPROVEMENTS: - -* core: first dummy improvement - -BUG FIXES: - -* core: Fixed a deadlock -* cli: fixed invalid params - - -## 0.0.0 -### 31 October 2021 - -IMPROVEMENTS: - -* core: created project - -BUG FIXES: diff --git a/README.md b/README.md index aecb5124..3f9162d8 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ This enables Python programs to access Polypheny databases, using an API that is compliant with the [Python Database API Specification v2.0 (PEP 249)](https://www.python.org/dev/peps/pep-0249/). -------------------------- + ## Installation The recommended way to install the Python Connector for Polypheny is via pip: @@ -26,17 +26,19 @@ python setup.py install ``` -------------------------- ## Getting Started A few examples of the most common functionalities provided by the adapter: -```python +```python3 import polypheny # Connect to Polypheny -connection = polypheny.connect('localhost', 20591, user='pa', password='') +connection = polypheny.connect('127.0.0.1', 20590, username='pa', password='', transport='plain') + +# If Python 3.8 is used the connection needs to be specified as a tuple.
Uncomment the following line +#connection = polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain') # Get a cursor cursor = connection.cursor() @@ -50,32 +52,55 @@ connection.commit() # Execute a query cursor.execute("SELECT * from dummy") -result = cursor.fetchall() + +print("\nRelational output from SQL") +print("\t",cursor.fetchone()) + +# Accessing data using MQL +cursor.executeany('mongo', 'db.dummy.find()',namespace='public') + +return_mql = cursor.fetchone() +#json_output = json.loads( return_mql ) + + +print("\nPlain JSON output from MQL") +print("\t",return_mql) + + + +print("\nPlain JSON key 'text' from MQL return") +print("\t",return_mql["text"]) + + +cursor.execute("DROP TABLE dummy") + +# Print result +#for f in cursor: +# print(f) # Close the connection connection.close() ``` -An in-depth and more detailed documentation can be found [here](https://polypheny.org/documentation/Drivers/PythonConnector/). +An in-depth and more detailed documentation can be found [here](https://docs.polypheny.com/en/latest/drivers/python/overview). + + +## Tests +Run the tests with coverage report: +``` +coverage run --source polypheny -m pytest && coverage report -m +``` -------------------------- ## Roadmap See the [open issues](https://github.com/polypheny/Polypheny-DB/labels/A-python) for a list of proposed features (and known issues). -------------------------- + ## Contributing We highly welcome your contributions to the _Polypheny Connector for Python_. If you would like to contribute, please fork the repository and submit your changes as a pull request. Please consult our [Admin Repository](https://github.com/polypheny/Admin) and our [Website](https://polypheny.org) for guidelines and additional information. Please note that we have a [code of conduct](https://github.com/polypheny/Admin/blob/master/CODE_OF_CONDUCT.md). Please follow it in all your interactions with the project.
-------------------------- -## Credits -This work was influenced by the following projects: - -* [python-phoenixdb](https://github.com/lalinsky/python-phoenixdb) -* [snowflake-connector-python](https://github.com/snowflakedb/snowflake-connector-python) -------------------------- ## License The Apache 2.0 License diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..b31c677c --- /dev/null +++ b/conftest.py @@ -0,0 +1,45 @@ +import decimal +import sys + +import pytest +import polypheny + +@pytest.fixture(scope='function', autouse=True) +def add_cur(request, doctest_namespace): + # Only create tables, if we run a doctest + # In case of a doctest, request.function is None + if request.function is not None: + yield + return + + if sys.platform == 'win32': + con = polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain') + else: + con = polypheny.connect() + + cur = con.cursor() + cur.execute('DROP TABLE IF EXISTS fruits') + cur.execute('CREATE TABLE fruits(id INTEGER PRIMARY KEY, name VARCHAR(50)/*TEXT*/ NOT NULL)') + cur.execute('INSERT INTO fruits (id, name) VALUES (1, ?)', ('Orange',)) + con.commit() + cur.execute('DROP TABLE IF EXISTS demo') + cur.close() + + def myprint(*objects, sep=' ', end='\n', file=None, flush=False): + def toint(i): + if isinstance(i, decimal.Decimal) and int(i) == i: + return int(i) + else: + return i + if len(objects) == 1 and isinstance(objects[0], dict): + print('{' + ', '.join(map(lambda i: f'{repr(i[0])}: {repr(toint(i[1]))}', sorted(objects[0].items()))) + '}') + else: + print(*objects, sep=sep, end=end, file=file, flush=flush) + + doctest_namespace['con'] = con + doctest_namespace['cur'] = con.cursor() + doctest_namespace['print'] = myprint + + yield + + con.close() diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d4bb2cbb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables 
from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..ad791d94 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,74 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = 'Polypheny-Connector-Python' +copyright = '2024, The Polypheny Project' +author = 'The Polypheny Project' + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.linkcode', + 'sphinx.ext.doctest', + 'myst_parser', +] + +autodoc_typehints = "description" +intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), +} +doctest_test_doctest_blocks = '' + +templates_path = ['_templates'] +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = 'alabaster' +html_static_path = ['_static'] + + +def 
linkcode_resolve(domain, info): + if domain != 'py': + return None + + module = info['module'] + parts = module.split('.') + if parts[0] != 'polypheny': + raise "Not supported: {}".format(module) + + mod = __import__(module) + + fullname = info['fullname'] + + if len(parts) > 1: + fullname = '.'.join(parts[1:]) + '.' + fullname + + from functools import reduce + try: + code = reduce(getattr, fullname.split('.'), mod).__code__ + except: + return None + from pathlib import Path + p = Path(code.co_filename) + lineno = code.co_firstlineno + + filename = p.parts[-1] + if p.parts[-2] != 'polypheny': + return None + + if not Path('..', 'polypheny', filename).exists(): # and is_file + return None + + return "https://github.com/polypheny/Polypheny-Connector-Python/blob/proto-without-grpc/polypheny/{}#L{}".format(filename, lineno) diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..54d65dea --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,175 @@ +.. Polypheny-Connector-Python documentation master file, created by + sphinx-quickstart on Wed Mar 20 16:27:44 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Polypheny Driver for Python +====================================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + +This is the official Python Driver for Polypheny. Unless mentioned +otherwise it follows the DBI 2.0 specification. + +Getting Started +--------------- + +In this tutorial we learn how to: + - install the driver using `pip` + - connect to local and remote Polypheny instances + - perform SQL queries using the DBI 2.0 interface + - perform multi model queries using different languages + +First you need to get Polypheny running. If you do not have Polypheny +yet, follow the instructions here_. + +.. _here: https://docs.polypheny.com/en/latest/getting_started/setup/install + +.. 
testsetup:: + + import decimal + import sys + + import polypheny + oldconnect = polypheny.connect + def connect(address=None, *, username=None, password=None, transport=None, **kwargs): + if address == None and transport == None and sys.platform == 'win32': + return oldconnect(('127.0.0.1', 20590), username='pa', password='', transport='plain', **kwargs) + elif transport == 'unix' and sys.platform == 'win32': + return None + return oldconnect(address, username=username, password=password, transport=transport, **kwargs) + polypheny.connect = connect + con = polypheny.connect() + cur = con.cursor() + cur.execute('DROP TABLE IF EXISTS fruits') + cur.execute('CREATE TABLE fruits(id INTEGER PRIMARY KEY, name VARCHAR(50)/*TEXT*/ NOT NULL)') + cur.execute('INSERT INTO fruits (id, name) VALUES (1, ?)', ('Orange',)) + con.commit() + con.close() + + oldprint = print + def myprint(*objects, sep=' ', end='\n', file=None, flush=False): + def toint(i): + if isinstance(i, decimal.Decimal) and int(i) == i: + return int(i) + else: + return i + + if len(objects) == 1 and isinstance(objects[0], dict): + oldprint('{' + ', '.join(map(lambda i: f'{repr(i[0])}: {repr(toint(i[1]))}', sorted(objects[0].items()))) + '}') + elif len(objects) == 1 and isinstance(objects[0], list): + oldprint('[' + ', '.join(map(repr, map(toint, objects[0]))) + ']') + else: + oldprint(*objects, sep=sep, end=end, file=file, flush=flush) + + print = myprint + +Installation +^^^^^^^^^^^^ + +Using pip:: + + pip install polypheny + +Then import the package in your code: + +.. code-block:: python + + import polypheny + +Connect to Polypheny +^^^^^^^^^^^^^^^^^^^^ + +There are two ways to connect to Polypheny: + + 1. Locally via Unix sockets (only Linux, BSD, macOS): + + .. testcode:: + + con = polypheny.connect() + + Or passing an explicit path, username and password: + + .. 
testcode:: + + import os + con = polypheny.connect( + os.path.expanduser('~/.polypheny/polypheny-prism.sock'), + username='pa', + password='', + transport='unix', + ) + + .. note:: + + If the user running the Python script has the same username as + a database user, the user will automatically be logged in as that + user and username and password are ignored. + + 2. Unencrypted over the network (all systems): + + .. testcode:: + + con = polypheny.connect( + ('127.0.0.1', 20590), + username='pa', + password='', + transport='plain', + ) + +Executing a query +^^^^^^^^^^^^^^^^^ + +.. testcode:: + + cur = con.cursor() + cur.execute('SELECT id, name FROM fruits') + for row in cur: + print(row) + cur.close() + +Output: + +.. testoutput:: + + [1, 'Orange'] + +Multimodel queries +------------------ + +In addition to SQL, Polypheny supports many more query languages. To +use another language, replace the +:py:meth:`~polypheny.Cursor.execute` with +:py:meth:`~polypheny.Cursor.executeany` and add the desired +language as first argument. + +So instead of SQL, we can also use e.g the Mongo Query Language: + +.. testcode:: + + cur = con.cursor() + cur.executeany('mongo', 'db.fruits.find({})') + print(cur.fetchone()) + +Because this query returns documents, :py:meth:`~polypheny.Cursor.fetchone` +returns a :py:class:`dict` instead of a :py:class:`list`: + +.. testoutput:: + + {'id': 1, 'name': 'Orange'} + +The return type of :py:meth:`~polypheny.Cursor.fetchone` depends on the +query. + +.. note:: + + Queries returning results of type graph are not supported yet. + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/interval.rst b/docs/interval.rst new file mode 100644 index 00000000..2bb21ec3 --- /dev/null +++ b/docs/interval.rst @@ -0,0 +1,16 @@ +Intervals +--------- + +In Polypheny intervals consist of two values: Months and milliseconds. 
+Values of this type are returned as instances of the +:py:class:`polypheny.interval.IntervalMonthMilliseconds` class. + +.. Note:: + + Intervals cannot be used as dynamic parameter in queries. + +.. autoclass:: polypheny.interval.IntervalMonthMilliseconds() + + .. autoattribute:: months + .. autoattribute:: milliseconds + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..32bb2452 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/polypheny.rst b/docs/polypheny.rst new file mode 100644 index 00000000..0dd7de38 --- /dev/null +++ b/docs/polypheny.rst @@ -0,0 +1,25 @@ +API +--- + +.. title:: API Documentation + +.. automodule:: polypheny + :members: + +.. autoclass:: Connection() + + .. automethod:: cursor() -> ~polypheny.Cursor + .. automethod:: commit + .. automethod:: rollback + .. automethod:: close + +.. autoclass:: Cursor() + + .. automethod:: close + .. automethod:: execute + .. automethod:: executemany + .. automethod:: executeany + .. automethod:: fetchone + .. automethod:: fetchmany + .. automethod:: setinputsizes + .. 
automethod:: setoutputsize diff --git a/docs/types.md b/docs/types.md new file mode 100644 index 00000000..a7a80a9f --- /dev/null +++ b/docs/types.md @@ -0,0 +1,44 @@ +# Types + +This page gives an overview how values are converted between Polypheny +and Python. For more information of the Polypheny Types check +[this](https://docs.polypheny.com/en/latest/concepts/data-types) page. + +## Polypheny to Python + +| Polypheny | Python | Notes | +|---------------------------|--------------------------------------|---------------------------------------------------------------------------------------------------| +| BIGINT | {py:class}`int` | | +| BOOLEAN | {py:class}`bool` | | +| DATE | {py:class}`datetime.date` | | +| DECIMAL | {py:class}`int` or {py:class}`float` | Python type depends on if `DECIMAL` is a whole number or not. | +| DOUBLE | {py:class}`float` | | +| INTEGER | {py:class}`int` | | +| REAL | {py:class}`float` | | +| SMALLINT | {py:class}`int` | | +| TEXT | {py:class}`str` | | +| TIME | {py:class}`datetime.time` | | +| TIMESTAMP | {py:class}`datetime.datetime` | When converting a `TIMESTAMP` to {py:class}`datetime.datetime` the timezone is always set to UTC. | +| TINYINT | {py:class}`int` | | +| VARCHAR | {py:class}`str` | | +| AUDIO, FILE, IMAGE, VIDEO | {py:class}`bytes` | | + +### Special types + +| Special Type | Python Type | Notes | +|---------------------------|-------------------|-------| +| Arrays | {py:class}`list` | | +| Documents | {py:class}`dict` | | + +### Intervals +To learn more about intervals see {doc}`interval`. + +## Python to Polypheny + +The following types can be serialized by the Python driver: + +{py:class}`bool`, {py:class}`bytes`, {py:class}`datetime.date`, +{py:class}`datetime.datetime`, {py:class}`datetime.time`, +{py:class}`float`, {py:class}`int`, {py:class}`list` and +{py:class}`str`. 
+ diff --git a/license_header.txt b/license_header.txt deleted file mode 100644 index 1210032b..00000000 --- a/license_header.txt +++ /dev/null @@ -1,4 +0,0 @@ - -Copyright 2019-2021 The Polypheny Project - - \ No newline at end of file diff --git a/polypheny/__init__.py b/polypheny/__init__.py index d6a8fc69..6c189dd6 100644 --- a/polypheny/__init__.py +++ b/polypheny/__init__.py @@ -1,66 +1,86 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project +# Copyright 2024 The Polypheny Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -# Python Db API v2 -# https://www.python.org/dev/peps/pep-0249/ +from polypheny.connection import Connection, Cursor +from polypheny.exceptions import * +import datetime +from typing import Union, Tuple -apilevel = "2.0" -threadsafety = 1 # Threads may share the module, but not connections. -paramstyle = "qmark" # Question mark style, e.g. ...WHERE name=? 
+# See https://peps.python.org/pep-0249/#globals +apilevel = '2.0' +threadsafety = 0 +paramstyle = 'qmark' -# TODO allow multiple paramstyles and parse individuallay: -# See: https://github.com/snowflakedb/snowflake-connector-python/blob/master/src/snowflake/connector/connection.py#L1126 +def Date(year, month, day): + return datetime.date(year, month, day) -from polypheny.version import VERSION -from polypheny.avatica import PolyphenyAvaticaClient -from polypheny.connection import PolyphenyConnection -from polypheny.environment import (POLYPHENY_CONNECTOR_VERSION) +def Time(hour, minute, second): + return datetime.time(hour, minute, second) -def Connect(host, port, protocol="http", **kwargs): - """Connects to a Polypheny server. +def Timestamp(year, month, day, hour, minute, second): + return datetime.datetime(year, month, day, hour, minute, second) - :param host: - Polypheny server host, e.g. ``localhost`` - :param port: - port to the Phoenix query server, e.g. ``20591`` +# See PEP 249 +import time - :param protocol: - Transport protocol to connect to host, e.g. ``http`` or ``https`` - :param max_retries: - The maximum number of retries in case there is a connection error. +def DateFromTicks(ticks): + return Date(*time.localtime(ticks)[:3]) # TODO: Really local time? + + +def TimeFromTicks(ticks): + return Time(*time.localtime(ticks)[3:6]) - :returns: - :class:`~polypheny.connection.PolyphenyConnection` object. 
- """ - polypheny_client = PolyphenyAvaticaClient(host, port, protocol) - polypheny_client.connect() - return PolyphenyConnection(polypheny_client, **kwargs) - +def TimestampFromTicks(ticks): + return Timestamp(*time.localtime(ticks)[:6]) -connect = Connect +def Binary(string): + return string.encode('UTF-8') -__version__ = POLYPHENY_CONNECTOR_VERSION +# Intentionally omitted, we always give type_code = None, like sqlite3 +# STRING = 1 +# BINARY = 2 +# NUMBER = 3 +# DATETIME = 4 +# ROWID = 5 + + +# TODO: Change Tuple to tuple when Python 3.8 is no longer supported +def connect(address: Union[Tuple[str, int], str] = None, *, username: str = None, password: str = None, + transport: str = None, **kwargs) -> Connection: + """ + Connect to a Polypheny instance with the given parameters. When + no parameters are given, the driver will connect via the ``unix`` + transport to ``~/.polypheny/polypheny-prism.sock``. + + :param address: A :py:class:`str` for ``unix`` transport or a (hostname, port) :py:class:`tuple` for ``plain`` transport. + :param username: username + :param password: password + :param transport: Either ``plain`` or ``unix`` + + """ + if address is None and transport is None and username is None and password is None and len(kwargs) == 0: + transport = 'unix' + elif address is None or transport is None: + raise Error("Address and transport must be given") + return Connection(address, username, password, transport, kwargs) diff --git a/polypheny/auth.py b/polypheny/auth.py deleted file mode 100644 index 1c2682e1..00000000 --- a/polypheny/auth.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -CLIENT_PROOF_SIZE = 32 -CLIENT_KEY_SIZE = 64 - -class AuthManager(object): - - def __init__(self, connection, user, password): - self.connection = connection - self.user = user - self.password = password - - self.method = b"SCRAMSHA256" - self.client_key = os.urandom(CLIENT_KEY_SIZE) - self.client_proof = None \ No newline at end of file diff --git a/polypheny/avatica/__init__.py b/polypheny/avatica/__init__.py deleted file mode 100644 index 9529aef0..00000000 --- a/polypheny/avatica/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .client import PolyphenyAvaticaClient \ No newline at end of file diff --git a/polypheny/avatica/client.py b/polypheny/avatica/client.py deleted file mode 100644 index a663dd55..00000000 --- a/polypheny/avatica/client.py +++ /dev/null @@ -1,539 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Implementation of the JSON-over-HTTP RPC protocol used by Avatica.""" - - -import socket -import pprint -import math -import logging -import time -from polypheny.errors import * -from polypheny.avatica.protobuf import requests_pb2, common_pb2, responses_pb2 - -from html.parser import HTMLParser -import http.client as httplib -import urllib.parse as urlparse - - -from typing import ( - Any, - Callable, - Dict, - Generator, - Iterable, - List, - NamedTuple, - Optional, - Sequence, - Tuple, - Type, - Union, -) - - -# Default configs, tuple of default variable and accepted types -DEFAULT_CONFIGURATION: Dict[str, Tuple[Any, Union[Type, Tuple[Type, ...]]]] = { - "host": ("127.0.0.1", str), # standard - "port": (20591, (int, str)), # standard - "max_retries": (3, (int, str)), # standard - "autocommit": (True, (bool, str)), # standard -} - - - -# Relevant properties as defined by https://calcite.apache.org/avatica/docs/client_reference.html -OPEN_CONNECTION_PROPERTIES = ( - 'user', # User for the database connection - 'password', # Password for the user -) - - -__all__ = ['PolyphenyAvaticaClient'] - -logger = logging.getLogger(__name__) - - -################# -## GLOBAL ## -################# - - -def parse_connection_params(host, port, protocol): - - # Simplify protocol for construction - protocol = protocol.replace('\\', '').replace(':','') - - if protocol != "http" and protocol != "https": - raise ProgrammingError("Error: '%s' is not a supported protocol" % ( protocol )) - - url = _build_connection_string(protocol,host, port) - - url = urlparse.urlparse(url) - if not url.scheme and not url.netloc and url.path: - netloc = url.path - if ':' not in netloc: - netloc = '{}:20591'.format(netloc) - return urlparse.ParseResult('http', netloc, '/', '', '', '') - return url - - - - -def parse_error_page(html): - parser = JettyErrorPageParser() - parser.feed(html) - if parser.title == ['HTTP ERROR: 500']: - message = ' '.join(parser.message).strip() - raise 
InternalError(message) - - - -def parse_error_protobuf(text): - message = common_pb2.WireMessage() - message.ParseFromString(text) - - err = responses_pb2.ErrorResponse() - err.ParseFromString(message.wrapped_message) - - raise Error(err.error_code, err.sql_state, err.error_message) - - - -def _build_connection_string(protocol, host, port): - return protocol + '://' + host + ':' + str(port) - - -#################### -## HELPER Classes ## -#################### - - -class JettyErrorPageParser(HTMLParser): - """This is a helper class, which is used internally - by class:`~polypheny.connection.PolyphenyAvaticaClient` - - to parse inforamtion and errors of the Avatica RPCs servers. - JettyServer - - """ - - def __init__(self): - HTMLParser.__init__(self) - self.path = [] - self.title = [] - self.message = [] - - - - def handle_starttag(self, tag, attrs): - self.path.append(tag) - - - - def handle_endtag(self, tag): - self.path.pop() - - - - def handle_data(self, data): - if len(self.path) > 2 and self.path[0] == 'html' and self.path[1] == 'body': - if len(self.path) == 3 and self.path[2] == 'h2': - self.title.append(data.strip()) - elif len(self.path) == 4 and self.path[2] == 'p' and self.path[3] == 'pre': - self.message.append(data.strip()) - - - - - -class PolyphenyAvaticaClient(object): - """Client for Polypheny's custom Avatica-RPC server. - It connects to Polyheny's JDBC Interface. - - This exposes all low-level functionality that the Avatica - server provides, using the native terminology. - - You most likely do not want to use this class directly, but rather get connect - to a server using :func:`polypheny.connect`. - """ - - def __init__(self, host, port, protocol, max_retries=None): - """Constructs a new client object. - - :param host: - host which runs Polypheny's JDBC interface. - - :param port: - connection port of server. - - :param protocol: - protocol to use for connection setup ``https`` or ``http``. 
- """ - self.url = parse_connection_params(host, port, protocol) - self.max_retries = max_retries if max_retries is not None else 3 - self.connection = None - - - - def connect(self): - """Opens a HTTP connection to the RPC server.""" - logger.debug("Opening connection to %s:%s", self.url.hostname, self.url.port) - try: - self.connection = httplib.HTTPConnection(self.url.hostname, self.url.port) - self.connection.connect() - - except (httplib.HTTPException, socket.error) as e: - raise InterfaceError('Unable to connect to the specified service', e) - - - - def close(self): - """Closes the HTTP connection to the RPC server.""" - if self.connection is not None: - logger.debug("Closing connection to %s:%s", self.url.hostname, self.url.port) - try: - self.connection.close() - except httplib.HTTPException: - logger.warning("Error while closing connection", exc_info=True) - self.connection = None - - - - def open_connection(self, connection_id, info=None): - """Opens a new connection. - :param connection_id: - ID of the connection to open. - - :param info: - Additional connectionParameters for - openening the connection. - """ - - request = requests_pb2.OpenConnectionRequest() - request.connection_id = connection_id - - if info is not None: - # Info is a list of repeated pairs, setting a dict directly fails - for k, v in info.items(): - request.info[k] = v - - logger.debug("Constructed REQUEST:" , request) - - response_data = self._apply(request) - response = responses_pb2.OpenConnectionResponse() - response.ParseFromString(response_data) - - logger.debug("RESPONSE: " + str(response)) - - - - def close_connection(self, connection_id): - """Closes a connection. - - :param connection_id: - ID of the connection to close. - """ - - request = requests_pb2.CloseConnectionRequest() - request.connection_id = connection_id - self._apply(request) - - - - def create_statement(self, connection_id): - """Creates a new statement. 
- - :param connection_id: - ID of the current connection. - - :returns: - New statement ID. - """ - request = requests_pb2.CreateStatementRequest() - request.connection_id = connection_id - - response_data = self._apply(request) - response = responses_pb2.CreateStatementResponse() - response.ParseFromString(response_data) - - return response.statement_id - - - - def close_statement(self, connection_id, statement_id): - """Closes a statement. - - :param connection_id: - ID of the current connection. - - :param statement_id: - ID of the statement to close. - """ - request = requests_pb2.CloseStatementRequest() - request.connection_id = connection_id - request.statement_id = statement_id - - self._apply(request) - - - - def prepare_and_execute(self, connection_id, statement_id, sql, max_rows_total=None, first_frame_max_size=None): - """Prepares and immediately executes a statement. - - :param connection_id: - ID of the current connection. - - :param statement_id: - ID of the statement to prepare. - - :param sql: - SQL query. - - :param max_rows_total: - The maximum number of rows that will be allowed for this query. - - :param first_frame_max_size: - The maximum number of rows that will be returned in the first Frame returned for this query. - - :returns: - Result set with the signature of the prepared statement and the first frame data. - """ - request = requests_pb2.PrepareAndExecuteRequest() - request.connection_id = connection_id - request.statement_id = statement_id - request.sql = sql - if max_rows_total is not None: - request.max_rows_total = max_rows_total - - if first_frame_max_size is not None: - request.first_frame_max_size = first_frame_max_size - - response_data = self._apply(request, 'ExecuteResponse') - response = responses_pb2.ExecuteResponse() - response.ParseFromString(response_data) - - return response.results - - - - def prepare(self, connection_id, command, max_rows_total=None): - """Prepares a statement. 
- - :param connection_id: - ID of the current connection. - - :param command: - Qquery to be prepared SQL,CQL,MQL, etc. - - :param max_rows_total: - The maximum number of rows that will be allowed for this query. - - :returns: - Signature of the prepared statement. - """ - - request = requests_pb2.PrepareRequest() - request.connection_id = connection_id - request.sql = command - if max_rows_total is not None: - request.max_rows_total = max_rows_total - - response_data = self._apply(request) - response = responses_pb2.PrepareResponse() - response.ParseFromString(response_data) - return response.statement - - - def execute(self, connection_id, statement_id, signature, parameter_values=None, first_frame_max_size=None): - """Returns a frame of rows. - - The frame describes whether there may be another frame. If there is not - another frame, the current iteration is done when we have finished the - rows in the this frame. - - :param connection_id: - ID of the current connection. - - :param statement_id: - ID of the statement to fetch rows from. - - :param signature: - common_pb2.Signature object - - :param parameter_values: - A list of parameter values, if statement is to be executed; otherwise ``None``. - - :param first_frame_max_size: - The maximum number of rows that will be returned in the first Frame returned for this query. - - :returns: - Frame data, or ``None`` if there are no more. 
- """ - request = requests_pb2.ExecuteRequest() - request.statementHandle.id = statement_id - request.statementHandle.connection_id = connection_id - request.statementHandle.signature.CopyFrom(signature) - - if parameter_values is not None: - request.parameter_values.extend(parameter_values) - request.has_parameter_values = True - - if first_frame_max_size is not None: - request.deprecated_first_frame_max_size = first_frame_max_size - request.first_frame_max_size = first_frame_max_size - - response_data = self._apply(request) - response = responses_pb2.ExecuteResponse() - response.ParseFromString(response_data) - - return response.results - - - def fetch(self, connection_id, statement_id, offset=0, frame_max_size=None): - """Returns a frame of rows. - - The frame describes whether there may be another frame. If there is not - another frame, the current iteration is done when we have finished the - rows in the this frame. - - :param connection_id: - ID of the current connection. - - :param statement_id: - ID of the statement to fetch rows from. - - :param offset: - Zero-based offset of first row in the requested frame. - - :param frame_max_size: - Maximum number of rows to return; negative means no limit. - - :returns: - Frame data, or ``None`` if there are no more. - """ - request = requests_pb2.FetchRequest() - request.connection_id = connection_id - request.statement_id = statement_id - request.offset = offset - - if frame_max_size is not None: - request.frame_max_size = frame_max_size - - response_data = self._apply(request) - response = responses_pb2.FetchResponse() - response.ParseFromString(response_data) - - return response.frame - - - def commit(self, connection_id): - """Commits the current active transaction of a connection - - :param connection_id: - ID of the connection to commit. 
- """ - - request = requests_pb2.CommitRequest() - request.connection_id = connection_id - self._apply(request) - - - def rollback(self, connection_id): - """CommRolls back the current active transaction of a connection - - :param connection_id: - ID of the connection to rollback. - """ - - request = requests_pb2.RollbackRequest() - request.connection_id = connection_id - self._apply(request) - - - def _post_request(self, body, headers): - retry_count = 2 - - while True: - logger.debug("POST %s %r %r", self.url.path, body, headers) - try: - self.connection.request('POST', self.url.path, body=body, headers=headers) - response = self.connection.getresponse() - - # Graceful retry to resume and reestablish session - except httplib.HTTPException as e: - if retry_count > 0: - delay = math.exp(-retry_count) - logger.debug("HTTP protocol error, will retry in %s seconds...", delay, exc_info=True) - self.close() - self.connect() - time.sleep(delay) - retry_count -= 1 - continue - raise InterfaceError('RPC request failed', cause=e) - else: - if response.status == httplib.SERVICE_UNAVAILABLE: - if retry_count > 0: - delay = math.exp(-retry_count) - logger.debug("Service unavailable, will retry in %s seconds...", delay, exc_info=True) - time.sleep(delay) - retry_count -= 1 - continue - - return response - - - - def _apply(self, request_data, expected_response_type=None): - logger.debug("Sending request\n%s", pprint.pformat(request_data)) - - request_name = request_data.__class__.__name__ - message = common_pb2.WireMessage() - message.name = 'org.apache.calcite.avatica.proto.Requests${}'.format(request_name) - message.wrapped_message = request_data.SerializeToString() - body = message.SerializeToString() - headers = {'content-type': 'application/x-google-protobuf'} - - - response = self._post_request(body, headers) - response_body = response.read() - - if response.status != httplib.OK: - logger.info("Received response\n%s", response_body) - if b'' in response_body: - 
parse_error_page(response_body) - else: - # assume the response is in protobuf format - parse_error_protobuf(response_body) - raise InterfaceError('RPC request returned invalid status code', response.status) - - message = common_pb2.WireMessage() - message.ParseFromString(response_body) - - logger.debug("Received response\n%s", message) - - if expected_response_type is None: - expected_response_type = request_name.replace('Request', 'Response') - - expected_response_type = 'org.apache.calcite.avatica.proto.Responses$' + expected_response_type - if message.name != expected_response_type: - raise InterfaceError('unexpected response type "{}"'.format(message.name)) - - return message.wrapped_message - - diff --git a/polypheny/avatica/protobuf/__init__.py b/polypheny/avatica/protobuf/__init__.py deleted file mode 100644 index dedca9df..00000000 --- a/polypheny/avatica/protobuf/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Placeholder for setup to detect protobuf in packaging -# DO NOT DELETE \ No newline at end of file diff --git a/polypheny/avatica/protobuf/common_pb2.py b/polypheny/avatica/protobuf/common_pb2.py deleted file mode 100644 index 2e5d14ed..00000000 --- a/polypheny/avatica/protobuf/common_pb2.py +++ /dev/null @@ -1,276 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: common.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x63ommon.proto\"\xc0\x01\n\x14\x43onnectionProperties\x12\x10\n\x08is_dirty\x18\x01 \x01(\x08\x12\x13\n\x0b\x61uto_commit\x18\x02 \x01(\x08\x12\x17\n\x0fhas_auto_commit\x18\x07 \x01(\x08\x12\x11\n\tread_only\x18\x03 \x01(\x08\x12\x15\n\rhas_read_only\x18\x08 \x01(\x08\x12\x1d\n\x15transaction_isolation\x18\x04 \x01(\r\x12\x0f\n\x07\x63\x61talog\x18\x05 \x01(\t\x12\x0e\n\x06schema\x18\x06 \x01(\t\"S\n\x0fStatementHandle\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\r\x12\x1d\n\tsignature\x18\x03 \x01(\x0b\x32\n.Signature\"\xb0\x01\n\tSignature\x12 \n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x0f.ColumnMetaData\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12%\n\nparameters\x18\x03 \x03(\x0b\x32\x11.AvaticaParameter\x12&\n\x0e\x63ursor_factory\x18\x04 \x01(\x0b\x32\x0e.CursorFactory\x12%\n\rstatementType\x18\x05 \x01(\x0e\x32\x0e.StatementType\"\xad\x03\n\x0e\x43olumnMetaData\x12\x0f\n\x07ordinal\x18\x01 \x01(\r\x12\x16\n\x0e\x61uto_increment\x18\x02 \x01(\x08\x12\x16\n\x0e\x63\x61se_sensitive\x18\x03 \x01(\x08\x12\x12\n\nsearchable\x18\x04 \x01(\x08\x12\x10\n\x08\x63urrency\x18\x05 \x01(\x08\x12\x10\n\x08nullable\x18\x06 \x01(\r\x12\x0e\n\x06signed\x18\x07 \x01(\x08\x12\x14\n\x0c\x64isplay_size\x18\x08 \x01(\r\x12\r\n\x05label\x18\t \x01(\t\x12\x13\n\x0b\x63olumn_name\x18\n \x01(\t\x12\x13\n\x0bschema_name\x18\x0b \x01(\t\x12\x11\n\tprecision\x18\x0c \x01(\r\x12\r\n\x05scale\x18\r 
\x01(\r\x12\x12\n\ntable_name\x18\x0e \x01(\t\x12\x14\n\x0c\x63\x61talog_name\x18\x0f \x01(\t\x12\x11\n\tread_only\x18\x10 \x01(\x08\x12\x10\n\x08writable\x18\x11 \x01(\x08\x12\x1b\n\x13\x64\x65\x66initely_writable\x18\x12 \x01(\x08\x12\x19\n\x11\x63olumn_class_name\x18\x13 \x01(\t\x12\x1a\n\x04type\x18\x14 \x01(\x0b\x32\x0c.AvaticaType\"}\n\x0b\x41vaticaType\x12\n\n\x02id\x18\x01 \x01(\r\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x11\n\x03rep\x18\x03 \x01(\x0e\x32\x04.Rep\x12 \n\x07\x63olumns\x18\x04 \x03(\x0b\x32\x0f.ColumnMetaData\x12\x1f\n\tcomponent\x18\x05 \x01(\x0b\x32\x0c.AvaticaType\"\x91\x01\n\x10\x41vaticaParameter\x12\x0e\n\x06signed\x18\x01 \x01(\x08\x12\x11\n\tprecision\x18\x02 \x01(\r\x12\r\n\x05scale\x18\x03 \x01(\r\x12\x16\n\x0eparameter_type\x18\x04 \x01(\r\x12\x11\n\ttype_name\x18\x05 \x01(\t\x12\x12\n\nclass_name\x18\x06 \x01(\t\x12\x0c\n\x04name\x18\x07 \x01(\t\"\xb3\x01\n\rCursorFactory\x12#\n\x05style\x18\x01 \x01(\x0e\x32\x14.CursorFactory.Style\x12\x12\n\nclass_name\x18\x02 \x01(\t\x12\x13\n\x0b\x66ield_names\x18\x03 \x03(\t\"T\n\x05Style\x12\n\n\x06OBJECT\x10\x00\x12\n\n\x06RECORD\x10\x01\x12\x15\n\x11RECORD_PROJECTION\x10\x02\x12\t\n\x05\x41RRAY\x10\x03\x12\x08\n\x04LIST\x10\x04\x12\x07\n\x03MAP\x10\x05\"9\n\x05\x46rame\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\x12\x12\n\x04rows\x18\x03 \x03(\x0b\x32\x04.Row\"\"\n\x03Row\x12\x1b\n\x05value\x18\x01 \x03(\x0b\x32\x0c.ColumnValue\"3\n\x10\x44\x61tabaseProperty\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tfunctions\x18\x02 \x03(\t\"4\n\x0bWireMessage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0fwrapped_message\x18\x02 \x01(\x0c\"\x87\x01\n\x0b\x43olumnValue\x12\x1a\n\x05value\x18\x01 \x03(\x0b\x32\x0b.TypedValue\x12 \n\x0b\x61rray_value\x18\x02 \x03(\x0b\x32\x0b.TypedValue\x12\x17\n\x0fhas_array_value\x18\x03 \x01(\x08\x12!\n\x0cscalar_value\x18\x04 \x01(\x0b\x32\x0b.TypedValue\"\xf2\x01\n\nTypedValue\x12\x12\n\x04type\x18\x01 
\x01(\x0e\x32\x04.Rep\x12\x12\n\nbool_value\x18\x02 \x01(\x08\x12\x14\n\x0cstring_value\x18\x03 \x01(\t\x12\x14\n\x0cnumber_value\x18\x04 \x01(\x12\x12\x13\n\x0b\x62ytes_value\x18\x05 \x01(\x0c\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x0c\n\x04null\x18\x07 \x01(\x08\x12 \n\x0b\x61rray_value\x18\x08 \x03(\x0b\x32\x0b.TypedValue\x12\x1c\n\x0e\x63omponent_type\x18\t \x01(\x0e\x32\x04.Rep\x12\x17\n\x0fimplicitly_null\x18\n \x01(\x08\"\xa6\x02\n\x19MetaDataOperationArgument\x12\x14\n\x0cstring_value\x18\x01 \x01(\t\x12\x12\n\nbool_value\x18\x02 \x01(\x08\x12\x11\n\tint_value\x18\x03 \x01(\x11\x12\x1b\n\x13string_array_values\x18\x04 \x03(\t\x12\x18\n\x10int_array_values\x18\x05 \x03(\x11\x12\x35\n\x04type\x18\x06 \x01(\x0e\x32\'.MetaDataOperationArgument.ArgumentType\"^\n\x0c\x41rgumentType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\x07\n\x03INT\x10\x02\x12\x13\n\x0fREPEATED_STRING\x10\x03\x12\x10\n\x0cREPEATED_INT\x10\x04\x12\x08\n\x04NULL\x10\x05\"\xb0\x01\n\nQueryState\x12\x18\n\x04type\x18\x01 \x01(\x0e\x32\n.StateType\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x1e\n\x02op\x18\x03 \x01(\x0e\x32\x12.MetaDataOperation\x12(\n\x04\x61rgs\x18\x04 \x03(\x0b\x32\x1a.MetaDataOperationArgument\x12\x10\n\x08has_args\x18\x05 \x01(\x08\x12\x0f\n\x07has_sql\x18\x06 \x01(\x08\x12\x0e\n\x06has_op\x18\x07 
\x01(\x08*\x9f\x01\n\rStatementType\x12\n\n\x06SELECT\x10\x00\x12\n\n\x06INSERT\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\n\n\x06UPSERT\x10\x04\x12\t\n\x05MERGE\x10\x05\x12\r\n\tOTHER_DML\x10\x06\x12\n\n\x06\x43REATE\x10\x07\x12\x08\n\x04\x44ROP\x10\x08\x12\t\n\x05\x41LTER\x10\t\x12\r\n\tOTHER_DDL\x10\n\x12\x08\n\x04\x43\x41LL\x10\x0b*\xe2\x03\n\x03Rep\x12\x15\n\x11PRIMITIVE_BOOLEAN\x10\x00\x12\x12\n\x0ePRIMITIVE_BYTE\x10\x01\x12\x12\n\x0ePRIMITIVE_CHAR\x10\x02\x12\x13\n\x0fPRIMITIVE_SHORT\x10\x03\x12\x11\n\rPRIMITIVE_INT\x10\x04\x12\x12\n\x0ePRIMITIVE_LONG\x10\x05\x12\x13\n\x0fPRIMITIVE_FLOAT\x10\x06\x12\x14\n\x10PRIMITIVE_DOUBLE\x10\x07\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\x08\n\x04\x42YTE\x10\t\x12\r\n\tCHARACTER\x10\n\x12\t\n\x05SHORT\x10\x0b\x12\x0b\n\x07INTEGER\x10\x0c\x12\x08\n\x04LONG\x10\r\x12\t\n\x05\x46LOAT\x10\x0e\x12\n\n\x06\x44OUBLE\x10\x0f\x12\x0f\n\x0b\x42IG_INTEGER\x10\x19\x12\x0f\n\x0b\x42IG_DECIMAL\x10\x1a\x12\x11\n\rJAVA_SQL_TIME\x10\x10\x12\x16\n\x12JAVA_SQL_TIMESTAMP\x10\x11\x12\x11\n\rJAVA_SQL_DATE\x10\x12\x12\x12\n\x0eJAVA_UTIL_DATE\x10\x13\x12\x0f\n\x0b\x42YTE_STRING\x10\x14\x12\n\n\x06STRING\x10\x15\x12\n\n\x06NUMBER\x10\x16\x12\n\n\x06OBJECT\x10\x17\x12\x08\n\x04NULL\x10\x18\x12\t\n\x05\x41RRAY\x10\x1b\x12\n\n\x06STRUCT\x10\x1c\x12\x0c\n\x08MULTISET\x10\x1d*^\n\x08Severity\x12\x14\n\x10UNKNOWN_SEVERITY\x10\x00\x12\x12\n\x0e\x46\x41TAL_SEVERITY\x10\x01\x12\x12\n\x0e\x45RROR_SEVERITY\x10\x02\x12\x14\n\x10WARNING_SEVERITY\x10\x03*\xd7\x04\n\x11MetaDataOperation\x12\x12\n\x0eGET_ATTRIBUTES\x10\x00\x12\x1b\n\x17GET_BEST_ROW_IDENTIFIER\x10\x01\x12\x10\n\x0cGET_CATALOGS\x10\x02\x12\x1e\n\x1aGET_CLIENT_INFO_PROPERTIES\x10\x03\x12\x19\n\x15GET_COLUMN_PRIVILEGES\x10\x04\x12\x0f\n\x0bGET_COLUMNS\x10\x05\x12\x17\n\x13GET_CROSS_REFERENCE\x10\x06\x12\x15\n\x11GET_EXPORTED_KEYS\x10\x07\x12\x18\n\x14GET_FUNCTION_COLUMNS\x10\x08\x12\x11\n\rGET_FUNCTIONS\x10\t\x12\x15\n\x11GET_IMPORTED_KEYS\x10\n\x12\x12\n\x0eGET_INDEX_INFO\x10\x0
b\x12\x14\n\x10GET_PRIMARY_KEYS\x10\x0c\x12\x19\n\x15GET_PROCEDURE_COLUMNS\x10\r\x12\x12\n\x0eGET_PROCEDURES\x10\x0e\x12\x16\n\x12GET_PSEUDO_COLUMNS\x10\x0f\x12\x0f\n\x0bGET_SCHEMAS\x10\x10\x12\x19\n\x15GET_SCHEMAS_WITH_ARGS\x10\x11\x12\x14\n\x10GET_SUPER_TABLES\x10\x12\x12\x13\n\x0fGET_SUPER_TYPES\x10\x13\x12\x18\n\x14GET_TABLE_PRIVILEGES\x10\x14\x12\x0e\n\nGET_TABLES\x10\x15\x12\x13\n\x0fGET_TABLE_TYPES\x10\x16\x12\x11\n\rGET_TYPE_INFO\x10\x17\x12\x0c\n\x08GET_UDTS\x10\x18\x12\x17\n\x13GET_VERSION_COLUMNS\x10\x19*\"\n\tStateType\x12\x07\n\x03SQL\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x42\"\n org.apache.calcite.avatica.protob\x06proto3') - -_STATEMENTTYPE = DESCRIPTOR.enum_types_by_name['StatementType'] -StatementType = enum_type_wrapper.EnumTypeWrapper(_STATEMENTTYPE) -_REP = DESCRIPTOR.enum_types_by_name['Rep'] -Rep = enum_type_wrapper.EnumTypeWrapper(_REP) -_SEVERITY = DESCRIPTOR.enum_types_by_name['Severity'] -Severity = enum_type_wrapper.EnumTypeWrapper(_SEVERITY) -_METADATAOPERATION = DESCRIPTOR.enum_types_by_name['MetaDataOperation'] -MetaDataOperation = enum_type_wrapper.EnumTypeWrapper(_METADATAOPERATION) -_STATETYPE = DESCRIPTOR.enum_types_by_name['StateType'] -StateType = enum_type_wrapper.EnumTypeWrapper(_STATETYPE) -SELECT = 0 -INSERT = 1 -UPDATE = 2 -DELETE = 3 -UPSERT = 4 -MERGE = 5 -OTHER_DML = 6 -CREATE = 7 -DROP = 8 -ALTER = 9 -OTHER_DDL = 10 -CALL = 11 -PRIMITIVE_BOOLEAN = 0 -PRIMITIVE_BYTE = 1 -PRIMITIVE_CHAR = 2 -PRIMITIVE_SHORT = 3 -PRIMITIVE_INT = 4 -PRIMITIVE_LONG = 5 -PRIMITIVE_FLOAT = 6 -PRIMITIVE_DOUBLE = 7 -BOOLEAN = 8 -BYTE = 9 -CHARACTER = 10 -SHORT = 11 -INTEGER = 12 -LONG = 13 -FLOAT = 14 -DOUBLE = 15 -BIG_INTEGER = 25 -BIG_DECIMAL = 26 -JAVA_SQL_TIME = 16 -JAVA_SQL_TIMESTAMP = 17 -JAVA_SQL_DATE = 18 -JAVA_UTIL_DATE = 19 -BYTE_STRING = 20 -STRING = 21 -NUMBER = 22 -OBJECT = 23 -NULL = 24 -ARRAY = 27 -STRUCT = 28 -MULTISET = 29 -UNKNOWN_SEVERITY = 0 -FATAL_SEVERITY = 1 -ERROR_SEVERITY = 2 -WARNING_SEVERITY = 3 -GET_ATTRIBUTES = 0 
-GET_BEST_ROW_IDENTIFIER = 1 -GET_CATALOGS = 2 -GET_CLIENT_INFO_PROPERTIES = 3 -GET_COLUMN_PRIVILEGES = 4 -GET_COLUMNS = 5 -GET_CROSS_REFERENCE = 6 -GET_EXPORTED_KEYS = 7 -GET_FUNCTION_COLUMNS = 8 -GET_FUNCTIONS = 9 -GET_IMPORTED_KEYS = 10 -GET_INDEX_INFO = 11 -GET_PRIMARY_KEYS = 12 -GET_PROCEDURE_COLUMNS = 13 -GET_PROCEDURES = 14 -GET_PSEUDO_COLUMNS = 15 -GET_SCHEMAS = 16 -GET_SCHEMAS_WITH_ARGS = 17 -GET_SUPER_TABLES = 18 -GET_SUPER_TYPES = 19 -GET_TABLE_PRIVILEGES = 20 -GET_TABLES = 21 -GET_TABLE_TYPES = 22 -GET_TYPE_INFO = 23 -GET_UDTS = 24 -GET_VERSION_COLUMNS = 25 -SQL = 0 -METADATA = 1 - - -_CONNECTIONPROPERTIES = DESCRIPTOR.message_types_by_name['ConnectionProperties'] -_STATEMENTHANDLE = DESCRIPTOR.message_types_by_name['StatementHandle'] -_SIGNATURE = DESCRIPTOR.message_types_by_name['Signature'] -_COLUMNMETADATA = DESCRIPTOR.message_types_by_name['ColumnMetaData'] -_AVATICATYPE = DESCRIPTOR.message_types_by_name['AvaticaType'] -_AVATICAPARAMETER = DESCRIPTOR.message_types_by_name['AvaticaParameter'] -_CURSORFACTORY = DESCRIPTOR.message_types_by_name['CursorFactory'] -_FRAME = DESCRIPTOR.message_types_by_name['Frame'] -_ROW = DESCRIPTOR.message_types_by_name['Row'] -_DATABASEPROPERTY = DESCRIPTOR.message_types_by_name['DatabaseProperty'] -_WIREMESSAGE = DESCRIPTOR.message_types_by_name['WireMessage'] -_COLUMNVALUE = DESCRIPTOR.message_types_by_name['ColumnValue'] -_TYPEDVALUE = DESCRIPTOR.message_types_by_name['TypedValue'] -_METADATAOPERATIONARGUMENT = DESCRIPTOR.message_types_by_name['MetaDataOperationArgument'] -_QUERYSTATE = DESCRIPTOR.message_types_by_name['QueryState'] -_CURSORFACTORY_STYLE = _CURSORFACTORY.enum_types_by_name['Style'] -_METADATAOPERATIONARGUMENT_ARGUMENTTYPE = _METADATAOPERATIONARGUMENT.enum_types_by_name['ArgumentType'] -ConnectionProperties = _reflection.GeneratedProtocolMessageType('ConnectionProperties', (_message.Message,), { - 'DESCRIPTOR' : _CONNECTIONPROPERTIES, - '__module__' : 'common_pb2' - # 
@@protoc_insertion_point(class_scope:ConnectionProperties) - }) -_sym_db.RegisterMessage(ConnectionProperties) - -StatementHandle = _reflection.GeneratedProtocolMessageType('StatementHandle', (_message.Message,), { - 'DESCRIPTOR' : _STATEMENTHANDLE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:StatementHandle) - }) -_sym_db.RegisterMessage(StatementHandle) - -Signature = _reflection.GeneratedProtocolMessageType('Signature', (_message.Message,), { - 'DESCRIPTOR' : _SIGNATURE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:Signature) - }) -_sym_db.RegisterMessage(Signature) - -ColumnMetaData = _reflection.GeneratedProtocolMessageType('ColumnMetaData', (_message.Message,), { - 'DESCRIPTOR' : _COLUMNMETADATA, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:ColumnMetaData) - }) -_sym_db.RegisterMessage(ColumnMetaData) - -AvaticaType = _reflection.GeneratedProtocolMessageType('AvaticaType', (_message.Message,), { - 'DESCRIPTOR' : _AVATICATYPE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:AvaticaType) - }) -_sym_db.RegisterMessage(AvaticaType) - -AvaticaParameter = _reflection.GeneratedProtocolMessageType('AvaticaParameter', (_message.Message,), { - 'DESCRIPTOR' : _AVATICAPARAMETER, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:AvaticaParameter) - }) -_sym_db.RegisterMessage(AvaticaParameter) - -CursorFactory = _reflection.GeneratedProtocolMessageType('CursorFactory', (_message.Message,), { - 'DESCRIPTOR' : _CURSORFACTORY, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:CursorFactory) - }) -_sym_db.RegisterMessage(CursorFactory) - -Frame = _reflection.GeneratedProtocolMessageType('Frame', (_message.Message,), { - 'DESCRIPTOR' : _FRAME, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:Frame) - }) -_sym_db.RegisterMessage(Frame) - -Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), { 
- 'DESCRIPTOR' : _ROW, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:Row) - }) -_sym_db.RegisterMessage(Row) - -DatabaseProperty = _reflection.GeneratedProtocolMessageType('DatabaseProperty', (_message.Message,), { - 'DESCRIPTOR' : _DATABASEPROPERTY, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:DatabaseProperty) - }) -_sym_db.RegisterMessage(DatabaseProperty) - -WireMessage = _reflection.GeneratedProtocolMessageType('WireMessage', (_message.Message,), { - 'DESCRIPTOR' : _WIREMESSAGE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:WireMessage) - }) -_sym_db.RegisterMessage(WireMessage) - -ColumnValue = _reflection.GeneratedProtocolMessageType('ColumnValue', (_message.Message,), { - 'DESCRIPTOR' : _COLUMNVALUE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:ColumnValue) - }) -_sym_db.RegisterMessage(ColumnValue) - -TypedValue = _reflection.GeneratedProtocolMessageType('TypedValue', (_message.Message,), { - 'DESCRIPTOR' : _TYPEDVALUE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:TypedValue) - }) -_sym_db.RegisterMessage(TypedValue) - -MetaDataOperationArgument = _reflection.GeneratedProtocolMessageType('MetaDataOperationArgument', (_message.Message,), { - 'DESCRIPTOR' : _METADATAOPERATIONARGUMENT, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:MetaDataOperationArgument) - }) -_sym_db.RegisterMessage(MetaDataOperationArgument) - -QueryState = _reflection.GeneratedProtocolMessageType('QueryState', (_message.Message,), { - 'DESCRIPTOR' : _QUERYSTATE, - '__module__' : 'common_pb2' - # @@protoc_insertion_point(class_scope:QueryState) - }) -_sym_db.RegisterMessage(QueryState) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n org.apache.calcite.avatica.proto' - _STATEMENTTYPE._serialized_start=2426 - _STATEMENTTYPE._serialized_end=2585 - 
_REP._serialized_start=2588 - _REP._serialized_end=3070 - _SEVERITY._serialized_start=3072 - _SEVERITY._serialized_end=3166 - _METADATAOPERATION._serialized_start=3169 - _METADATAOPERATION._serialized_end=3768 - _STATETYPE._serialized_start=3770 - _STATETYPE._serialized_end=3804 - _CONNECTIONPROPERTIES._serialized_start=17 - _CONNECTIONPROPERTIES._serialized_end=209 - _STATEMENTHANDLE._serialized_start=211 - _STATEMENTHANDLE._serialized_end=294 - _SIGNATURE._serialized_start=297 - _SIGNATURE._serialized_end=473 - _COLUMNMETADATA._serialized_start=476 - _COLUMNMETADATA._serialized_end=905 - _AVATICATYPE._serialized_start=907 - _AVATICATYPE._serialized_end=1032 - _AVATICAPARAMETER._serialized_start=1035 - _AVATICAPARAMETER._serialized_end=1180 - _CURSORFACTORY._serialized_start=1183 - _CURSORFACTORY._serialized_end=1362 - _CURSORFACTORY_STYLE._serialized_start=1278 - _CURSORFACTORY_STYLE._serialized_end=1362 - _FRAME._serialized_start=1364 - _FRAME._serialized_end=1421 - _ROW._serialized_start=1423 - _ROW._serialized_end=1457 - _DATABASEPROPERTY._serialized_start=1459 - _DATABASEPROPERTY._serialized_end=1510 - _WIREMESSAGE._serialized_start=1512 - _WIREMESSAGE._serialized_end=1564 - _COLUMNVALUE._serialized_start=1567 - _COLUMNVALUE._serialized_end=1702 - _TYPEDVALUE._serialized_start=1705 - _TYPEDVALUE._serialized_end=1947 - _METADATAOPERATIONARGUMENT._serialized_start=1950 - _METADATAOPERATIONARGUMENT._serialized_end=2244 - _METADATAOPERATIONARGUMENT_ARGUMENTTYPE._serialized_start=2150 - _METADATAOPERATIONARGUMENT_ARGUMENTTYPE._serialized_end=2244 - _QUERYSTATE._serialized_start=2247 - _QUERYSTATE._serialized_end=2423 -# @@protoc_insertion_point(module_scope) diff --git a/polypheny/avatica/protobuf/requests_pb2.py b/polypheny/avatica/protobuf/requests_pb2.py deleted file mode 100644 index c4091775..00000000 --- a/polypheny/avatica/protobuf/requests_pb2.py +++ /dev/null @@ -1,299 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. 
DO NOT EDIT! -# source: requests.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from . import common_pb2 as common__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0erequests.proto\x1a\x0c\x63ommon.proto\"(\n\x0f\x43\x61talogsRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"0\n\x17\x44\x61tabasePropertyRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"P\n\x0eSchemasRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x15\n\rconnection_id\x18\x03 \x01(\t\"\x95\x01\n\rTablesRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x1a\n\x12table_name_pattern\x18\x03 \x01(\t\x12\x11\n\ttype_list\x18\x04 \x03(\t\x12\x15\n\rhas_type_list\x18\x06 \x01(\x08\x12\x15\n\rconnection_id\x18\x07 \x01(\t\"*\n\x11TableTypesRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"\x89\x01\n\x0e\x43olumnsRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x16\n\x0eschema_pattern\x18\x02 \x01(\t\x12\x1a\n\x12table_name_pattern\x18\x03 \x01(\t\x12\x1b\n\x13\x63olumn_name_pattern\x18\x04 \x01(\t\x12\x15\n\rconnection_id\x18\x05 \x01(\t\"(\n\x0fTypeInfoRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"`\n\x12PrimaryKeysRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\ntable_name\x18\x03 \x01(\t\x12\x15\n\rconnection_id\x18\x04 \x01(\t\"a\n\x13ImportedKeysRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\ntable_name\x18\x03 \x01(\t\x12\x15\n\rconnection_id\x18\x04 
\x01(\t\"a\n\x13\x45xportedKeysRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\ntable_name\x18\x03 \x01(\t\x12\x15\n\rconnection_id\x18\x04 \x01(\t\"\x83\x01\n\x10IndexInfoRequest\x12\x0f\n\x07\x63\x61talog\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\ntable_name\x18\x03 \x01(\t\x12\x0e\n\x06unique\x18\x04 \x01(\x08\x12\x13\n\x0b\x61pproximate\x18\x05 \x01(\x08\x12\x15\n\rconnection_id\x18\x06 \x01(\t\"\xa1\x01\n\x18PrepareAndExecuteRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x15\n\rmax_row_count\x18\x03 \x01(\x04\x12\x14\n\x0cstatement_id\x18\x04 \x01(\r\x12\x16\n\x0emax_rows_total\x18\x05 \x01(\x03\x12\x1c\n\x14\x66irst_frame_max_size\x18\x06 \x01(\x05\"c\n\x0ePrepareRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x0b\n\x03sql\x18\x02 \x01(\t\x12\x15\n\rmax_row_count\x18\x03 \x01(\x04\x12\x16\n\x0emax_rows_total\x18\x04 \x01(\x03\"\x80\x01\n\x0c\x46\x65tchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x0e\n\x06offset\x18\x03 \x01(\x04\x12\x1b\n\x13\x66\x65tch_max_row_count\x18\x04 \x01(\r\x12\x16\n\x0e\x66rame_max_size\x18\x05 \x01(\x05\"/\n\x16\x43reateStatementRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"D\n\x15\x43loseStatementRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\"\x8b\x01\n\x15OpenConnectionRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12.\n\x04info\x18\x02 \x03(\x0b\x32 .OpenConnectionRequest.InfoEntry\x1a+\n\tInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"/\n\x16\x43loseConnectionRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"Y\n\x15\x43onnectionSyncRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12)\n\nconn_props\x18\x02 \x01(\x0b\x32\x15.ConnectionProperties\"\xc7\x01\n\x0e\x45xecuteRequest\x12)\n\x0fstatementHandle\x18\x01 
\x01(\x0b\x32\x10.StatementHandle\x12%\n\x10parameter_values\x18\x02 \x03(\x0b\x32\x0b.TypedValue\x12\'\n\x1f\x64\x65precated_first_frame_max_size\x18\x03 \x01(\x04\x12\x1c\n\x14has_parameter_values\x18\x04 \x01(\x08\x12\x1c\n\x14\x66irst_frame_max_size\x18\x05 \x01(\x05\"m\n\x12SyncResultsRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1a\n\x05state\x18\x03 \x01(\x0b\x32\x0b.QueryState\x12\x0e\n\x06offset\x18\x04 \x01(\x04\"&\n\rCommitRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"(\n\x0fRollbackRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\"b\n\x1dPrepareAndExecuteBatchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x14\n\x0csql_commands\x18\x03 \x03(\t\"4\n\x0bUpdateBatch\x12%\n\x10parameter_values\x18\x01 \x03(\x0b\x32\x0b.TypedValue\"a\n\x13\x45xecuteBatchRequest\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1d\n\x07updates\x18\x03 \x03(\x0b\x32\x0c.UpdateBatchB\"\n org.apache.calcite.avatica.protob\x06proto3') - - - -_CATALOGSREQUEST = DESCRIPTOR.message_types_by_name['CatalogsRequest'] -_DATABASEPROPERTYREQUEST = DESCRIPTOR.message_types_by_name['DatabasePropertyRequest'] -_SCHEMASREQUEST = DESCRIPTOR.message_types_by_name['SchemasRequest'] -_TABLESREQUEST = DESCRIPTOR.message_types_by_name['TablesRequest'] -_TABLETYPESREQUEST = DESCRIPTOR.message_types_by_name['TableTypesRequest'] -_COLUMNSREQUEST = DESCRIPTOR.message_types_by_name['ColumnsRequest'] -_TYPEINFOREQUEST = DESCRIPTOR.message_types_by_name['TypeInfoRequest'] -_PRIMARYKEYSREQUEST = DESCRIPTOR.message_types_by_name['PrimaryKeysRequest'] -_IMPORTEDKEYSREQUEST = DESCRIPTOR.message_types_by_name['ImportedKeysRequest'] -_EXPORTEDKEYSREQUEST = DESCRIPTOR.message_types_by_name['ExportedKeysRequest'] -_INDEXINFOREQUEST = DESCRIPTOR.message_types_by_name['IndexInfoRequest'] -_PREPAREANDEXECUTEREQUEST = DESCRIPTOR.message_types_by_name['PrepareAndExecuteRequest'] 
-_PREPAREREQUEST = DESCRIPTOR.message_types_by_name['PrepareRequest'] -_FETCHREQUEST = DESCRIPTOR.message_types_by_name['FetchRequest'] -_CREATESTATEMENTREQUEST = DESCRIPTOR.message_types_by_name['CreateStatementRequest'] -_CLOSESTATEMENTREQUEST = DESCRIPTOR.message_types_by_name['CloseStatementRequest'] -_OPENCONNECTIONREQUEST = DESCRIPTOR.message_types_by_name['OpenConnectionRequest'] -_OPENCONNECTIONREQUEST_INFOENTRY = _OPENCONNECTIONREQUEST.nested_types_by_name['InfoEntry'] -_CLOSECONNECTIONREQUEST = DESCRIPTOR.message_types_by_name['CloseConnectionRequest'] -_CONNECTIONSYNCREQUEST = DESCRIPTOR.message_types_by_name['ConnectionSyncRequest'] -_EXECUTEREQUEST = DESCRIPTOR.message_types_by_name['ExecuteRequest'] -_SYNCRESULTSREQUEST = DESCRIPTOR.message_types_by_name['SyncResultsRequest'] -_COMMITREQUEST = DESCRIPTOR.message_types_by_name['CommitRequest'] -_ROLLBACKREQUEST = DESCRIPTOR.message_types_by_name['RollbackRequest'] -_PREPAREANDEXECUTEBATCHREQUEST = DESCRIPTOR.message_types_by_name['PrepareAndExecuteBatchRequest'] -_UPDATEBATCH = DESCRIPTOR.message_types_by_name['UpdateBatch'] -_EXECUTEBATCHREQUEST = DESCRIPTOR.message_types_by_name['ExecuteBatchRequest'] -CatalogsRequest = _reflection.GeneratedProtocolMessageType('CatalogsRequest', (_message.Message,), { - 'DESCRIPTOR' : _CATALOGSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:CatalogsRequest) - }) -_sym_db.RegisterMessage(CatalogsRequest) - -DatabasePropertyRequest = _reflection.GeneratedProtocolMessageType('DatabasePropertyRequest', (_message.Message,), { - 'DESCRIPTOR' : _DATABASEPROPERTYREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:DatabasePropertyRequest) - }) -_sym_db.RegisterMessage(DatabasePropertyRequest) - -SchemasRequest = _reflection.GeneratedProtocolMessageType('SchemasRequest', (_message.Message,), { - 'DESCRIPTOR' : _SCHEMASREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:SchemasRequest) 
- }) -_sym_db.RegisterMessage(SchemasRequest) - -TablesRequest = _reflection.GeneratedProtocolMessageType('TablesRequest', (_message.Message,), { - 'DESCRIPTOR' : _TABLESREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:TablesRequest) - }) -_sym_db.RegisterMessage(TablesRequest) - -TableTypesRequest = _reflection.GeneratedProtocolMessageType('TableTypesRequest', (_message.Message,), { - 'DESCRIPTOR' : _TABLETYPESREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:TableTypesRequest) - }) -_sym_db.RegisterMessage(TableTypesRequest) - -ColumnsRequest = _reflection.GeneratedProtocolMessageType('ColumnsRequest', (_message.Message,), { - 'DESCRIPTOR' : _COLUMNSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ColumnsRequest) - }) -_sym_db.RegisterMessage(ColumnsRequest) - -TypeInfoRequest = _reflection.GeneratedProtocolMessageType('TypeInfoRequest', (_message.Message,), { - 'DESCRIPTOR' : _TYPEINFOREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:TypeInfoRequest) - }) -_sym_db.RegisterMessage(TypeInfoRequest) - -PrimaryKeysRequest = _reflection.GeneratedProtocolMessageType('PrimaryKeysRequest', (_message.Message,), { - 'DESCRIPTOR' : _PRIMARYKEYSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:PrimaryKeysRequest) - }) -_sym_db.RegisterMessage(PrimaryKeysRequest) - -ImportedKeysRequest = _reflection.GeneratedProtocolMessageType('ImportedKeysRequest', (_message.Message,), { - 'DESCRIPTOR' : _IMPORTEDKEYSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ImportedKeysRequest) - }) -_sym_db.RegisterMessage(ImportedKeysRequest) - -ExportedKeysRequest = _reflection.GeneratedProtocolMessageType('ExportedKeysRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXPORTEDKEYSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ExportedKeysRequest) - }) 
-_sym_db.RegisterMessage(ExportedKeysRequest) - -IndexInfoRequest = _reflection.GeneratedProtocolMessageType('IndexInfoRequest', (_message.Message,), { - 'DESCRIPTOR' : _INDEXINFOREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:IndexInfoRequest) - }) -_sym_db.RegisterMessage(IndexInfoRequest) - -PrepareAndExecuteRequest = _reflection.GeneratedProtocolMessageType('PrepareAndExecuteRequest', (_message.Message,), { - 'DESCRIPTOR' : _PREPAREANDEXECUTEREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:PrepareAndExecuteRequest) - }) -_sym_db.RegisterMessage(PrepareAndExecuteRequest) - -PrepareRequest = _reflection.GeneratedProtocolMessageType('PrepareRequest', (_message.Message,), { - 'DESCRIPTOR' : _PREPAREREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:PrepareRequest) - }) -_sym_db.RegisterMessage(PrepareRequest) - -FetchRequest = _reflection.GeneratedProtocolMessageType('FetchRequest', (_message.Message,), { - 'DESCRIPTOR' : _FETCHREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:FetchRequest) - }) -_sym_db.RegisterMessage(FetchRequest) - -CreateStatementRequest = _reflection.GeneratedProtocolMessageType('CreateStatementRequest', (_message.Message,), { - 'DESCRIPTOR' : _CREATESTATEMENTREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:CreateStatementRequest) - }) -_sym_db.RegisterMessage(CreateStatementRequest) - -CloseStatementRequest = _reflection.GeneratedProtocolMessageType('CloseStatementRequest', (_message.Message,), { - 'DESCRIPTOR' : _CLOSESTATEMENTREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:CloseStatementRequest) - }) -_sym_db.RegisterMessage(CloseStatementRequest) - -OpenConnectionRequest = _reflection.GeneratedProtocolMessageType('OpenConnectionRequest', (_message.Message,), { - - 'InfoEntry' : _reflection.GeneratedProtocolMessageType('InfoEntry', 
(_message.Message,), { - 'DESCRIPTOR' : _OPENCONNECTIONREQUEST_INFOENTRY, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:OpenConnectionRequest.InfoEntry) - }) - , - 'DESCRIPTOR' : _OPENCONNECTIONREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:OpenConnectionRequest) - }) -_sym_db.RegisterMessage(OpenConnectionRequest) -_sym_db.RegisterMessage(OpenConnectionRequest.InfoEntry) - -CloseConnectionRequest = _reflection.GeneratedProtocolMessageType('CloseConnectionRequest', (_message.Message,), { - 'DESCRIPTOR' : _CLOSECONNECTIONREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:CloseConnectionRequest) - }) -_sym_db.RegisterMessage(CloseConnectionRequest) - -ConnectionSyncRequest = _reflection.GeneratedProtocolMessageType('ConnectionSyncRequest', (_message.Message,), { - 'DESCRIPTOR' : _CONNECTIONSYNCREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ConnectionSyncRequest) - }) -_sym_db.RegisterMessage(ConnectionSyncRequest) - -ExecuteRequest = _reflection.GeneratedProtocolMessageType('ExecuteRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXECUTEREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ExecuteRequest) - }) -_sym_db.RegisterMessage(ExecuteRequest) - -SyncResultsRequest = _reflection.GeneratedProtocolMessageType('SyncResultsRequest', (_message.Message,), { - 'DESCRIPTOR' : _SYNCRESULTSREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:SyncResultsRequest) - }) -_sym_db.RegisterMessage(SyncResultsRequest) - -CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), { - 'DESCRIPTOR' : _COMMITREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:CommitRequest) - }) -_sym_db.RegisterMessage(CommitRequest) - -RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), { - 
'DESCRIPTOR' : _ROLLBACKREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:RollbackRequest) - }) -_sym_db.RegisterMessage(RollbackRequest) - -PrepareAndExecuteBatchRequest = _reflection.GeneratedProtocolMessageType('PrepareAndExecuteBatchRequest', (_message.Message,), { - 'DESCRIPTOR' : _PREPAREANDEXECUTEBATCHREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:PrepareAndExecuteBatchRequest) - }) -_sym_db.RegisterMessage(PrepareAndExecuteBatchRequest) - -UpdateBatch = _reflection.GeneratedProtocolMessageType('UpdateBatch', (_message.Message,), { - 'DESCRIPTOR' : _UPDATEBATCH, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:UpdateBatch) - }) -_sym_db.RegisterMessage(UpdateBatch) - -ExecuteBatchRequest = _reflection.GeneratedProtocolMessageType('ExecuteBatchRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXECUTEBATCHREQUEST, - '__module__' : 'requests_pb2' - # @@protoc_insertion_point(class_scope:ExecuteBatchRequest) - }) -_sym_db.RegisterMessage(ExecuteBatchRequest) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n org.apache.calcite.avatica.proto' - _OPENCONNECTIONREQUEST_INFOENTRY._options = None - _OPENCONNECTIONREQUEST_INFOENTRY._serialized_options = b'8\001' - _CATALOGSREQUEST._serialized_start=32 - _CATALOGSREQUEST._serialized_end=72 - _DATABASEPROPERTYREQUEST._serialized_start=74 - _DATABASEPROPERTYREQUEST._serialized_end=122 - _SCHEMASREQUEST._serialized_start=124 - _SCHEMASREQUEST._serialized_end=204 - _TABLESREQUEST._serialized_start=207 - _TABLESREQUEST._serialized_end=356 - _TABLETYPESREQUEST._serialized_start=358 - _TABLETYPESREQUEST._serialized_end=400 - _COLUMNSREQUEST._serialized_start=403 - _COLUMNSREQUEST._serialized_end=540 - _TYPEINFOREQUEST._serialized_start=542 - _TYPEINFOREQUEST._serialized_end=582 - _PRIMARYKEYSREQUEST._serialized_start=584 - _PRIMARYKEYSREQUEST._serialized_end=680 - 
_IMPORTEDKEYSREQUEST._serialized_start=682 - _IMPORTEDKEYSREQUEST._serialized_end=779 - _EXPORTEDKEYSREQUEST._serialized_start=781 - _EXPORTEDKEYSREQUEST._serialized_end=878 - _INDEXINFOREQUEST._serialized_start=881 - _INDEXINFOREQUEST._serialized_end=1012 - _PREPAREANDEXECUTEREQUEST._serialized_start=1015 - _PREPAREANDEXECUTEREQUEST._serialized_end=1176 - _PREPAREREQUEST._serialized_start=1178 - _PREPAREREQUEST._serialized_end=1277 - _FETCHREQUEST._serialized_start=1280 - _FETCHREQUEST._serialized_end=1408 - _CREATESTATEMENTREQUEST._serialized_start=1410 - _CREATESTATEMENTREQUEST._serialized_end=1457 - _CLOSESTATEMENTREQUEST._serialized_start=1459 - _CLOSESTATEMENTREQUEST._serialized_end=1527 - _OPENCONNECTIONREQUEST._serialized_start=1530 - _OPENCONNECTIONREQUEST._serialized_end=1669 - _OPENCONNECTIONREQUEST_INFOENTRY._serialized_start=1626 - _OPENCONNECTIONREQUEST_INFOENTRY._serialized_end=1669 - _CLOSECONNECTIONREQUEST._serialized_start=1671 - _CLOSECONNECTIONREQUEST._serialized_end=1718 - _CONNECTIONSYNCREQUEST._serialized_start=1720 - _CONNECTIONSYNCREQUEST._serialized_end=1809 - _EXECUTEREQUEST._serialized_start=1812 - _EXECUTEREQUEST._serialized_end=2011 - _SYNCRESULTSREQUEST._serialized_start=2013 - _SYNCRESULTSREQUEST._serialized_end=2122 - _COMMITREQUEST._serialized_start=2124 - _COMMITREQUEST._serialized_end=2162 - _ROLLBACKREQUEST._serialized_start=2164 - _ROLLBACKREQUEST._serialized_end=2204 - _PREPAREANDEXECUTEBATCHREQUEST._serialized_start=2206 - _PREPAREANDEXECUTEBATCHREQUEST._serialized_end=2304 - _UPDATEBATCH._serialized_start=2306 - _UPDATEBATCH._serialized_end=2358 - _EXECUTEBATCHREQUEST._serialized_start=2360 - _EXECUTEBATCHREQUEST._serialized_end=2457 -# @@protoc_insertion_point(module_scope) diff --git a/polypheny/avatica/protobuf/responses_pb2.py b/polypheny/avatica/protobuf/responses_pb2.py deleted file mode 100644 index 4925892d..00000000 --- a/polypheny/avatica/protobuf/responses_pb2.py +++ /dev/null @@ -1,196 +0,0 @@ -# -*- coding: 
utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: responses.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from . import common_pb2 as common__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fresponses.proto\x1a\x0c\x63ommon.proto\"\xc9\x01\n\x11ResultSetResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x15\n\rown_statement\x18\x03 \x01(\x08\x12\x1d\n\tsignature\x18\x04 \x01(\x0b\x32\n.Signature\x12\x1b\n\x0b\x66irst_frame\x18\x05 \x01(\x0b\x32\x06.Frame\x12\x14\n\x0cupdate_count\x18\x06 \x01(\x04\x12\x1e\n\x08metadata\x18\x07 \x01(\x0b\x32\x0c.RpcMetadata\"q\n\x0f\x45xecuteResponse\x12#\n\x07results\x18\x01 \x03(\x0b\x32\x12.ResultSetResponse\x12\x19\n\x11missing_statement\x18\x02 \x01(\x08\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"V\n\x0fPrepareResponse\x12#\n\tstatement\x18\x01 \x01(\x0b\x32\x10.StatementHandle\x12\x1e\n\x08metadata\x18\x02 \x01(\x0b\x32\x0c.RpcMetadata\"z\n\rFetchResponse\x12\x15\n\x05\x66rame\x18\x01 \x01(\x0b\x32\x06.Frame\x12\x19\n\x11missing_statement\x18\x02 \x01(\x08\x12\x17\n\x0fmissing_results\x18\x03 \x01(\x08\x12\x1e\n\x08metadata\x18\x04 \x01(\x0b\x32\x0c.RpcMetadata\"f\n\x17\x43reateStatementResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"8\n\x16\x43loseStatementResponse\x12\x1e\n\x08metadata\x18\x01 \x01(\x0b\x32\x0c.RpcMetadata\"8\n\x16OpenConnectionResponse\x12\x1e\n\x08metadata\x18\x01 
\x01(\x0b\x32\x0c.RpcMetadata\"9\n\x17\x43loseConnectionResponse\x12\x1e\n\x08metadata\x18\x01 \x01(\x0b\x32\x0c.RpcMetadata\"c\n\x16\x43onnectionSyncResponse\x12)\n\nconn_props\x18\x01 \x01(\x0b\x32\x15.ConnectionProperties\x12\x1e\n\x08metadata\x18\x02 \x01(\x0b\x32\x0c.RpcMetadata\"u\n\x17\x44\x61tabasePropertyElement\x12\x1e\n\x03key\x18\x01 \x01(\x0b\x32\x11.DatabaseProperty\x12\x1a\n\x05value\x18\x02 \x01(\x0b\x32\x0b.TypedValue\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"c\n\x18\x44\x61tabasePropertyResponse\x12\'\n\x05props\x18\x01 \x03(\x0b\x32\x18.DatabasePropertyElement\x12\x1e\n\x08metadata\x18\x02 \x01(\x0b\x32\x0c.RpcMetadata\"\xb6\x01\n\rErrorResponse\x12\x12\n\nexceptions\x18\x01 \x03(\t\x12\x16\n\x0ehas_exceptions\x18\x07 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x1b\n\x08severity\x18\x03 \x01(\x0e\x32\t.Severity\x12\x12\n\nerror_code\x18\x04 \x01(\r\x12\x11\n\tsql_state\x18\x05 \x01(\t\x12\x1e\n\x08metadata\x18\x06 \x01(\x0b\x32\x0c.RpcMetadata\"f\n\x13SyncResultsResponse\x12\x19\n\x11missing_statement\x18\x01 \x01(\x08\x12\x14\n\x0cmore_results\x18\x02 \x01(\x08\x12\x1e\n\x08metadata\x18\x03 \x01(\x0b\x32\x0c.RpcMetadata\"%\n\x0bRpcMetadata\x12\x16\n\x0eserver_address\x18\x01 \x01(\t\"\x10\n\x0e\x43ommitResponse\"\x12\n\x10RollbackResponse\"\x95\x01\n\x14\x45xecuteBatchResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\r\x12\x15\n\rupdate_counts\x18\x03 \x03(\x04\x12\x19\n\x11missing_statement\x18\x04 \x01(\x08\x12\x1e\n\x08metadata\x18\x05 \x01(\x0b\x32\x0c.RpcMetadataB\"\n org.apache.calcite.avatica.protob\x06proto3') - - - -_RESULTSETRESPONSE = DESCRIPTOR.message_types_by_name['ResultSetResponse'] -_EXECUTERESPONSE = DESCRIPTOR.message_types_by_name['ExecuteResponse'] -_PREPARERESPONSE = DESCRIPTOR.message_types_by_name['PrepareResponse'] -_FETCHRESPONSE = DESCRIPTOR.message_types_by_name['FetchResponse'] -_CREATESTATEMENTRESPONSE = 
DESCRIPTOR.message_types_by_name['CreateStatementResponse'] -_CLOSESTATEMENTRESPONSE = DESCRIPTOR.message_types_by_name['CloseStatementResponse'] -_OPENCONNECTIONRESPONSE = DESCRIPTOR.message_types_by_name['OpenConnectionResponse'] -_CLOSECONNECTIONRESPONSE = DESCRIPTOR.message_types_by_name['CloseConnectionResponse'] -_CONNECTIONSYNCRESPONSE = DESCRIPTOR.message_types_by_name['ConnectionSyncResponse'] -_DATABASEPROPERTYELEMENT = DESCRIPTOR.message_types_by_name['DatabasePropertyElement'] -_DATABASEPROPERTYRESPONSE = DESCRIPTOR.message_types_by_name['DatabasePropertyResponse'] -_ERRORRESPONSE = DESCRIPTOR.message_types_by_name['ErrorResponse'] -_SYNCRESULTSRESPONSE = DESCRIPTOR.message_types_by_name['SyncResultsResponse'] -_RPCMETADATA = DESCRIPTOR.message_types_by_name['RpcMetadata'] -_COMMITRESPONSE = DESCRIPTOR.message_types_by_name['CommitResponse'] -_ROLLBACKRESPONSE = DESCRIPTOR.message_types_by_name['RollbackResponse'] -_EXECUTEBATCHRESPONSE = DESCRIPTOR.message_types_by_name['ExecuteBatchResponse'] -ResultSetResponse = _reflection.GeneratedProtocolMessageType('ResultSetResponse', (_message.Message,), { - 'DESCRIPTOR' : _RESULTSETRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:ResultSetResponse) - }) -_sym_db.RegisterMessage(ResultSetResponse) - -ExecuteResponse = _reflection.GeneratedProtocolMessageType('ExecuteResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXECUTERESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:ExecuteResponse) - }) -_sym_db.RegisterMessage(ExecuteResponse) - -PrepareResponse = _reflection.GeneratedProtocolMessageType('PrepareResponse', (_message.Message,), { - 'DESCRIPTOR' : _PREPARERESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:PrepareResponse) - }) -_sym_db.RegisterMessage(PrepareResponse) - -FetchResponse = _reflection.GeneratedProtocolMessageType('FetchResponse', (_message.Message,), { - 'DESCRIPTOR' : _FETCHRESPONSE, - 
'__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:FetchResponse) - }) -_sym_db.RegisterMessage(FetchResponse) - -CreateStatementResponse = _reflection.GeneratedProtocolMessageType('CreateStatementResponse', (_message.Message,), { - 'DESCRIPTOR' : _CREATESTATEMENTRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:CreateStatementResponse) - }) -_sym_db.RegisterMessage(CreateStatementResponse) - -CloseStatementResponse = _reflection.GeneratedProtocolMessageType('CloseStatementResponse', (_message.Message,), { - 'DESCRIPTOR' : _CLOSESTATEMENTRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:CloseStatementResponse) - }) -_sym_db.RegisterMessage(CloseStatementResponse) - -OpenConnectionResponse = _reflection.GeneratedProtocolMessageType('OpenConnectionResponse', (_message.Message,), { - 'DESCRIPTOR' : _OPENCONNECTIONRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:OpenConnectionResponse) - }) -_sym_db.RegisterMessage(OpenConnectionResponse) - -CloseConnectionResponse = _reflection.GeneratedProtocolMessageType('CloseConnectionResponse', (_message.Message,), { - 'DESCRIPTOR' : _CLOSECONNECTIONRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:CloseConnectionResponse) - }) -_sym_db.RegisterMessage(CloseConnectionResponse) - -ConnectionSyncResponse = _reflection.GeneratedProtocolMessageType('ConnectionSyncResponse', (_message.Message,), { - 'DESCRIPTOR' : _CONNECTIONSYNCRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:ConnectionSyncResponse) - }) -_sym_db.RegisterMessage(ConnectionSyncResponse) - -DatabasePropertyElement = _reflection.GeneratedProtocolMessageType('DatabasePropertyElement', (_message.Message,), { - 'DESCRIPTOR' : _DATABASEPROPERTYELEMENT, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:DatabasePropertyElement) - }) 
-_sym_db.RegisterMessage(DatabasePropertyElement) - -DatabasePropertyResponse = _reflection.GeneratedProtocolMessageType('DatabasePropertyResponse', (_message.Message,), { - 'DESCRIPTOR' : _DATABASEPROPERTYRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:DatabasePropertyResponse) - }) -_sym_db.RegisterMessage(DatabasePropertyResponse) - -ErrorResponse = _reflection.GeneratedProtocolMessageType('ErrorResponse', (_message.Message,), { - 'DESCRIPTOR' : _ERRORRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:ErrorResponse) - }) -_sym_db.RegisterMessage(ErrorResponse) - -SyncResultsResponse = _reflection.GeneratedProtocolMessageType('SyncResultsResponse', (_message.Message,), { - 'DESCRIPTOR' : _SYNCRESULTSRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:SyncResultsResponse) - }) -_sym_db.RegisterMessage(SyncResultsResponse) - -RpcMetadata = _reflection.GeneratedProtocolMessageType('RpcMetadata', (_message.Message,), { - 'DESCRIPTOR' : _RPCMETADATA, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:RpcMetadata) - }) -_sym_db.RegisterMessage(RpcMetadata) - -CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), { - 'DESCRIPTOR' : _COMMITRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:CommitResponse) - }) -_sym_db.RegisterMessage(CommitResponse) - -RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), { - 'DESCRIPTOR' : _ROLLBACKRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:RollbackResponse) - }) -_sym_db.RegisterMessage(RollbackResponse) - -ExecuteBatchResponse = _reflection.GeneratedProtocolMessageType('ExecuteBatchResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXECUTEBATCHRESPONSE, - '__module__' : 'responses_pb2' - # @@protoc_insertion_point(class_scope:ExecuteBatchResponse) - 
}) -_sym_db.RegisterMessage(ExecuteBatchResponse) - -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n org.apache.calcite.avatica.proto' - _RESULTSETRESPONSE._serialized_start=34 - _RESULTSETRESPONSE._serialized_end=235 - _EXECUTERESPONSE._serialized_start=237 - _EXECUTERESPONSE._serialized_end=350 - _PREPARERESPONSE._serialized_start=352 - _PREPARERESPONSE._serialized_end=438 - _FETCHRESPONSE._serialized_start=440 - _FETCHRESPONSE._serialized_end=562 - _CREATESTATEMENTRESPONSE._serialized_start=564 - _CREATESTATEMENTRESPONSE._serialized_end=666 - _CLOSESTATEMENTRESPONSE._serialized_start=668 - _CLOSESTATEMENTRESPONSE._serialized_end=724 - _OPENCONNECTIONRESPONSE._serialized_start=726 - _OPENCONNECTIONRESPONSE._serialized_end=782 - _CLOSECONNECTIONRESPONSE._serialized_start=784 - _CLOSECONNECTIONRESPONSE._serialized_end=841 - _CONNECTIONSYNCRESPONSE._serialized_start=843 - _CONNECTIONSYNCRESPONSE._serialized_end=942 - _DATABASEPROPERTYELEMENT._serialized_start=944 - _DATABASEPROPERTYELEMENT._serialized_end=1061 - _DATABASEPROPERTYRESPONSE._serialized_start=1063 - _DATABASEPROPERTYRESPONSE._serialized_end=1162 - _ERRORRESPONSE._serialized_start=1165 - _ERRORRESPONSE._serialized_end=1347 - _SYNCRESULTSRESPONSE._serialized_start=1349 - _SYNCRESULTSRESPONSE._serialized_end=1451 - _RPCMETADATA._serialized_start=1453 - _RPCMETADATA._serialized_end=1490 - _COMMITRESPONSE._serialized_start=1492 - _COMMITRESPONSE._serialized_end=1508 - _ROLLBACKRESPONSE._serialized_start=1510 - _ROLLBACKRESPONSE._serialized_end=1528 - _EXECUTEBATCHRESPONSE._serialized_start=1531 - _EXECUTEBATCHRESPONSE._serialized_end=1680 -# @@protoc_insertion_point(module_scope) diff --git a/polypheny/connection.py b/polypheny/connection.py index e1443cac..aaa22f18 100644 --- a/polypheny/connection.py +++ b/polypheny/connection.py @@ -1,158 +1,339 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny 
Project +# Copyright 2024 The Polypheny Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - - -import uuid -import weakref -import logging as logger - -from polypheny.cursor import PolyphenyCursor -from polypheny.avatica.client import OPEN_CONNECTION_PROPERTIES -from polypheny.auth import AuthManager -from polypheny.environment import (CLIENT_NAME, CLIENT_VERSION,PLATFORM,OPERATING_SYSTEM,POLYPHENY_CONNECTOR_VERSION,PYTHON_VERSION) - -from polypheny.errors import * - -__all__ = ['Connection'] - -# According to https://www.python.org/dev/peps/pep-0249/#paramstyle -SUPPORTED_PARAMSTYLES = { - "qmark", - "numeric", - "named", - "format", - "pyformat", -} - - - - -class PolyphenyConnection(object): - """Implementation of the connection object for Polypheny-DB. - - Use connect(..) to get the object. - - You should not construct this object manually, use :func:`~polypheny.connect` instead. - - Attributes: - session_id: The session ID of the connection. - user: The user name used in the connection. - host: The host name the connection attempts to connect to. - port: The port to communicate with on the host - """ +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List, Any + +from org.polypheny.prism import relational_frame_pb2 +from polypheny import rpc +from polypheny.exceptions import * +from polypheny.serialize import * + + +class Connection: + def __init__(self, address, username, password, transport, kwargs): + self.cursors = set() + self.con = None # Needed so destructor works + + try: + self.con = rpc.Connection(address, transport, kwargs) + except ConnectionRefusedError: + raise Error("Connection refused") from None + + try: + resp = self.con.connect(username, password, False) + if not resp.is_compatible: + raise Error( + f"Client ({rpc.POLYPHENY_API_MAJOR}.{rpc.POLYPHENY_API_MINOR}) is incompatible with Server version ({resp.major_api_version}.{resp.minor_api_version})") + except Exception as e: + # This manual dance prevents the disconnect message from being sent + self.con.con.close() + self.con.con = None + self.con = None + raise Error(str(e)) + + def cursor(self): + if self.con is None: + raise ProgrammingError('Connection is closed') + cur = Cursor(self) + self.cursors.add(cur) + return cur - def __init__(self, client, **kwargs): + def commit(self): + """ + .. Note:: - self._client = client - self._closed = False + Performing a DDL automatically commits the transaction. + See the :py:meth:`rollback` method for an example what this + means. + """ + if self.con is None: + raise ProgrammingError('Connection is closed') + self.con.commit() - logger.info( - "Polypheny Connector for Python Version: %s, " - "Python Version: %s, Platform: %s", - POLYPHENY_CONNECTOR_VERSION, - PYTHON_VERSION, - PLATFORM, - ) + def rollback(self): + """ + .. Note:: + + It is not possible to rollback DDLs, as they commit automatically. 
+ + >>> cur.execute('SELECT * FROM fruits WHERE name = ?', ('Pear',)) + >>> cur.fetchone() + >>> cur.execute('INSERT INTO fruits (id, name) VALUES (2, ?)', ('Pear',)) + >>> cur.execute('CREATE TABLE demo(id INTEGER PRIMARY KEY)') + >>> # Implicit commit here because of DDL + >>> con.rollback() + >>> cur.execute('SELECT name FROM fruits WHERE name = ?', ('Pear',)) + >>> print(cur.fetchone()) + ['Pear'] + """ + if self.con is None: + raise ProgrammingError('Connection is closed') + self.con.rollback() - self._cursors = [] + def __del__(self): + # TODO Thread-safety? + self.close() - # Extract properties to pass to OpenConnectionRequest - self._connection_args = {} - # The rest of the kwargs - self._filtered_args = {} - for k in kwargs: - if k in OPEN_CONNECTION_PROPERTIES: - self._connection_args[k] = kwargs[k] + def close(self): + if self.con is None: + assert len(self.cursors) == 0 + return + + for cur in list(self.cursors): # self.cursors is materialized because cur.close modifies it + cur.close() + assert len(self.cursors) == 0 + + try: + self.rollback() + finally: + self.con.close() + self.con = None + + +class ResultCursor: + def __init__(self, con, statement_id, frame, fetch_size): + self.con = con + self.statement_id = statement_id + self.closed = False + self.frame = frame + self.fetch_size = fetch_size + if frame is not None: + restype = self.frame.WhichOneof('result') + assert restype is not None + if restype == 'relational_frame': + self.rows = iter(self.frame.relational_frame.rows) + elif restype == 'document_frame': + self.rows = iter(self.frame.document_frame.documents) + elif restype == 'graph_frame': + graph_frame = self.frame.graph_frame + if len(graph_frame.elements) > 0: + self.rows = iter(self.frame.graph_frame.element) + return else: - self._filtered_args[k] = kwargs[k] - - - - logger.debug("Trying to connect to URL:'" + str(self._client.url) + "'" ) - self.open() - - - - def open(self): - """Effectiviely opens the connection.""" - self._id = 
str(uuid.uuid4()) - logger.debug("New connection with id: ", self._id) - self._client.open_connection(self._id, info=self._connection_args) - + self.closed = True + raise NotImplementedError(f'Resultset of type {restype} is not implemented') + def __del__(self): + assert self.closed + self.close() def close(self): - """Closes the connection. - No further operations are allowed, either on the connection or any - of its cursors, once the connection is closed. + if self.closed: + return + assert self.con.con is not None + try: + self.con.con.close_statement(self.statement_id) + finally: + self.con = None + self.closed = True + + def __next__(self): + # frame is None when there were no results + if self.frame is None: + raise Error("Previous statement did not produce any results") + + assert self.rows is not None + + try: + return next(self.rows) + except StopIteration: + if self.frame.is_last: + raise + return self.nextframe() + + def nextframe(self): + self.frame = self.con.con.fetch(self.statement_id, self.fetch_size) + self.rows = iter(self.frame.relational_frame.rows) # TODO result must not be relational + return next(self.rows) # TODO: What happens if this returns StopIteration, but another frame could be fetched? + +class Cursor: + def __init__(self, con): + self.con = con + self.result = None + self.reset() + + def reset(self): + if self.result is not None: + self.result.close() + self.description = None + self.rowcount = -1 + self.arraysize = 1 + self.result = None + + # def callproc(self): + # optional + + def __del__(self): + self.close() - If the connection is used in a ``with`` statement, this method will - be automatically called at the end of the ``with`` block. + def close(self): + # TODO: Error when already closed? 
+ assert self.con is not None or self.result is None + if self.con is not None: + if self.result is not None: + self.result.close() + self.result = None + self.con.cursors.remove(self) + self.con = None + + def __iter__(self): + return self + + def __next__(self): + n = self.fetchone() + if n is None: + raise StopIteration + return n + + def derive_description(self, relframe): + self.description = [] + for column in relframe.column_meta: + self.description.append( + (column.column_label, None, None, None, None, column.precision, column.scale, column.is_nullable)) + + def execute(self, query: str, params: List[Any] = None, *, fetch_size: int = None): """ - if self._closed: - raise ProgrammingError('the connection is already closed') - for cursor_ref in self._cursors: - cursor = cursor_ref() - if cursor is not None and not cursor._closed: - cursor.close() - - # TODO check for open transactions and then call self.rollback() - - self._client.close_connection(self._id) - self._client.close() - self._closed = True - - - - def is_closed(self): - """Checks whether the connection has been closed.""" - return self._closed is None - - - - def commit(self): - """Commits the current transaction.""" - self._client.commit(self._id) - - if self._closed: - raise ProgrammingError('the connection is already closed') - - - - def rollback(self): - """Rolls back the current transaction.""" - self._client.rollback(self._id) - - if self._closed: - raise ProgrammingError('the connection is already closed') + Executes a SQL query. + """ + return self.executeany('sql', query, params, fetch_size=fetch_size) + def executemany(self, query: str, params: List[List[Any]]): + """ + Execute `query` once with each item in `params` as parameters. 
+ """ + # TODO: Optimize, this is to exercise the execute code more + for param in params: + self.execute(query, param) + def executeany(self, lang: str, query: str, params: List[Any] = None, *, + fetch_size: int = None, namespace: str = None): + """ + This method is used to query Polypheny in any of the supported + languages. Dynamic parameter substitution is language + specific - def cursor(self, **kwargs): - if self._closed: - raise ProgrammingError('the connection is already closed') + :param lang: + :param query: + :param params: + :param namespace: Sets the default namespace for the query. - cursor = PolyphenyCursor(self,**kwargs) - self._cursors.append(weakref.ref(cursor, self._cursors.remove)) + .. Note:: - return cursor + Queries returning graphs are not supported yet. + To query Polypheny using the MongoQL: + >>> cur.executeany('mongo', 'db.fruits.find({"id": 1})') + >>> print(cur.fetchone()) + {'id': 1, 'name': 'Orange'} + """ - + if self.con is None: + raise Error("Cursor is closed") + + self.reset() + + if params is None: # Unparameterized query + r = self.con.con.execute_unparameterized_statement(lang, query, fetch_size, namespace) + assert r.HasField("result") # Is this always true? + statement_id = r.statement_id + result = r.result + elif type(params) == list or type(params) == tuple: + resp = self.con.con.prepare_indexed_statement(lang, query, namespace) + statement_id = resp.statement_id + result = self.con.con.execute_indexed_statement(statement_id, params, fetch_size) + elif type(params) == dict: + resp = self.con.con.prepare_named_statement(lang, query, namespace) + statement_id = resp.statement_id + result = self.con.con.execute_named_statement(statement_id, params, fetch_size) + else: + raise Error("Unexpected type for params " + str(type(params))) + + if result.HasField( + "frame"): # TODO Better Error when one of the fetch* methods is invoked. Empty fake result? 
+ self.rowcount = -1 + if result.frame.WhichOneof('result') == 'relational_frame': + self.derive_description(result.frame.relational_frame) + frame = result.frame + else: + self.rowcount = result.scalar + frame = None + + self.result = ResultCursor(self.con, statement_id, frame, fetch_size) + + def fetchone(self): + if self.con is None: + raise ProgrammingError("Cursor is closed") + + if self.result is None: + raise ProgrammingError("No statement was yet executed") + + try: + n = next(self.result) + except StopIteration: + return None + + if isinstance(n, relational_frame_pb2.Row): + v = [] + for value in n.values: + v.append(proto2py(value)) + return v + elif isinstance(n, value_pb2.ProtoDocument): + value = value_pb2.ProtoValue() + value.document.CopyFrom(n) + return proto2py(value) + elif isinstance(n, value_pb2.ProtoNode): + value = n + return proto_node2py(value) + elif isinstance(n, value_pb2.ProtoEdge): + value = n + return proto_edge2py(value) + elif isinstance(n, value_pb2.ProtoPath): + raise Error("Paths are not supported yet.") + else: + raise Error(f"Unknown result of type {type(n)}") + + def fetchmany(self, size=None): + # TODO: Optimize, this is to exercise the fetch code more + if size is None: + size = self.arraysize + results = [] + for _ in range(size): + row = self.fetchone() + if row is None: + break + results.append(row) + return results + + def fetchall(self): + results = [] + while True: + row = self.fetchone() + if row is None: + break + results.append(row) + return results + + # optional + # def nextset(self): + # pass + + def setinputsizes(self, sizes): + """ This method is a no-op. 
""" + pass # We are free to do nothing + + def setoutputsize(self, sizes, column=None): + """ This method is a no-op """ + pass # We are free to do nothing diff --git a/polypheny/cursor.py b/polypheny/cursor.py deleted file mode 100644 index 90edfb0f..00000000 --- a/polypheny/cursor.py +++ /dev/null @@ -1,353 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging as logger -from polypheny.errors import * -from polypheny.types import TypeHelper -from polypheny.avatica.protobuf import common_pb2 - -from typing import ( - Any, - Dict, - Tuple, - Type, - Union, -) - -__all__ = ['PolyphenyCursor'] - -# Default configs, tuple of default variable and accepted types -DEFAULT_CURSOR_CONFIGURATION: Dict[str, Tuple[Any, Union[Type, Tuple[Type, ...]]]] = { - "language": ("sql", str), # standard could be extended with MQL. CQL, Cypher etc. -} - -class PolyphenyCursor: - """Implementation of database cursor object that is returned from Connection.cursor() method. - - You should not construct this object manually! - """ - - arraysize = 1 - """ - Read/write attribute specifying the number of rows to fetch - at a time with :meth:`fetchmany`. It defaults to 1 meaning to - fetch a single row at a time. 
- """ - - itersize = 2000 - - def __init__(self, connection, id=None): - self._connection = connection - self._id = id - self._signature = None - self._column_data_types = [] - self._frame = None - self._pos = None - self._closed = False - self.arraysize = self.__class__.arraysize - self.itersize = self.__class__.itersize - self._updatecount = -1 - - def __del__(self): - if not self._connection._closed and not self._closed: - self.close() - - - def __enter__(self): - return self - - - def __exit__(self, exc_type, exc_value, traceback): - if not self._closed: - self.close() - - - def __iter__(self): - return self - - - def __next__(self): - row = self.fetchone() - if row is None: - raise StopIteration - return row - - next = __next__ - - - - def close(self): - """Closes the cursor. - No further operations are allowed once the cursor is closed. - - If the cursor is used in a ``with`` statement, this method will - be automatically called at the end of the ``with`` block. - """ - if self._closed: - raise ProgrammingError('the cursor is already closed') - - if self._id is not None: - self._connection._client.close_statement(self._connection._id, self._id) - self._id = None - - self._signature = None - self._column_data_types = [] - self._frame = None - self._pos = None - self._closed = True - - - - @property - def is_closed(self): - """Read-only attribute specifying if the cursor is closed or not.""" - return self._closed - - - - def execute(self, command, parameters=None): - if self._closed: - raise ProgrammingError('the cursor is already closed') - - command = command.strip(" \t\n\r") if command else None - logger.debug("Executing command: ", command) - - if not command: - logger.warning("execute: no query is given to execute") - - if parameters is None: - if self._id is None: - self._set_id(self._connection._client.create_statement(self._connection._id)) - results = self._connection._client.prepare_and_execute( - self._connection._id, self._id, - command, 
first_frame_max_size=self.itersize) - - else: - statement = self._connection._client.prepare( - self._connection._id, command) - self._set_id(statement.id) - self._set_signature(statement.signature) - - results = self._connection._client.execute( - self._connection._id, self._id, - statement.signature, self._transform_parameters(parameters), - first_frame_max_size=self.itersize) - - self._process_results(results) - - - - def executemany(self, operation, seq_of_parameters): - if self._closed: - raise ProgrammingError('the cursor is already closed') - - self._updatecount = -1 - self._set_frame(None) - statement = self._connection._client.prepare( - self._connection._id, operation, max_rows_total=0) - - self._set_id(statement.id) - self._set_signature(statement.signature) - - for parameters in seq_of_parameters: - self._connection._client.execute( - self._connection._id, self._id, - statement.signature, self._transform_parameters(parameters), - first_frame_max_size=0) - - - - def fetchone(self): - if self._frame is None: - raise ProgrammingError('no select statement was executed') - - if self._pos is None: - return None - - rows = self._frame.rows - row = self._transform_row(rows[self._pos]) - self._pos += 1 - - if self._pos >= len(rows): - self._pos = None - if not self._frame.done: - self._fetch_next_frame() - - return row - - - def fetchmany(self, size=None): - if size is None: - size = self.arraysize - - rows = [] - - while size > 0: - row = self.fetchone() - - if row is None: - break - rows.append(row) - size -= 1 - - return rows - - - - def fetchall(self): - rows = [] - - while True: - row = self.fetchone() - - if row is None: - break - rows.append(row) - - return rows - - - - def _set_signature(self, signature): - self._signature = signature - self._column_data_types = [] - self._parameter_data_types = [] - if signature is None: - return - - for column in signature.columns: - dtype = TypeHelper.from_class(column.column_class_name) - 
self._column_data_types.append(dtype) - - '''for parameter in signature.parameters: - dtype = TypeHelper.from_class(parameter.class_name) - self._parameter_data_types.append(dtype) - ''' - - - - - def _set_frame(self, frame): - self._frame = frame - self._pos = None - - if frame is not None: - if frame.rows: - self._pos = 0 - elif not frame.done: - raise InternalError('got an empty frame, but the statement is not done yet') - - - - def _fetch_next_frame(self): - offset = self._frame.offset + len(self._frame.rows) - frame = self._connection._client.fetch( - self._connection._id, self._id, - offset=offset, frame_max_size=self.itersize) - - self._set_frame(frame) - - - - def _process_results(self, results): - if results: - result = results[0] - - if result.own_statement: - self._set_id(result.statement_id) - - # First Frame is currently skipped due to BUG described in: - # https://github.com/polypheny/Polypheny-DB/blame/0a51f433440e4e6086c66da19e5f4f85cac1995e/jdbc-interface/src/main/java/org/polypheny/db/jdbc/DbmsMeta.java#L1293 - # Therefore we have to immediately execute another feth operation - if result.HasField('first_frame'): - frame = result.first_frame - else: - # Needed for DQL only (SELECT, etc.) - if result.HasField('signature'): - frame = self._connection._client.fetch( - self._connection._id, self._id, - offset=0, frame_max_size=self.itersize) - - # For Non-DQL (DDL,DML,etc.) - else: - frame = None - - self._set_signature(result.signature if result.HasField('signature') else None) - self._set_frame(frame) - - self._updatecount = result.update_count - - - - def _transform_row(self, row): - """Transforms a Row into Python values. - :param row: - A ``common_pb2.Row`` object. - :returns: - A list of values casted into the correct Python types. 
- :raises: - NotImplementedError - """ - tmp_row = [] - - for i, column in enumerate(row.value): - - if column.has_array_value: - raise NotImplementedError('array types are not supported') - elif column.scalar_value.null: - tmp_row.append(None) - else: - field_name, rep, mutate_to, cast_from = self._column_data_types[i] - - # get the value from the field_name - value = getattr(column.scalar_value, field_name) - - # cast the value - if cast_from is not None: - value = cast_from(value) - - tmp_row.append(value) - - return tmp_row - - - def _transform_parameters(self, parameters): - typed_parameters = [] - for value, data_type in zip(parameters, self._parameter_data_types): - field_name, rep, mutate_to, cast_from = data_type - typed_value = common_pb2.TypedValue() - - if value is None: - typed_value.null = True - typed_value.type = common_pb2.NULL - else: - typed_value.null = False - - # use the mutator function - if mutate_to is not None: - value = mutate_to(value) - - typed_value.type = rep - setattr(typed_value, field_name, value) - - typed_parameters.append(typed_value) - return typed_parameters - - def _set_id(self, id): - if self._id is not None and self._id != id: - self._connection._client.close_statement(self._connection._id, self._id) - self._id = id diff --git a/polypheny/environment.py b/polypheny/environment.py deleted file mode 100644 index d1bb5283..00000000 --- a/polypheny/environment.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Various constants.""" - -import platform -import sys - -from .version import VERSION - -POLYPHENY_CONNECTOR_VERSION = ".".join(str(v) for v in VERSION[0:3]) -PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:3]) -OPERATING_SYSTEM = platform.system() -PLATFORM = platform.platform() - -CLIENT_NAME = "PolyphenyPythonConnector" # don't change! -CLIENT_VERSION = ".".join([str(v) for v in VERSION[:3]]) \ No newline at end of file diff --git a/polypheny/errors.py b/polypheny/errors.py deleted file mode 100644 index 4012af62..00000000 --- a/polypheny/errors.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# According do DB API 2.0 : https://www.python.org/dev/peps/pep-0249/#exceptions - -__all__ = [ - 'Warning', 'Error', 'InterfaceError', 'DatabaseError', 'DataError', - 'OperationalError', 'IntegrityError', 'InternalError', - 'ProgrammingError', 'NotSupportedError', -] - - -_StandardError = Exception - - -class Warning(_StandardError): - """Not yes used by this package, only defined for compatibility - with DB API 2.0.""" - - -class Error(_StandardError): - """Exception that is the base class of all other error exceptions. 
- You can use this to catch all errors with one single except statement.""" - - def __init__(self, message, code=None, sqlstate=None, cause=None): - super(_StandardError, self).__init__(message, code, sqlstate, cause) - - @property - def message(self): - return self.args[0] - - @property - def code(self): - return self.args[1] - - @property - def sqlstate(self): - return self.args[2] - - @property - def cause(self): - return self.args[3] - - -class InterfaceError(Error): - """Exception raised for errors that are related to the database - interface rather than the database itself.""" - - -class DatabaseError(Error): - """Exception raised for errors that are related to the database.""" - - -class DataError(DatabaseError): - """Exception raised for errors that are due to problems with the - processed data like division by zero, numeric value out of range, - etc.""" - - -class OperationalError(DatabaseError): - """Raised for errors that are related to the database's operation and not - necessarily under the control of the programmer, e.g. an unexpected - disconnect occurs, the data source name is not found, a transaction could - not be processed, a memory allocation error occurred during - processing, etc.""" - - -class IntegrityError(DatabaseError): - """Raised when the relational integrity of the database is affected, e.g. a foreign key check fails.""" - - -class InternalError(DatabaseError): - """Raised when the database encounters an internal problem.""" - - -class ProgrammingError(DatabaseError): - """Raises for programming errors, e.g. 
table not found, syntax error, etc.""" - - -class NotSupportedError(DatabaseError): - """Raised when using an API that is not supported by the database.""" diff --git a/polypheny/exceptions.py b/polypheny/exceptions.py new file mode 100644 index 00000000..5dc7911e --- /dev/null +++ b/polypheny/exceptions.py @@ -0,0 +1,52 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class Warning(Exception): + pass + + +class Error(Exception): + pass + + +class InterfaceError(Error): + pass + + +class DatabaseError(Error): + pass + + +class DataError(DatabaseError): + pass + + +class OperationalError(DatabaseError): + pass + + +class IntegrityError(DatabaseError): + pass + + +class InternalError(DatabaseError): + pass + + +class ProgrammingError(DatabaseError): + pass + + +class NotSupportedError(DatabaseError): + pass diff --git a/polypheny/interval.py b/polypheny/interval.py new file mode 100644 index 00000000..2fea24a2 --- /dev/null +++ b/polypheny/interval.py @@ -0,0 +1,37 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +def simple_plural(count, word): + if count == 1: + return word + else: + return word + "s" + + +def simple_str(count, word): + return f"{count} {simple_plural(count, word)}" + + +class IntervalMonthMilliseconds: + def __init__(self, months, milliseconds): + #: The number of months + self.months = months + self.milliseconds = milliseconds + + def __eq__(self, other): + return isinstance(other, + IntervalMonthMilliseconds) and self.months == other.months and self.milliseconds == other.milliseconds + + def __str__(self): + return simple_str(self.months, "month") + " and " + simple_str(self.milliseconds, "millisecond") diff --git a/polypheny/rpc.py b/polypheny/rpc.py new file mode 100644 index 00000000..4e1d542c --- /dev/null +++ b/polypheny/rpc.py @@ -0,0 +1,238 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import socket + +from polypheny.exceptions import Error +from polypheny.serialize import * +from org.polypheny.prism import protointerface_pb2 +from org.polypheny.prism import statement_requests_pb2 +from org.polypheny.prism import transaction_requests_pb2 +from org.polypheny.prism import connection_requests_pb2 +from org.polypheny.prism import version + +POLYPHENY_API_MAJOR = version.MAJOR_VERSION +POLYPHENY_API_MINOR = version.MINOR_VERSION + + +class PlainTransport: + VERSION = "plain-v1@polypheny.com" + + def __init__(self, address): + self.con = socket.create_connection(address) + self.con.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + self.exchange_version(self.VERSION) + + def exchange_version(self, version): + bl = self.con.recv(1) + if len(bl) != 1: + raise EOFError + n = int.from_bytes(bl, byteorder='little') + if n > 127: + raise Error("Invalid version length") + remote_version = self.con.recv(n) + if remote_version[-1] != 0x0a: + raise Error("Invalid version message") + + if remote_version[0:-1] != version.encode(): + raise Error(f"Unsupported version: {repr(remote_version[0:-1])} expected {version.encode()}") + + self.con.sendall(bl + remote_version) + + def send_msg(self, serialized): + n = len(serialized) + bl = n.to_bytes(length=8, byteorder='little') + self.con.sendall(bl + serialized) + + def recv_msg(self): + bl = self.con.recv(8) + n = int.from_bytes(bl, 'little') + raw = self.con.recv(n) + if len(raw) != n: + raise EOFError + return raw + + def close(self): + if self.con is not None: + self.con.close() + self.con = None + + +class UnixTransport(PlainTransport): + VERSION = "unix-v1@polypheny.com" + + def __init__(self, path): + self.con = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + if path is None: + path = os.path.expanduser("~/.polypheny/polypheny-prism.sock") + self.con.connect(path) + self.exchange_version(self.VERSION) + + +class Connection: + def __init__(self, address, transport, kwargs): + if transport == 
"plain": + self.con = PlainTransport(address) + elif transport == "unix": + self.con = UnixTransport(address) + else: + raise Exception("Unknown transport: " + transport) + self.id = 1 + + def close(self): + if self.con is None: + return + try: + self.disconnect() + except Exception as e: + pass + + try: + self.con.close() + except Exception: + pass + self.con = None + + def new_request(self): + msg = protointerface_pb2.Request() + msg.id = self.id + self.id += 1 + return msg + + def send_msg(self, msg): + self.con.send_msg(msg.SerializeToString()) + + def recv_msg(self): + r = protointerface_pb2.Response() + r.ParseFromString(self.con.recv_msg()) + if r.WhichOneof('type') == 'error_response': + # TODO: Add to error_response something to decide if this is necessary + # self.con.close() + # self.con = None + raise Error(r.error_response.message) + return r + + def call(self, msg): + self.send_msg(msg) + response = self.recv_msg() + assert response.id == msg.id + assert response.last + return response + + def connect(self, username, password, auto_commit): + msg = self.new_request() + req = msg.connection_request + if username is not None: + req.username = username + if password is not None: + req.password = password + req.major_api_version = POLYPHENY_API_MAJOR + req.minor_api_version = POLYPHENY_API_MINOR + connection_properties = { + "auto_commit": "true" if auto_commit else "false" + } + req.properties.update(connection_properties) + req.features = [] # features for the feature vector would be set here + return self.call(msg).connection_response + + def disconnect(self): + msg = self.new_request() + req = connection_requests_pb2.DisconnectRequest() + msg.disconnect_request.MergeFrom(req) + + return self.call(msg).disconnect_response + + def commit(self): + msg = self.new_request() + req = transaction_requests_pb2.CommitRequest() + msg.commit_request.MergeFrom(req) + + return self.call(msg).commit_response + + def rollback(self): + msg = self.new_request() + req = 
transaction_requests_pb2.RollbackRequest() + msg.rollback_request.MergeFrom(req) + return self.call(msg).rollback_response + + def execute_unparameterized_statement(self, language_name, statement, fetch_size, namespace): + msg = self.new_request() + req = statement_requests_pb2.ExecuteUnparameterizedStatementRequest() + req.language_name = language_name + req.statement = statement + if fetch_size: + req.fetch_size = fetch_size + if namespace: + req.namespace_name = namespace + + msg.execute_unparameterized_statement_request.MergeFrom(req) + + self.send_msg(msg) + self.recv_msg() + r = self.recv_msg() + assert r.id == msg.id + assert r.last + return r.statement_response + + def prepare_indexed_statement(self, language_name, statement, namespace): + msg = self.new_request() + req = msg.prepare_indexed_statement_request + req.language_name = language_name + req.statement = statement + if namespace: + req.namespace_name = namespace + return self.call(msg).prepared_statement_signature + + def execute_indexed_statement(self, statement_id, params, fetch_size): + msg = self.new_request() + req = msg.execute_indexed_statement_request + req.statement_id = statement_id + req.parameters.parameters.extend(list(map(py2proto, params))) + if fetch_size: + req.fetch_size = fetch_size + return self.call(msg).statement_result + + def prepare_named_statement(self, language_name, statement, namespace): + msg = self.new_request() + req = msg.prepare_named_statement_request + req.language_name = language_name + req.statement = statement + if namespace: + req.namespace_name = namespace + return self.call(msg).prepared_statement_signature + + def execute_named_statement(self, statement_id, params, fetch_size): + msg = self.new_request() + req = msg.execute_named_statement_request + req.statement_id = statement_id + if fetch_size: + req.fetch_size = fetch_size + for k, v in params.items(): + py2proto(v, req.parameters.parameters[k]) + return self.call(msg).statement_result + + def 
fetch(self, statement_id, fetch_size): + msg = self.new_request() + req = msg.fetch_request + req.statement_id = statement_id + if fetch_size: + req.fetch_size = fetch_size + + return self.call(msg).frame + + def close_statement(self, statement_id): + msg = self.new_request() + msg.close_statement_request.statement_id = statement_id + return self.call(msg).close_statement_response diff --git a/polypheny/serialize.py b/polypheny/serialize.py new file mode 100644 index 00000000..55cc9ca7 --- /dev/null +++ b/polypheny/serialize.py @@ -0,0 +1,162 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import decimal +from functools import reduce + +from org.polypheny.prism import value_pb2 + +import polypheny.interval as interval + + +def serialize_big_decimal(v, value): + sign, digits, exponent = value.as_tuple() + sign = -2 * sign + 1 + unscaled = sign * reduce(lambda r, d: r * 10 + d, digits) + l = unscaled.bit_length() + 1 # add sign bit + n = (l + 8) >> 3 + v.big_decimal.unscaled_value = unscaled.to_bytes(n, byteorder='big', signed=True) + v.big_decimal.scale = -exponent + + +# See ProtoValueDeserializer +def py2proto(value, v=None): + if v is None: + v = value_pb2.ProtoValue() + if type(value) is bool: + v.boolean.boolean = value + elif type(value) is int: + if -2 ** 31 <= value <= 2 ** 31 - 1: + v.integer.integer = value + elif -2 ** 63 <= value <= 2 ** 63 - 1: + v.long.long = value + else: + serialize_big_decimal(v, decimal.Decimal(value)) + elif type(value) is float: + # TODO: Always use decimal? + v.double.double = value + elif type(value) is decimal.Decimal: + serialize_big_decimal(v, value) + elif type(value) is datetime.date: + diff = value - datetime.date(1970, 1, 1) + v.date.date = diff.days + elif type(value) is datetime.time: + v.time.time = (value.hour * 3600 + value.minute * 60 + value.second) * 1000 + value.microsecond * 10 + elif type(value) is datetime.datetime: + v.timestamp.timestamp = int(value.timestamp() * 1000) + elif type(value) is str: + v.string.string = value + elif type(value) is bytes: + v.binary.binary = value + elif value is None: + v.null.CopyFrom(value_pb2.ProtoNull()) + elif type(value) is list: + for element in value: + v.list.values.append(py2proto(element)) + else: + raise NotImplementedError + + return v + + +def parse_big_decimal(value): + raw = value.unscaled_value + scale = value.scale + n = int.from_bytes(raw, byteorder='big', signed=True) + sign = 0 + if n < 0: + sign = 1 + n = -n + return decimal.Decimal((sign, tuple(map(int, str(n))), -scale)) + + +def proto2py(value): + name = 
value.WhichOneof("value") + assert name is not None + if name == "boolean": + return value.boolean.boolean + elif name == "integer": + return value.integer.integer + elif name == "long": + return value.long.long + elif name == "big_decimal": + return parse_big_decimal(value.big_decimal) + elif name == "float": + return value.float.float + elif name == "double": + return value.double.double + elif name == "date": + return datetime.date(1970, 1, 1) + datetime.timedelta(days=value.date.date) + elif name == "time": + t = value.time.time + millis = t % 1000 + t = t / 1000 + hour = int(t / 3600) + t = t % 3600 + minute = int(t / 60) + t = t % 60 + second = int(t) + return datetime.time(hour, minute, second, microsecond=int(millis * 1000)) + elif name == "timestamp": + return datetime.datetime.fromtimestamp(value.timestamp.timestamp / 1000, datetime.timezone.utc) + elif name == "interval": + return interval.IntervalMonthMilliseconds(value.interval.months, value.interval.milliseconds) + elif name == "string": + return value.string.string + elif name == "binary": + return value.binary.binary + elif name == "null": + return None + elif name == "list": + return list(map(lambda e: proto2py(e), value.list.values)) + elif name == "document": + res = {} + for entry in value.document.entries: + k = proto2py(entry.key) + assert isinstance(k, str) # TODO: Correct? 
+ v = proto2py(entry.value) + res[k] = v + return res + else: + raise RuntimeError("Unhandled value type") + + +def proto_node2py(proto_node): + properties_dict = {} + for entry in proto_node.properties: + properties_dict[proto2py(entry.key)] = proto2py(entry.value) + + return { + 'id': proto_node.id, + 'name': proto_node.name, + 'properties': properties_dict, + 'labels': list(proto_node.labels) + } + + +def proto_edge2py(proto_edge): + properties_dict = {} + for entry in proto_edge.properties: + properties_dict[proto2py(entry.key)] = proto2py(entry.value) + + return { + 'id': proto_edge.id, + 'name': proto_edge.name, + 'properties': properties_dict, + 'labels': list(proto_edge.labels), + 'source': proto_edge.source, + 'target': proto_edge.target, + 'direction': proto_edge.direction + } diff --git a/polypheny/types.py b/polypheny/types.py deleted file mode 100644 index 8028f4ff..00000000 --- a/polypheny/types.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import sys -import time -import datetime -from decimal import Decimal -from polypheny.avatica.protobuf import common_pb2 - -__all__ = [ - 'Date', 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', - 'Binary', 'STRING', 'BINARY', 'NUMBER', 'DATETIME', 'ROWID', 'BOOLEAN', - 'JAVA_CLASSES', 'JAVA_CLASSES_MAP', 'TypeHelper', -] - - -def Date(year, month, day): - """Constructs an object holding a date value.""" - return datetime.date(year, month, day) - - -def Time(hour, minute, second): - """Constructs an object holding a time value.""" - return datetime.time(hour, minute, second) - - -def Timestamp(year, month, day, hour, minute, second): - """Constructs an object holding a datetime/timestamp value.""" - return datetime.datetime(year, month, day, hour, minute, second) - - -def DateFromTicks(ticks): - """Constructs an object holding a date value from the given UNIX timestamp.""" - return Date(*time.localtime(ticks)[:3]) - - -def TimeFromTicks(ticks): - """Constructs an object holding a time value from the given UNIX timestamp.""" - return Time(*time.localtime(ticks)[3:6]) - - -def TimestampFromTicks(ticks): - """Constructs an object holding a datetime/timestamp value from the given UNIX timestamp.""" - return Timestamp(*time.localtime(ticks)[:6]) - - -def Binary(value): - """Constructs an object capable of holding a binary (long) string value.""" - return bytes(value) - - -def time_from_java_sql_time(n): - dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=n) - return dt.time() - - -def time_to_java_sql_time(t): - return ((t.hour * 60 + t.minute) * 60 + t.second) * 1000 + t.microsecond // 1000 - - -def date_from_java_sql_date(n): - return datetime.date(1970, 1, 1) + datetime.timedelta(days=n) - - -def date_to_java_sql_date(d): - if isinstance(d, datetime.datetime): - d = d.date() - td = d - datetime.date(1970, 1, 1) - return td.days - - -def datetime_from_java_sql_timestamp(n): - return datetime.datetime(1970, 1, 1) + 
datetime.timedelta(milliseconds=n) - - -def datetime_to_java_sql_timestamp(d): - td = d - datetime.datetime(1970, 1, 1) - return td.microseconds // 1000 + (td.seconds + td.days * 24 * 3600) * 1000 - - -class ColumnType(object): - - def __init__(self, eq_types): - self.eq_types = tuple(eq_types) - self.eq_types_set = set(eq_types) - - def __eq__(self, other): - return other in self.eq_types_set - - def __cmp__(self, other): - if other in self.eq_types_set: - return 0 - if other < self.eq_types: - return 1 - else: - return -1 - - -STRING = ColumnType(['VARCHAR', 'CHAR']) -"""Type object that can be used to describe string-based columns.""" - -BINARY = ColumnType(['BINARY', 'VARBINARY']) -"""Type object that can be used to describe (long) binary columns.""" - -NUMBER = ColumnType([ - 'INTEGER', 'UNSIGNED_INT', 'BIGINT', 'UNSIGNED_LONG', 'TINYINT', 'UNSIGNED_TINYINT', - 'SMALLINT', 'UNSIGNED_SMALLINT', 'FLOAT', 'UNSIGNED_FLOAT', 'DOUBLE', 'UNSIGNED_DOUBLE', 'DECIMAL' -]) -"""Type object that can be used to describe numeric columns.""" - -DATETIME = ColumnType(['TIME', 'DATE', 'TIMESTAMP', 'UNSIGNED_TIME', 'UNSIGNED_DATE', 'UNSIGNED_TIMESTAMP']) -"""Type object that can be used to describe date/time columns.""" - -ROWID = ColumnType([]) -"""Only implemented for DB API 2.0 compatibility, not used.""" - -BOOLEAN = ColumnType(['BOOLEAN']) -"""Type object that can be used to describe boolean columns. 
This is a phoenixdb-specific extension.""" - - -# XXX ARRAY - -if sys.version_info[0] < 3: - _long = long # noqa: F821 -else: - _long = int - -JAVA_CLASSES = { - 'bool_value': [ - ('java.lang.Boolean', common_pb2.BOOLEAN, None, None), - ], - 'string_value': [ - ('java.lang.Character', common_pb2.CHARACTER, None, None), - ('java.lang.String', common_pb2.STRING, None, None), - ('java.math.BigDecimal', common_pb2.BIG_DECIMAL, str, Decimal), - ], - 'number_value': [ - ('java.lang.Integer', common_pb2.INTEGER, None, int), - ('java.lang.Short', common_pb2.SHORT, None, int), - ('java.lang.Long', common_pb2.LONG, None, _long), - ('java.lang.Byte', common_pb2.BYTE, None, int), - ('java.sql.Time', common_pb2.JAVA_SQL_TIME, time_to_java_sql_time, time_from_java_sql_time), - ('java.sql.Date', common_pb2.JAVA_SQL_DATE, date_to_java_sql_date, date_from_java_sql_date), - ('java.sql.Timestamp', common_pb2.JAVA_SQL_TIMESTAMP, datetime_to_java_sql_timestamp, datetime_from_java_sql_timestamp), - ], - 'bytes_value': [ - ('[B', common_pb2.BYTE_STRING, Binary, None), - ], - 'double_value': [ - # if common_pb2.FLOAT is used, incorrect values are sent - ('java.lang.Float', common_pb2.DOUBLE, float, float), - ('java.lang.Double', common_pb2.DOUBLE, float, float), - ] -} -"""Groups of Java classes.""" - -JAVA_CLASSES_MAP = dict((v[0], (k, v[1], v[2], v[3])) for k in JAVA_CLASSES for v in JAVA_CLASSES[k]) -"""Flips the available types to allow for faster lookup by Java class. -This mapping should be structured as: - { - 'java.math.BigDecimal': ('string_value', common_pb2.BIG_DECIMAL, str, Decimal),), - ... - '': (, , , ), - } -""" - - -class TypeHelper(object): - @staticmethod - def from_class(klass): - """Retrieves a Rep and functions to cast to/from based on the Java class. - :param klass: - The string of the Java class for the column or parameter. 
- :returns: tuple ``(field_name, rep, mutate_to, cast_from)`` - WHERE - ``field_name`` is the attribute in ``common_pb2.TypedValue`` - ``rep`` is the common_pb2.Rep enum - ``mutate_to`` is the function to cast values into Phoenix values, if any - ``cast_from`` is the function to cast from the Phoenix value to the Python value, if any - :raises: - NotImplementedError - """ - # TODO add BsicPolyType to supported classes for parameters - if klass not in JAVA_CLASSES_MAP: - raise NotImplementedError('type {} is not supported'.format(klass)) - - return JAVA_CLASSES_MAP[klass] \ No newline at end of file diff --git a/polypheny/version.py b/polypheny/version.py deleted file mode 100644 index b8274b68..00000000 --- a/polypheny/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# Update this for the versions -# Don't change the forth version number from None -VERSION = (0, 2, 0, None) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..022a99ca --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +polypheny-prism-api==1.10 +pytest +coverage +Sphinx +myst-parser diff --git a/setup.py b/setup.py index 514acba9..a3427ed6 100644 --- a/setup.py +++ b/setup.py @@ -1,44 +1,36 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright 2019-2021 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - import os import sys - from setuptools import setup, find_packages THIS_DIR = os.path.dirname(os.path.realpath(__file__)) CONNECTOR_SRC_DIR = os.path.join(THIS_DIR, "polypheny") +VERSION = (0, 0, 0, None) # Default +VERSION = "v0.0.0" # Default + + +version_file = 'polypheny-connector-version.txt' + +# necessary for automated build pipeline +if os.path.exists(version_file): + with open(version_file, 'r') as f: + version = f.read().strip() -VERSION = (1, 1, 1, None) # Default +else: + version = VERSION + #raise ValueError(f"Version file '{version_file}' not found. Please create the file with the version number.") -try: - with open( - os.path.join(CONNECTOR_SRC_DIR, "generated_version.py"), encoding="utf-8" - ) as f: - exec(f.read()) -except Exception: - with open(os.path.join(CONNECTOR_SRC_DIR, "version.py"), encoding="utf-8") as f: - exec(f.read()) -version = ".".join([str(v) for v in VERSION if v is not None]) +#print(f"Building version: {version}") -# Parse command line flags +if not version.startswith('v'): + raise ValueError(f"Invalid version format: {version}. 
Expected format 'v0.0.0'.") + +# Strip the 'v' prefix for the version +version = version[1:] + + +### Parse command line flags # This list defines the options definitions in a set options_def = { @@ -61,18 +53,18 @@ def readme(): setup( - name="polypheny", + name='polypheny', version=version, - description="Polypheny Connector for Python", + description='Driver for Polypheny', long_description=readme(), long_description_content_type='text/markdown', author="The Polypheny Project", author_email="mail@polypheny.org", - url="https://polypheny.org/", + url="https://polypheny.com/", project_urls={ - "Documentation": "https://polypheny.org/documentation/", + "Documentation": "https://docs.polypheny.com/en/latest/drivers/python/overview", "Code": "https://github.com/polypheny/Polypheny-Connector-Python", - "Issue tracker": "https://github.com/polypheny/Polypheny-DB/labels/A-python", + "Issue tracker": "https://github.com/polypheny/Polypheny-DB/labels/A-python" }, license="Apache License, Version 2.0", packages=find_packages(), @@ -83,11 +75,8 @@ def readme(): 'release': ('setup.py', version), }, }, - classifiers=[ - 'Programming Language :: Python :: 3.6', - ], - python_requires=">=3.6", + python_requires=">=3.8", install_requires=[ - 'protobuf>=3.0.0', - ] + "polypheny-prism-api==1.9", + ], ) diff --git a/tests/test_conn.py b/tests/test_conn.py new file mode 100644 index 00000000..e9f2224c --- /dev/null +++ b/tests/test_conn.py @@ -0,0 +1,120 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import polypheny +import pytest +import time + +from test_helper import con, cur + +# Heartbeat: isActive is only set to true, if the checkConnection +# call is made --- not for any of the others. This would test +# that an active client would be kicked +#def test_heartbeat(cur): +# while True: +# cur.execute("SELECT 1"); +# for row in cur: +# assert row[0] == 1 +# time.sleep(0.3) + +# tests the con fixture helper works +def test_conn(con): + pass + +def test_commit(con): + cur = con.cursor() + + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER)') + cur.execute('INSERT INTO t(id, a) VALUES (1, 2)') + con.commit() + + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == 2 + +def test_rollback(con): + cur = con.cursor() + + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER)') + cur.execute('INSERT INTO t(id, a) VALUES (1, 2)') + con.rollback() + + cur.execute('SELECT a FROM t') + assert cur.fetchone() is None + +def test_fetch_size(con): + cur = con.cursor() + + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER)') + for i in range(30): + cur.execute('INSERT INTO t(id, a) VALUES (?, ?)', (i, i)) + con.commit() + + for i in (1, 5, 30, 60): + cur.execute('SELECT id, a FROM t', fetch_size=i) + assert len(cur.fetchall()) == 30 + +def test_fetch_insert(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER)') + cur.execute('INSERT INTO t(id, a) VALUES (1, 1)') + with pytest.raises(polypheny.Error): + cur.fetchone() + +def test_error_on_closed_con(con): + con.close() + with pytest.raises(polypheny.ProgrammingError): + con.cursor() + with pytest.raises(polypheny.ProgrammingError): + con.commit() + with pytest.raises(polypheny.ProgrammingError): + 
con.rollback() + # Raises no exception + con.close() + +def test_cursor_autoclose(con): + cur = con.cursor() + cur.execute('SELECT 1') + con.close() + +def test_execute_wrongparams(cur): + with pytest.raises(polypheny.Error): + cur.execute('SELECT ?', 5) + +def test_execute_closed_cursor(cur): + with pytest.raises(polypheny.Error): + cur.close() + cur.executeany('mql', 'db.abc.find()') + +def test_fetch_closed_cursor(cur): + with pytest.raises(polypheny.Error): + cur.execute('SELECT 1') + cur.close() + cur.fetchone() + +def test_invalid_creds(): + with pytest.raises(polypheny.Error): + polypheny.connect(('127.0.0.1', 20590), username='unknown', password='', transport='plain') + +def test_invalid_version(): + import polypheny.rpc as rpc + major = rpc.POLYPHENY_API_MAJOR + try: + with pytest.raises(polypheny.Error): + rpc.POLYPHENY_API_MAJOR = major - 1 + polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain') + finally: + rpc.POLYPHENY_API_MAJOR = major diff --git a/tests/test_future.py b/tests/test_future.py new file mode 100644 index 00000000..c9e159ae --- /dev/null +++ b/tests/test_future.py @@ -0,0 +1,37 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This file contains tests that do not work yet, but should work in future versions of Polypheny. 
+""" +import datetime + +import polypheny +import pytest + +from test_helper import con, cur, cur_with_data + +def test_cypher(cur_with_data): + cur = cur_with_data + with pytest.raises(polypheny.Error): + cur.executeany('cypher', 'MATCH (e:customers) WHERE e.id = 1 RETURN e.name') + assert cur.fetchone()[0] == 'Maria' + +def test_serialize_time_with_micros(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a TIME(3) NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (datetime.time(15, 19, 10, 12),)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] != datetime.time(15, 19, 10, 12) # This should be equal + assert cur.fetchone() is None diff --git a/tests/test_graph.py b/tests/test_graph.py new file mode 100644 index 00000000..f2370801 --- /dev/null +++ b/tests/test_graph.py @@ -0,0 +1,77 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from test_helper import con + + +def test_nodes_match_and_order_by(con): + cur = con.cursor() + cur.execute("DROP NAMESPACE IF EXISTS cyphertest") + cur.execute("CREATE GRAPH NAMESPACE cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 1, name: 'Alice'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 2, name: 'Bob'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 3, name: 'Charlie'})", namespace="cyphertest") + con.commit() + cur.executeany("cypher", 'MATCH (n:Person) RETURN n ORDER BY n.id', namespace="cyphertest") + result = cur.fetchall() + + expected = [ + {'id': 1, 'name': 'Alice'}, + {'id': 2, 'name': 'Bob'}, + {'id': 3, 'name': 'Charlie'} + ] + assert (sorted(result, key=lambda x: x['id']), expected) + cur.close() + con.close() + + +def test_nodes(con): + cur = con.cursor() + cur.execute("DROP NAMESPACE IF EXISTS cyphertest") + cur.execute("CREATE GRAPH NAMESPACE cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 1, name: 'Alice'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 2, name: 'Bob'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 3, name: 'Charlie'})", namespace="cyphertest") + cur.executeany("cypher", "MATCH (a:Person {name: 'Alice'}), (b:Person {name: 'Bob'}) CREATE (a)-[:KNOWS]->(b)", + namespace="cyphertest") + cur.executeany("cypher", "MATCH (b:Person {name: 'Bob'}), (c:Person {name: 'Charlie'}) CREATE (b)-[:KNOWS]->(c)", + namespace="cyphertest") + con.commit() + cur.executeany("cypher", 'MATCH ()-[r:KNOWS]->() RETURN r', namespace="cyphertest") + result = cur.fetchall() + + expected = [ + {'type': 'KNOWS'}, + {'type': 'KNOWS'} + ] + assert (sorted(result, key=lambda x: x['id']), expected) + cur.close() + con.close() + + +def test_nodes_relational(con): + cur = con.cursor() + cur.execute("DROP NAMESPACE IF EXISTS cyphertest") + cur.execute("CREATE GRAPH NAMESPACE cyphertest") + cur.executeany("cypher", 
"CREATE (:Person {id: 1, name: 'Alice'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 2, name: 'Bob'})", namespace="cyphertest") + cur.executeany("cypher", "CREATE (:Person {id: 3, name: 'Charlie'})", namespace="cyphertest") + con.commit() + cur.executeany("cypher", "MATCH (n:Person {name: 'Alice'}) RETURN n.name, n.id", namespace="cyphertest") + result = cur.fetchall() + + assert len(result) == 1 + row = result[0] + assert row[0] == "Alice" + assert row[1] == "1" diff --git a/tests/test_helper.py b/tests/test_helper.py new file mode 100644 index 00000000..66b340d3 --- /dev/null +++ b/tests/test_helper.py @@ -0,0 +1,57 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +import polypheny +import pytest + +@pytest.fixture +def con(): + if sys.platform == 'win32': + con = polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain') + else: + con = polypheny.connect() + #con = polypheny.connect(('127.0.0.1', 2020), username='pa', password='', transport='noise', transport_params={'insecure': True}) + yield con + con.close() + +@pytest.fixture +def cur(con): + yield con.cursor() + +@pytest.fixture +def cur_with_data(con, cur): + cur.execute('DROP TABLE IF EXISTS customers') + cur.execute(""" + CREATE TABLE customers( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + year_joined INTEGER NOT NULL + )""") + cur.executemany( + 'INSERT INTO customers(id, name, year_joined) VALUES (?, ?, ?)', + [(1, 'Maria', 2012), + (2, 'Daniel', 2020), + (3, 'Peter', 2001), + (4, 'Anna', 2001), + (5, 'Thomas', 2004), + (6, 'Andreas', 2014), + (7, 'Michael', 2010)] + ) + con.commit() + + yield cur + + cur.execute('DROP TABLE customers') diff --git a/tests/test_interval.py b/tests/test_interval.py new file mode 100644 index 00000000..a405e31f --- /dev/null +++ b/tests/test_interval.py @@ -0,0 +1,32 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from polypheny.interval import IntervalMonthMilliseconds +from polypheny.serialize import proto2py +from polypheny import Error +from org.polypheny.prism import value_pb2 + +import pytest + +def test_zero_months(): + m = IntervalMonthMilliseconds(0, 0) + assert str(m) == "0 months and 0 milliseconds" + +def test_one_month(): + m = IntervalMonthMilliseconds(1, 1) + assert str(m) == "1 month and 1 millisecond" + +def test_thirteen_months(): + m = IntervalMonthMilliseconds(2, 2) + assert str(m) == "2 months and 2 milliseconds" diff --git a/tests/test_langs.py b/tests/test_langs.py new file mode 100644 index 00000000..32c27213 --- /dev/null +++ b/tests/test_langs.py @@ -0,0 +1,28 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import polypheny +import pytest + +from test_helper import con, cur, cur_with_data + +def test_cql(cur_with_data): + cur = cur_with_data + cur.executeany('cql', "public.customers.id == 1 project public.customers.name") + assert cur.fetchone()[0] == 'Maria' + +def test_pig(cur_with_data): + cur = cur_with_data + cur.executeany('pig', "A = LOAD 'customers'; B = FILTER A BY id == 1; DUMP B;") + assert cur.fetchone()[1] == 'Maria' diff --git a/tests/test_mql.py b/tests/test_mql.py new file mode 100644 index 00000000..8db5cbf4 --- /dev/null +++ b/tests/test_mql.py @@ -0,0 +1,37 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import polypheny + +from test_helper import con, cur + +def test_getstar(con): + cur = con.cursor() + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a INTEGER NOT NULL, PRIMARY KEY(i))') + cur.executemany('INSERT INTO t(i, a) VALUES (?, ?)', [(0, 1), (1, 2), (2, 3)]) + con.commit() + cur.executeany('mongo', 'db.t.find()') + assert list(sorted(cur.fetchall(), key=lambda a: a['i'] )) == [{'i': 0, 'a': 1}, {'i': 1, 'a': 2}, {'i': 2, 'a': 3}] + +def test_namespace(con): + cur = con.cursor() + cur.execute('DROP NAMESPACE IF EXISTS demo') + cur.execute('CREATE RELATIONAL NAMESPACE demo') + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE demo.t(i INTEGER NOT NULL, a INTEGER NOT NULL, PRIMARY KEY(i))') + cur.executemany('INSERT INTO demo.t(i, a) VALUES (?, ?)', [(0, 1), (1, 2), (2, 3)]) + con.commit() + cur.executeany('mongo', 'db.t.find()', namespace='demo') + assert list(sorted(cur.fetchall(), key=lambda a: a['i'] )) == [{'i': 0, 'a': 1}, {'i': 1, 'a': 2}, {'i': 2, 'a': 3}] diff --git a/tests/test_pep.py b/tests/test_pep.py new file mode 100644 index 00000000..d89d1ca1 --- /dev/null +++ b/tests/test_pep.py @@ -0,0 +1,1548 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +import polypheny +import pytest + +from test_helper import con, cur, cur_with_data + +# See https://peps.python.org/pep-0249/ +# PEP: 249 +# Title: Python Database API Specification v2.0 +# Author: Marc-André Lemburg +# Discussions-To: db-sig@python.org +# Status: Final +# Type: Informational +# Content-Type: text/x-rst +# Created: 12-Apr-1999 +# Post-History: +# Replaces: 248 +# +# +# Introduction +# ============ +# +# This API has been defined to encourage similarity between the Python +# modules that are used to access databases. By doing this, we hope to +# achieve a consistency leading to more easily understood modules, code +# that is generally more portable across databases, and a broader reach +# of database connectivity from Python. +# +# Comments and questions about this specification may be directed to the +# `SIG for Database Interfacing with Python `__. +# +# For more information on database interfacing with Python and available +# packages see the `Database Topic Guide +# `__. +# +# This document describes the Python Database API Specification 2.0 and +# a set of common optional extensions. The previous version 1.0 version +# is still available as reference, in :PEP:`248`. Package writers are +# encouraged to use this version of the specification as basis for new +# interfaces. +# +# +# Module Interface +# ================= +# +# Constructors +# ------------ +# +# Access to the database is made available through connection +# objects. The module must provide the following constructor for these: +# +# .. _connect: +# +# `connect`_\ ( *parameters...* ) +# Constructor for creating a connection to the database. +# +# Returns a Connection_ Object. It takes a number of parameters +# which are database dependent. 
[1]_ +def test_connection(): + if sys.platform == 'win32': + con = polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain') + else: + con = polypheny.connect() + assert type(con) == polypheny.Connection + con.close() +# +# +# Globals +# ------- +# +# These module globals must be defined: +# +# .. _apilevel: +# +# `apilevel`_ +# String constant stating the supported DB API level. +# +# Currently only the strings "``1.0``" and "``2.0``" are allowed. +# If not given, a DB-API 1.0 level interface should be assumed. +def test_apilevel(): + assert polypheny.apilevel == '2.0' +# +# +# .. _threadsafety: +# +# `threadsafety`_ +# Integer constant stating the level of thread safety the interface +# supports. Possible values are: +# +# ============ ======================================================= +# threadsafety Meaning +# ============ ======================================================= +# 0 Threads may not share the module. +# 1 Threads may share the module, but not connections. +# 2 Threads may share the module and connections. +# 3 Threads may share the module, connections and cursors. +# ============ ======================================================= +# +# Sharing in the above context means that two threads may use a +# resource without wrapping it using a mutex semaphore to implement +# resource locking. Note that you cannot always make external +# resources thread safe by managing access using a mutex: the +# resource may rely on global variables or other external sources +# that are beyond your control. +def test_threadsafety(): + assert polypheny.threadsafety == 0 +# +# +# .. _paramstyle: +# +# `paramstyle`_ +# String constant stating the type of parameter marker formatting +# expected by the interface. 
Possible values are [2]_: +# +# ============ ============================================================== +# paramstyle Meaning +# ============ ============================================================== +# ``qmark`` Question mark style, e.g. ``...WHERE name=?`` +# ``numeric`` Numeric, positional style, e.g. ``...WHERE name=:1`` +# ``named`` Named style, e.g. ``...WHERE name=:name`` +# ``format`` ANSI C printf format codes, e.g. ``...WHERE name=%s`` +# ``pyformat`` Python extended format codes, e.g. ``...WHERE name=%(name)s`` +# ============ ============================================================== +def test_paramstyle(): + assert polypheny.paramstyle == 'qmark' +# +# +# Exceptions +# ---------- +# +# The module should make all error information available through these +# exceptions or subclasses thereof: +# +# .. _Warning: +# +# `Warning`_ +# Exception raised for important warnings like data truncations +# while inserting, etc. It must be a subclass of the Python +# ``Exception`` class [10]_ [11]_. +def test_warning(): + assert Exception in polypheny.Warning.__bases__ +# +# +# .. _Error: +# +# `Error`_ +# Exception that is the base class of all other error +# exceptions. You can use this to catch all errors with one single +# ``except`` statement. Warnings are not considered errors and thus +# should not use this class as base. It must be a subclass of the +# Python ``Exception`` class [10]_. +def test_error(): + assert Exception in polypheny.Error.__bases__ +# +# +# .. _InterfaceError: +# +# `InterfaceError`_ +# Exception raised for errors that are related to the database +# interface rather than the database itself. It must be a subclass +# of Error_. +def test_interfaceerror(): + assert polypheny.Error in polypheny.InterfaceError.__bases__ +# +# +# .. _DatabaseError: +# +# `DatabaseError`_ +# Exception raised for errors that are related to the database. It +# must be a subclass of Error_. 
+def test_databaseerror(): + assert polypheny.Error in polypheny.DatabaseError.__bases__ +# +# +# .. _DataError: +# +# `DataError`_ +# Exception raised for errors that are due to problems with the +# processed data like division by zero, numeric value out of range, +# etc. It must be a subclass of DatabaseError_. +def test_dataerror(): + assert polypheny.DatabaseError in polypheny.DataError.__bases__ +# +# +# .. _OperationalError: +# +# `OperationalError`_ +# Exception raised for errors that are related to the database's +# operation and not necessarily under the control of the programmer, +# e.g. an unexpected disconnect occurs, the data source name is not +# found, a transaction could not be processed, a memory allocation +# error occurred during processing, etc. It must be a subclass of +# DatabaseError_. +def test_operationalerror(): + assert polypheny.DatabaseError in polypheny.OperationalError.__bases__ +# +# +# .. _IntegrityError: +# +# `IntegrityError`_ +# Exception raised when the relational integrity of the database is +# affected, e.g. a foreign key check fails. It must be a subclass +# of DatabaseError_. +def test_integrityerror(): + assert polypheny.DatabaseError in polypheny.IntegrityError.__bases__ +# +# +# .. _InternalError: +# +# `InternalError`_ +# Exception raised when the database encounters an internal error, +# e.g. the cursor is not valid anymore, the transaction is out of +# sync, etc. It must be a subclass of DatabaseError_. +def test_internalerror(): + assert polypheny.DatabaseError in polypheny.InternalError.__bases__ +# +# +# .. _ProgrammingError: +# +# `ProgrammingError`_ +# Exception raised for programming errors, e.g. table not found or +# already exists, syntax error in the SQL statement, wrong number of +# parameters specified, etc. It must be a subclass of +# DatabaseError_. +def test_programmingerror(): + assert polypheny.DatabaseError in polypheny.ProgrammingError.__bases__ +# +# +# .. 
_NotSupportedError: +# +# `NotSupportedError`_ +# Exception raised in case a method or database API was used which +# is not supported by the database, e.g. requesting a +# `.rollback()`_ on a connection that does not support transaction +# or has transactions turned off. It must be a subclass of +# DatabaseError_. +def test_notsupportederror(): + assert polypheny.DatabaseError in polypheny.NotSupportedError.__bases__ +# +# This is the exception inheritance layout [10]_ [11]_: +# +# .. code-block:: text +# +# Exception +# |__Warning +# |__Error +# |__InterfaceError +# |__DatabaseError +# |__DataError +# |__OperationalError +# |__IntegrityError +# |__InternalError +# |__ProgrammingError +# |__NotSupportedError +# +# .. Note:: +# The values of these exceptions are not defined. They should give the user +# a fairly good idea of what went wrong, though. +# +# +# .. _Connection: +# +# Connection Objects +# ================== +# +# Connection objects should respond to the following methods. +# +# +# Connection methods +# ------------------ +# +# .. .close(): +# .. _Connection.close: +# +# `.close() <#Connection.close>`_ +# Close the connection now (rather than whenever ``.__del__()`` is +# called). +# +# The connection will be unusable from this point forward; an Error_ +# (or subclass) exception will be raised if any operation is +# attempted with the connection. The same applies to all cursor +# objects trying to use the connection. Note that closing a +# connection without committing the changes first will cause an +# implicit rollback to be performed. +def test_con_close(con): + con.close() + err = None + try: + con.rollback() + except polypheny.Error as e: + err = e + + assert err is not None + +# +# +# .. _.commit: +# .. _.commit(): +# +# `.commit`_\ () +# Commit any pending transaction to the database. +# +# Note that if the database supports an auto-commit feature, this must be +# initially off. An interface method may be provided to turn it back on. 
+# +# Database modules that do not support transactions should implement this +# method with void functionality. +def test_con_commit(con): + con.commit() +# +# +# .. _.rollback: +# .. _.rollback(): +# +# `.rollback`_\ () +# This method is optional since not all databases provide transaction +# support. [3]_ +# +# In case a database does provide transactions this method causes the +# database to roll back to the start of any pending transaction. Closing a +# connection without committing the changes first will cause an implicit +# rollback to be performed. +def test_con_rollback(con): + con.rollback() +# +# +# .. _.cursor: +# +# `.cursor`_\ () +# Return a new Cursor_ Object using the connection. +# +# If the database does not provide a direct cursor concept, the module will +# have to emulate cursors using other means to the extent needed by this +# specification. [4]_ +def test_cursor(con): + cur = con.cursor() + assert type(cur) == polypheny.Cursor + cur.close() + +# +# +# +# .. _Cursor: +# +# Cursor Objects +# ============== +# +# These objects represent a database cursor, which is used to manage the +# context of a fetch operation. Cursors created from the same connection +# are not isolated, *i.e.*, any changes done to the database by a cursor +# are immediately visible by the other cursors. Cursors created from +# different connections can or can not be isolated, depending on how the +# transaction support is implemented (see also the connection's +# `.rollback`_\ () and `.commit`_\ () methods). +# +# Cursor Objects should respond to the following methods and attributes. +# +# +# Cursor attributes +# ----------------- +# +# .. _.description: +# +# `.description`_ +# This read-only attribute is a sequence of 7-item sequences. 
+# +# Each of these sequences contains information describing one result +# column: +# +# * ``name`` +# * ``type_code`` +# * ``display_size`` +# * ``internal_size`` +# * ``precision`` +# * ``scale`` +# * ``null_ok`` +# +# The first two items (``name`` and ``type_code``) are mandatory, +# the other five are optional and are set to ``None`` if no +# meaningful values can be provided. +# +# This attribute will be ``None`` for operations that do not return +# rows or if the cursor has not had an operation invoked via the +# `.execute*()`_ method yet. +# +# The ``type_code`` can be interpreted by comparing it to the `Type +# Objects`_ specified in the section below. +def test_description(cur): + assert cur.description is None +# +# +# .. _.rowcount: +# +# `.rowcount`_ +# This read-only attribute specifies the number of rows that the last +# `.execute*()`_ produced (for DQL statements like ``SELECT``) or affected +# (for DML statements like ``UPDATE`` or ``INSERT``). [9]_ +# +# The attribute is -1 in case no `.execute*()`_ has been performed +# on the cursor or the rowcount of the last operation cannot be +# determined by the interface. [7]_ +# +# .. note:: +# Future versions of the DB API specification could redefine the +# latter case to have the object return ``None`` instead of -1. +def test_rowcount(cur): + assert cur.rowcount == -1 + +# +# +# Cursor methods +# -------------- +# +# .. _.callproc: +# .. _.callproc(): +# +# `.callproc`_\ ( *procname* [, *parameters* ] ) +# (This method is optional since not all databases provide stored +# procedures. [3]_) +# +# Call a stored database procedure with the given name. The sequence +# of parameters must contain one entry for each argument that the +# procedure expects. The result of the call is returned as modified +# copy of the input sequence. Input parameters are left untouched, +# output and input/output parameters replaced with possibly new +# values. +# +# The procedure may also provide a result set as output. 
This must +# then be made available through the standard `.fetch*()`_ methods. +# +# +# .. .close: +# .. _Cursor.close: +# .. _Cursor.close(): +# +# `.close <#Cursor.close>`_\ () +# Close the cursor now (rather than whenever ``__del__`` is called). +# +# The cursor will be unusable from this point forward; an Error_ (or +# subclass) exception will be raised if any operation is attempted +# with the cursor. +def test_cursor_close(cur): + cur.close() + err = None + try: + cur.execute('SELECT 1') + except polypheny.Error as e: + err = e + assert err is not None + +# +# +# .. _.execute*: +# .. _.execute*(): +# +# .. _.execute: +# .. _.execute(): +# +# `.execute`_\ (*operation* [, *parameters*]) +# Prepare and execute a database operation (query or command). +# +# Parameters may be provided as sequence or mapping and will be +# bound to variables in the operation. Variables are specified in a +# database-specific notation (see the module's paramstyle_ attribute +# for details). [5]_ +def test_cursor_execute(cur_with_data): + pytest.skip('dict example does not work') + cur = cur_with_data + cur.execute('SELECT * FROM customers') + cur.execute('SELECT * FROM customers WHERE year_joined > ?', (2007,)) + cur.execute('SELECT * FROM customers WHERE year_joined > :year', + {'year': 2007}) + +# +# A reference to the operation will be retained by the cursor. If +# the same operation object is passed in again, then the cursor can +# optimize its behavior. This is most effective for algorithms +# where the same operation is used, but different parameters are +# bound to it (many times). +# +# For maximum efficiency when reusing an operation, it is best to +# use the `.setinputsizes()`_ method to specify the parameter types +# and sizes ahead of time. It is legal for a parameter to not match +# the predefined information; the implementation should compensate, +# possibly with a loss of efficiency. +# +# The parameters may also be specified as list of tuples to +# e.g. 
insert multiple rows in a single operation, but this kind of +# usage is deprecated: `.executemany()`_ should be used instead. +# +# Return values are not defined. +# +# +# .. _.executemany: +# .. _.executemany(): +# +# `.executemany`_\ ( *operation*, *seq_of_parameters* ) +# Prepare a database operation (query or command) and then execute it +# against all parameter sequences or mappings found in the sequence +# *seq_of_parameters*. +# +# Modules are free to implement this method using multiple calls to +# the `.execute()`_ method or by using array operations to have the +# database process the sequence as a whole in one call. +# +# Use of this method for an operation which produces one or more +# result sets constitutes undefined behavior, and the implementation +# is permitted (but not required) to raise an exception when it +# detects that a result set has been created by an invocation of the +# operation. +# +# The same comments as for `.execute()`_ also apply accordingly to +# this method. +# +# Return values are not defined. +def test_cursor_executemany(cur_with_data): + cur = cur_with_data + cur.executemany('INSERT INTO customers(id, name, year_joined) VALUES (?, ?, ?)', + [(8, 'Ruth', 2012,), (9, 'Claudia', 2016,)]) + +# +# +# .. _.fetch*: +# .. _.fetch*(): +# +# .. _.fetchone: +# .. _.fetchone(): +# +# `.fetchone`_\ () +# Fetch the next row of a query result set, returning a single +# sequence, or ``None`` when no more data is available. [6]_ +# +# An Error_ (or subclass) exception is raised if the previous call +# to `.execute*()`_ did not produce any result set or no call was +# issued yet. 
+def test_cursor_fetchone(cur_with_data): + cur = cur_with_data + err = None + try: + cur.fetchone() + except polypheny.Error as e: + err = e + assert err is not None + cur.execute('SELECT * FROM customers') + for row in cur: + _ = row + cur.execute('SELECT * FROM customers WHERE year_joined = 2000') + assert cur.fetchone() is None + + cur.execute('DELETE FROM customers WHERE year_joined = 2010') + assert cur.rowcount == 1 + with pytest.raises(polypheny.Error): + cur.fetchone() + +# +# +# .. _.fetchmany: +# .. _.fetchmany(): +# +# `.fetchmany`_\ ([*size=cursor.arraysize*]) +# Fetch the next set of rows of a query result, returning a sequence +# of sequences (e.g. a list of tuples). An empty sequence is +# returned when no more rows are available. +# +# The number of rows to fetch per call is specified by the +# parameter. If it is not given, the cursor's arraysize determines +# the number of rows to be fetched. The method should try to fetch +# as many rows as indicated by the size parameter. If this is not +# possible due to the specified number of rows not being available, +# fewer rows may be returned. +# +# An Error_ (or subclass) exception is raised if the previous call +# to `.execute*()`_ did not produce any result set or no call was +# issued yet. +# +# Note there are performance considerations involved with the *size* +# parameter. For optimal performance, it is usually best to use the +# `.arraysize`_ attribute. If the size parameter is used, then it +# is best for it to retain the same value from one `.fetchmany()`_ +# call to the next. 
+def test_cursor_fetchmany(cur_with_data): + cur = cur_with_data + with pytest.raises(polypheny.Error): + cur.fetchmany() + + # customers has seven entries + cur.execute('SELECT * FROM customers') + cur.arraysize = 1 + assert len(cur.fetchmany(2)) == 2 + assert len(cur.fetchmany()) == 1 + cur.arraysize = 2 + assert len(cur.fetchmany()) == 2 + assert len(cur.fetchmany(100)) == 2 + + cur.execute('SELECT * FROM customers WHERE year_joined = 2000') + assert len(cur.fetchmany()) == 0 + + cur.execute('DELETE FROM customers WHERE year_joined = 2010') + assert cur.rowcount == 1 + with pytest.raises(polypheny.Error): + cur.fetchmany() + +# +# +# .. _.fetchall: +# .. _.fetchall(): +# +# `.fetchall`_\ () +# Fetch all (remaining) rows of a query result, returning them as a +# sequence of sequences (e.g. a list of tuples). Note that the +# cursor's arraysize attribute can affect the performance of this +# operation. +# +# An Error_ (or subclass) exception is raised if the previous call +# to `.execute*()`_ did not produce any result set or no call was +# issued yet. +def test_cursor_fetchall(cur_with_data): + cur = cur_with_data + with pytest.raises(polypheny.Error): + cur.fetchall() + + cur.execute('SELECT * FROM customers') + cur.fetchall() + assert cur.fetchone() is None + + cur.execute('SELECT * FROM customers WHERE year_joined = 2000') + assert len(cur.fetchall()) == 0 + +# +# +# .. _.nextset: +# .. _.nextset(): +# +# `.nextset`_\ () +# (This method is optional since not all databases support multiple +# result sets. [3]_) +# +# This method will make the cursor skip to the next available set, +# discarding any remaining rows from the current set. +# +# If there are no more sets, the method returns ``None``. Otherwise, +# it returns a true value and subsequent calls to the `.fetch*()`_ +# methods will return rows from the next result set. 
+# +# An Error_ (or subclass) exception is raised if the previous call +# to `.execute*()`_ did not produce any result set or no call was +# issued yet. +# +# +# .. _.arraysize: +# +# `.arraysize`_ +# This read/write attribute specifies the number of rows to fetch at +# a time with `.fetchmany()`_. It defaults to 1 meaning to fetch a +# single row at a time. +# +# Implementations must observe this value with respect to the +# `.fetchmany()`_ method, but are free to interact with the database +# a single row at a time. It may also be used in the implementation +# of `.executemany()`_. +def test_cursor_arraysize(cur): + assert cur.arraysize == 1 +# +# +# .. _.setinputsizes: +# .. _.setinputsizes(): +# +# `.setinputsizes`_\ (*sizes*) +# This can be used before a call to `.execute*()`_ to predefine +# memory areas for the operation's parameters. +# +# *sizes* is specified as a sequence — one item for each input +# parameter. The item should be a Type Object that corresponds to +# the input that will be used, or it should be an integer specifying +# the maximum length of a string parameter. If the item is +# ``None``, then no predefined memory area will be reserved for that +# column (this is useful to avoid predefined areas for large +# inputs). +# +# This method would be used before the `.execute*()`_ method is +# invoked. +# +# Implementations are free to have this method do nothing and users +# are free to not use it. +def test_setinputsizes(cur): + cur.setinputsizes([]) +# +# +# .. _.setoutputsize: +# .. _.setoutputsize(): +# +# `.setoutputsize`_\ (*size* [, *column*]) +# Set a column buffer size for fetches of large columns +# (e.g. ``LONG``\s, ``BLOB``\s, etc.). The column is specified as +# an index into the result sequence. Not specifying the column will +# set the default size for all large columns in the cursor. +# +# This method would be used before the `.execute*()`_ method is +# invoked. 
+# +# Implementations are free to have this method do nothing and users +# are free to not use it. +def test_setoutputsize(cur): + cur.setoutputsize([]) + cur.setoutputsize([], []) +# +# +# .. _Type Objects: +# +# Type Objects and Constructors +# ============================= +# +# Many databases need to have the input in a particular format for +# binding to an operation's input parameters. For example, if an input +# is destined for a ``DATE`` column, then it must be bound to the +# database in a particular string format. Similar problems exist for +# "Row ID" columns or large binary items (e.g. blobs or ``RAW`` +# columns). This presents problems for Python since the parameters to +# the `.execute*()`_ method are untyped. When the database module sees +# a Python string object, it doesn't know if it should be bound as a +# simple ``CHAR`` column, as a raw ``BINARY`` item, or as a ``DATE``. +# +# To overcome this problem, a module must provide the constructors +# defined below to create objects that can hold special values. When +# passed to the cursor methods, the module can then detect the proper +# type of the input parameter and bind it accordingly. +# +# A Cursor_ Object's description attribute returns information about +# each of the result columns of a query. The ``type_code`` must compare +# equal to one of Type Objects defined below. Type Objects may be equal +# to more than one type code (e.g. ``DATETIME`` could be equal to the +# type codes for date, time and timestamp columns; see the +# `Implementation Hints`_ below for details). +# +# The module exports the following constructors and singletons: +# +# .. _Date: +# +# `Date`_\ (*year*, *month*, *day*) +# This function constructs an object holding a date value. +def test_date(): + polypheny.Date(2023, 12, 12) +# +# +# .. _Time: +# +# `Time`_\ (*hour*, *minute*, *second*) +# This function constructs an object holding a time value. +def test_time(): + polypheny.Time(16, 49, 14) +# +# +# .. 
_Timestamp: +# +# `Timestamp`_\ (*year*, *month*, *day*, *hour*, *minute*, *second*) +# This function constructs an object holding a time stamp value. +def test_timestamp(): + polypheny.Timestamp(2023, 12, 12, 16, 49, 14) +# +# +# .. _DateFromTicks: +# +# `DateFromTicks`_\ (*ticks*) +# This function constructs an object holding a date value from the +# given ticks value (number of seconds since the epoch; see the +# documentation of `the standard Python time module +# `__ for details). +def test_datefromticks(): + polypheny.DateFromTicks(42) +# +# .. _TimeFromTicks: +# +# `TimeFromTicks`_\ (*ticks*) +# This function constructs an object holding a time value from the +# given ticks value (number of seconds since the epoch; see the +# documentation of the standard Python time module for details). +def test_timefromticks(): + polypheny.TimeFromTicks(42) +# +# +# .. _TimeStampFromTicks: +# +# `TimestampFromTicks`_\ (*ticks*) +# This function constructs an object holding a time stamp value from +# the given ticks value (number of seconds since the epoch; see the +# documentation of the standard Python time module for details). +def test_timestampfromticks(): + polypheny.TimestampFromTicks(42) +# +# +# .. _Binary: +# +# `Binary`_\ (*string*) +# This function constructs an object capable of holding a binary +# (long) string value. +def test_binary(): + polypheny.Binary("Hello World") +# +# +# .. _STRING: +# +# `STRING`_ type +# This type object is used to describe columns in a database that +# are string-based (e.g. ``CHAR``). +# +# +# .. _Binary type: +# +# `BINARY`_ type +# This type object is used to describe (long) binary columns in a +# database (e.g. ``LONG``, ``RAW``, ``BLOB``\s). +# +# +# .. _NUMBER: +# +# `NUMBER`_ type +# This type object is used to describe numeric columns in a +# database. +# +# +# .. _DATETIME: +# +# `DATETIME`_ type +# This type object is used to describe date/time columns in a +# database. +# +# .. 
_ROWID: +# +# `ROWID`_ type +# This type object is used to describe the "Row ID" column in a +# database. +# +# +# SQL ``NULL`` values are represented by the Python ``None`` singleton +# on input and output. +# +# .. Note:: +# Usage of Unix ticks for database interfacing can cause troubles +# because of the limited date range they cover. +# +# +# +# .. _Implementation Hints: +# +# Implementation Hints for Module Authors +# ======================================= +# +# * Date/time objects can be implemented as `Python datetime module +# `__ objects (available +# since Python 2.3, with a C API since 2.4) or using the `mxDateTime +# `_ package +# (available for all Python versions since 1.5.2). They both provide +# all necessary constructors and methods at Python and C level. +# +# * Here is a sample implementation of the Unix ticks based constructors +# for date/time delegating work to the generic constructors:: +# +# import time +# +# def DateFromTicks(ticks): +# return Date(*time.localtime(ticks)[:3]) +# +# def TimeFromTicks(ticks): +# return Time(*time.localtime(ticks)[3:6]) +# +# def TimestampFromTicks(ticks): +# return Timestamp(*time.localtime(ticks)[:6]) +# +# * The preferred object type for Binary objects are the buffer types +# available in standard Python starting with version 1.5.2. Please +# see the Python documentation for details. For information about the +# C interface have a look at ``Include/bufferobject.h`` and +# ``Objects/bufferobject.c`` in the Python source distribution. +# +# * This Python class allows implementing the above type objects even +# though the description type code field yields multiple values for on +# type object:: +# +# class DBAPITypeObject: +# def __init__(self,*values): +# self.values = values +# def __cmp__(self,other): +# if other in self.values: +# return 0 +# if other < self.values: +# return 1 +# else: +# return -1 +# +# The resulting type object compares equal to all values passed to the +# constructor. 
+# +# * Here is a snippet of Python code that implements the exception +# hierarchy defined above [10]_:: +# +# class Error(Exception): +# pass +# +# class Warning(Exception): +# pass +# +# class InterfaceError(Error): +# pass +# +# class DatabaseError(Error): +# pass +# +# class InternalError(DatabaseError): +# pass +# +# class OperationalError(DatabaseError): +# pass +# +# class ProgrammingError(DatabaseError): +# pass +# +# class IntegrityError(DatabaseError): +# pass +# +# class DataError(DatabaseError): +# pass +# +# class NotSupportedError(DatabaseError): +# pass +# +# In C you can use the ``PyErr_NewException(fullname, base, NULL)`` +# API to create the exception objects. +# +# +# Optional DB API Extensions +# ========================== +# +# During the lifetime of DB API 2.0, module authors have often extended +# their implementations beyond what is required by this DB API +# specification. To enhance compatibility and to provide a clean upgrade +# path to possible future versions of the specification, this section +# defines a set of common extensions to the core DB API 2.0 +# specification. +# +# As with all DB API optional features, the database module authors are +# free to not implement these additional attributes and methods (using +# them will then result in an ``AttributeError``) or to raise a +# NotSupportedError_ in case the availability can only be checked at +# run-time. +# +# It has been proposed to make usage of these extensions optionally +# visible to the programmer by issuing Python warnings through the +# Python warning framework. To make this feature useful, the warning +# messages must be standardized in order to be able to mask them. These +# standard messages are referred to below as *Warning Message*. +# +# +# .. _.rownumber: +# +# Cursor\ `.rownumber`_ +# This read-only attribute should provide the current 0-based index +# of the cursor in the result set or ``None`` if the index cannot be +# determined. 
+# +# The index can be seen as index of the cursor in a sequence (the +# result set). The next fetch operation will fetch the row indexed +# by `.rownumber`_ in that sequence. +# +# *Warning Message:* "DB-API extension cursor.rownumber used" +# +# +# .. _Connection.Error: +# .. _Connection.ProgrammingError: +# +# `Connection.Error`_, `Connection.ProgrammingError`_, etc. +# All exception classes defined by the DB API standard should be +# exposed on the Connection_ objects as attributes (in addition to +# being available at module scope). +# +# These attributes simplify error handling in multi-connection +# environments. +# +# *Warning Message:* "DB-API extension connection. used" +# +# +# .. _.connection: +# +# Cursor\ `.connection`_ +# This read-only attribute return a reference to the Connection_ +# object on which the cursor was created. +# +# The attribute simplifies writing polymorph code in +# multi-connection environments. +# +# *Warning Message:* "DB-API extension cursor.connection used" +# +# +# .. _.scroll: +# .. _.scroll(): +# +# Cursor\ `.scroll`_\ (*value* [, *mode='relative'* ]) +# Scroll the cursor in the result set to a new position according to +# *mode*. +# +# If mode is ``relative`` (default), value is taken as offset to the +# current position in the result set, if set to ``absolute``, value +# states an absolute target position. +# +# An ``IndexError`` should be raised in case a scroll operation +# would leave the result set. In this case, the cursor position is +# left undefined (ideal would be to not move the cursor at all). +# +# .. Note:: +# This method should use native scrollable cursors, if available, +# or revert to an emulation for forward-only scrollable +# cursors. The method may raise NotSupportedError_ to signal +# that a specific operation is not supported by the database +# (e.g. backward scrolling). +# +# *Warning Message:* "DB-API extension cursor.scroll() used" +# +# +# .. 
_Cursor.messages: +# +# `Cursor.messages`_ +# This is a Python list object to which the interface appends tuples +# (exception class, exception value) for all messages which the +# interfaces receives from the underlying database for this cursor. +# +# The list is cleared by all standard cursor methods calls (prior to +# executing the call) except for the `.fetch*()`_ calls +# automatically to avoid excessive memory usage and can also be +# cleared by executing ``del cursor.messages[:]``. +# +# All error and warning messages generated by the database are +# placed into this list, so checking the list allows the user to +# verify correct operation of the method calls. +# +# The aim of this attribute is to eliminate the need for a Warning +# exception which often causes problems (some warnings really only +# have informational character). +# +# *Warning Message:* "DB-API extension cursor.messages used" +# +# +# .. _Connection.messages: +# +# `Connection.messages`_ +# Same as Cursor.messages_ except that the messages in the list are +# connection oriented. +# +# The list is cleared automatically by all standard connection +# methods calls (prior to executing the call) to avoid excessive +# memory usage and can also be cleared by executing ``del +# connection.messages[:]``. +# +# *Warning Message:* "DB-API extension connection.messages used" +# +# +# .. _.next: +# .. _.next(): +# +# Cursor\ `.next`_\ () +# Return the next row from the currently executing SQL statement +# using the same semantics as `.fetchone()`_. A ``StopIteration`` +# exception is raised when the result set is exhausted for Python +# versions 2.2 and later. Previous versions don't have the +# ``StopIteration`` exception and so the method should raise an +# ``IndexError`` instead. +# +# *Warning Message:* "DB-API extension cursor.next() used" +# +# +# .. _.__iter__: +# .. _.__iter__(): +# +# Cursor\ `.__iter__`_\ () +# Return self to make cursors compatible to the iteration protocol +# [8]_. 
+# +# *Warning Message:* "DB-API extension cursor.__iter__() used" +# +# +# .. _.lastrowid: +# +# Cursor\ `.lastrowid`_ +# This read-only attribute provides the rowid of the last modified +# row (most databases return a rowid only when a single ``INSERT`` +# operation is performed). If the operation does not set a rowid or +# if the database does not support rowids, this attribute should be +# set to ``None``. +# +# The semantics of ``.lastrowid`` are undefined in case the last +# executed statement modified more than one row, e.g. when using +# ``INSERT`` with ``.executemany()``. +# +# *Warning Message:* "DB-API extension cursor.lastrowid used" +# +# +# .. _Connection.autocommit: +# .. _.autocommit: +# +# Connection\ `.autocommit`_ +# Attribute to query and set the autocommit mode of the connection. +# +# Return ``True`` if the connection is operating in autocommit +# (non-transactional) mode. Return ``False`` if the connection is +# operating in manual commit (transactional) mode. +# +# Setting the attribute to ``True`` or ``False`` adjusts the +# connection's mode accordingly. +# +# Changing the setting from ``True`` to ``False`` (disabling +# autocommit) will have the database leave autocommit mode and start +# a new transaction. Changing from ``False`` to ``True`` (enabling +# autocommit) has database dependent semantics with respect to how +# pending transactions are handled. [12]_ +# +# *Deprecation notice*: Even though several database modules implement +# both the read and write nature of this attribute, setting the +# autocommit mode by writing to the attribute is deprecated, since +# this may result in I/O and related exceptions, making it difficult +# to implement in an async context. 
[13]_ +# +# *Warning Message:* "DB-API extension connection.autocommit used" +# +# +# Optional Error Handling Extensions +# ================================== +# +# The core DB API specification only introduces a set of exceptions +# which can be raised to report errors to the user. In some cases, +# exceptions may be too disruptive for the flow of a program or even +# render execution impossible. +# +# For these cases and in order to simplify error handling when dealing +# with databases, database module authors may choose to implement user +# definable error handlers. This section describes a standard way of +# defining these error handlers. +# +# .. _Connection.errorhandler: +# .. _Cursor.errorhandler: +# +# `Connection.errorhandler`_, `Cursor.errorhandler`_ +# Read/write attribute which references an error handler to call in +# case an error condition is met. +# +# The handler must be a Python callable taking the following arguments: +# +# .. parsed-literal:: +# +# errorhandler(*connection*, *cursor*, *errorclass*, *errorvalue*) +# +# where connection is a reference to the connection on which the +# cursor operates, cursor a reference to the cursor (or ``None`` in +# case the error does not apply to a cursor), *errorclass* is an +# error class which to instantiate using *errorvalue* as +# construction argument. +# +# The standard error handler should add the error information to the +# appropriate ``.messages`` attribute (`Connection.messages`_ or +# `Cursor.messages`_) and raise the exception defined by the given +# *errorclass* and *errorvalue* parameters. +# +# If no ``.errorhandler`` is set (the attribute is ``None``), the +# standard error handling scheme as outlined above, should be +# applied. +# +# *Warning Message:* "DB-API extension .errorhandler used" +# +# Cursors should inherit the ``.errorhandler`` setting from their +# connection objects at cursor creation time. 
+# +# +# Optional Two-Phase Commit Extensions +# ==================================== +# +# Many databases have support for two-phase commit (TPC) which allows +# managing transactions across multiple database connections and other +# resources. +# +# If a database backend provides support for two-phase commit and the +# database module author wishes to expose this support, the following +# API should be implemented. NotSupportedError_ should be raised, if the +# database backend support for two-phase commit can only be checked at +# run-time. +# +# TPC Transaction IDs +# ------------------- +# +# As many databases follow the XA specification, transaction IDs are +# formed from three components: +# +# * a format ID +# * a global transaction ID +# * a branch qualifier +# +# For a particular global transaction, the first two components should +# be the same for all resources. Each resource in the global +# transaction should be assigned a different branch qualifier. +# +# The various components must satisfy the following criteria: +# +# * format ID: a non-negative 32-bit integer. +# +# * global transaction ID and branch qualifier: byte strings no +# longer than 64 characters. +# +# Transaction IDs are created with the `.xid()`_ Connection method: +# +# +# .. _.xid: +# .. _.xid(): +# +# `.xid`_\ (*format_id*, *global_transaction_id*, *branch_qualifier*) +# Returns a transaction ID object suitable for passing to the +# `.tpc_*()`_ methods of this connection. +# +# If the database connection does not support TPC, a +# NotSupportedError_ is raised. +# +# The type of the object returned by `.xid()`_ is not defined, but +# it must provide sequence behaviour, allowing access to the three +# components. A conforming database module could choose to +# represent transaction IDs with tuples rather than a custom object. +# +# +# TPC Connection Methods +# ---------------------- +# +# .. _.tpc_*: +# .. _.tpc_*(): +# +# .. _.tpc_begin: +# .. 
_.tpc_begin():
+#
+# `.tpc_begin`_\ (*xid*)
+#     Begins a TPC transaction with the given transaction ID *xid*.
+#
+#     This method should be called outside of a transaction (*i.e.*
+#     nothing may have executed since the last `.commit()`_ or
+#     `.rollback()`_).
+#
+#     Furthermore, it is an error to call `.commit()`_ or `.rollback()`_
+#     within the TPC transaction. A ProgrammingError_ is raised, if the
+#     application calls `.commit()`_ or `.rollback()`_ during an active
+#     TPC transaction.
+#
+#     If the database connection does not support TPC, a
+#     NotSupportedError_ is raised.
+#
+#
+# .. _.tpc_prepare:
+# .. _.tpc_prepare():
+#
+# `.tpc_prepare`_\ ()
+#     Performs the first phase of a transaction started with
+#     `.tpc_begin()`_. A ProgrammingError_ should be raised if this
+#     method is called outside of a TPC transaction.
+#
+#     After calling `.tpc_prepare()`_, no statements can be executed
+#     until `.tpc_commit()`_ or `.tpc_rollback()`_ have been called.
+#
+#
+# .. _.tpc_commit:
+# .. _.tpc_commit():
+#
+# `.tpc_commit`_\ ([ *xid* ])
+#     When called with no arguments, `.tpc_commit()`_ commits a TPC
+#     transaction previously prepared with `.tpc_prepare()`_.
+#
+#     If `.tpc_commit()`_ is called prior to `.tpc_prepare()`_, a single
+#     phase commit is performed. A transaction manager may choose to do
+#     this if only a single resource is participating in the global
+#     transaction.
+#
+#     When called with a transaction ID *xid*, the database commits the
+#     given transaction. If an invalid transaction ID is provided, a
+#     ProgrammingError_ will be raised. This form should be called
+#     outside of a transaction, and is intended for use in recovery.
+#
+#     On return, the TPC transaction is ended.
+#
+#
+# .. _.tpc_rollback:
+# .. _.tpc_rollback():
+#
+# `.tpc_rollback`_\ ([ *xid* ])
+#     When called with no arguments, `.tpc_rollback()`_ rolls back a TPC
+#     transaction. It may be called before or after `.tpc_prepare()`_.
+# +# When called with a transaction ID *xid*, it rolls back the given +# transaction. If an invalid transaction ID is provided, a +# ProgrammingError_ is raised. This form should be called outside +# of a transaction, and is intended for use in recovery. +# +# On return, the TPC transaction is ended. +# +# .. _.tpc_recover: +# .. _.tpc_recover(): +# +# `.tpc_recover`_\ () +# Returns a list of pending transaction IDs suitable for use with +# ``.tpc_commit(xid)`` or ``.tpc_rollback(xid)``. +# +# If the database does not support transaction recovery, it may +# return an empty list or raise NotSupportedError_. +# +# +# +# Frequently Asked Questions +# ========================== +# +# The database SIG often sees reoccurring questions about the DB API +# specification. This section covers some of the issues people sometimes +# have with the specification. +# +# **Question:** +# +# How can I construct a dictionary out of the tuples returned by +# `.fetch*()`_: +# +# **Answer:** +# +# There are several existing tools available which provide helpers for +# this task. Most of them use the approach of using the column names +# defined in the cursor attribute `.description`_ as basis for the keys +# in the row dictionary. +# +# Note that the reason for not extending the DB API specification to +# also support dictionary return values for the `.fetch*()`_ methods is +# that this approach has several drawbacks: +# +# * Some databases don't support case-sensitive column names or +# auto-convert them to all lowercase or all uppercase characters. +# +# * Columns in the result set which are generated by the query (e.g. +# using SQL functions) don't map to table column names and databases +# usually generate names for these columns in a very database specific +# way. +# +# As a result, accessing the columns through dictionary keys varies +# between databases and makes writing portable code impossible. 
+# +# +# +# Major Changes from Version 1.0 to Version 2.0 +# ============================================= +# +# The Python Database API 2.0 introduces a few major changes compared to +# the 1.0 version. Because some of these changes will cause existing DB +# API 1.0 based scripts to break, the major version number was adjusted +# to reflect this change. +# +# These are the most important changes from 1.0 to 2.0: +# +# * The need for a separate dbi module was dropped and the functionality +# merged into the module interface itself. +# +# * New constructors and `Type Objects`_ were added for date/time +# values, the ``RAW`` Type Object was renamed to ``BINARY``. The +# resulting set should cover all basic data types commonly found in +# modern SQL databases. +# +# * New constants (apilevel_, threadsafety_, paramstyle_) and methods +# (`.executemany()`_, `.nextset()`_) were added to provide better +# database bindings. +# +# * The semantics of `.callproc()`_ needed to call stored procedures are +# now clearly defined. +# +# * The definition of the `.execute()`_ return value changed. +# Previously, the return value was based on the SQL statement type +# (which was hard to implement right) — it is undefined now; use the +# more flexible `.rowcount`_ attribute instead. Modules are free to +# return the old style return values, but these are no longer mandated +# by the specification and should be considered database interface +# dependent. +# +# * Class based exceptions_ were incorporated into the specification. +# Module implementors are free to extend the exception layout defined +# in this specification by subclassing the defined exception classes. +# +# +# Post-publishing additions to the DB API 2.0 specification: +# +# * Additional optional DB API extensions to the set of core +# functionality were specified. 
+# +# +# Open Issues +# =========== +# +# Although the version 2.0 specification clarifies a lot of questions +# that were left open in the 1.0 version, there are still some remaining +# issues which should be addressed in future versions: +# +# * Define a useful return value for `.nextset()`_ for the case where a +# new result set is available. +# +# * Integrate the `decimal module +# `__ ``Decimal`` object +# for use as loss-less monetary and decimal interchange format. +# +# +# +# Footnotes +# ========= +# +# .. [1] As a guideline the connection constructor parameters should be +# implemented as keyword parameters for more intuitive use and +# follow this order of parameters: +# +# ============= ==================================== +# Parameter Meaning +# ============= ==================================== +# ``dsn`` Data source name as string +# ``user`` User name as string (optional) +# ``password`` Password as string (optional) +# ``host`` Hostname (optional) +# ``database`` Database name (optional) +# ============= ==================================== +# +# E.g. a connect could look like this:: +# +# connect(dsn='myhost:MYDB', user='guido', password='234$') +# +# Also see [13]_ regarding planned future additions to this list. +# +# .. [2] Module implementors should prefer ``numeric``, ``named`` or +# ``pyformat`` over the other formats because these offer more +# clarity and flexibility. +# +# +# .. [3] If the database does not support the functionality required by +# the method, the interface should throw an exception in case the +# method is used. +# +# The preferred approach is to not implement the method and thus have +# Python generate an ``AttributeError`` in case the method is +# requested. This allows the programmer to check for database +# capabilities using the standard ``hasattr()`` function. +# +# For some dynamically configured interfaces it may not be +# appropriate to require dynamically making the method +# available. 
These interfaces should then raise a +# ``NotSupportedError`` to indicate the non-ability to perform the +# roll back when the method is invoked. +# +# .. [4] A database interface may choose to support named cursors by +# allowing a string argument to the method. This feature is not part +# of the specification, since it complicates semantics of the +# `.fetch*()`_ methods. +# +# .. [5] The module will use the ``__getitem__`` method of the +# parameters object to map either positions (integers) or names +# (strings) to parameter values. This allows for both sequences and +# mappings to be used as input. +# +# The term *bound* refers to the process of binding an input value +# to a database execution buffer. In practical terms, this means +# that the input value is directly used as a value in the operation. +# The client should not be required to "escape" the value so that it +# can be used — the value should be equal to the actual database +# value. +# +# .. [6] Note that the interface may implement row fetching using arrays +# and other optimizations. It is not guaranteed that a call to this +# method will only move the associated cursor forward by one row. +# +# .. [7] The ``rowcount`` attribute may be coded in a way that updates +# its value dynamically. This can be useful for databases that +# return usable ``rowcount`` values only after the first call to a +# `.fetch*()`_ method. +# +# .. [8] Implementation Note: Python C extensions will have to implement +# the ``tp_iter`` slot on the cursor object instead of the +# ``.__iter__()`` method. +# +# .. [9] The term *number of affected rows* generally refers to the +# number of rows deleted, updated or inserted by the last statement +# run on the database cursor. Most databases will return the total +# number of rows that were found by the corresponding ``WHERE`` +# clause of the statement. 
Some databases use a different +# interpretation for ``UPDATE``\s and only return the number of rows +# that were changed by the ``UPDATE``, even though the ``WHERE`` +# clause of the statement may have found more matching rows. +# Database module authors should try to implement the more common +# interpretation of returning the total number of rows found by the +# ``WHERE`` clause, or clearly document a different interpretation +# of the ``.rowcount`` attribute. +# +# .. [10] In Python 2 and earlier versions of this PEP, ``StandardError`` +# was used as the base class for all DB-API exceptions. Since +# ``StandardError`` was removed in Python 3, database modules +# targeting Python 3 should use ``Exception`` as base class instead. +# The PEP was updated to use ``Exception`` throughout the text, to +# avoid confusion. The change should not affect existing modules or +# uses of those modules, since all DB-API error exception classes are +# still rooted at the ``Error`` or ``Warning`` classes. +# +# .. [11] In a future revision of the DB-API, the base class for +# ``Warning`` will likely change to the builtin ``Warning`` class. At +# the time of writing of the DB-API 2.0 in 1999, the warning framework +# in Python did not yet exist. +# +# .. [12] Many database modules implementing the autocommit attribute will +# automatically commit any pending transaction and then enter +# autocommit mode. It is generally recommended to explicitly +# `.commit()`_ or `.rollback()`_ transactions prior to changing the +# autocommit setting, since this is portable across database modules. +# +# .. [13] In a future revision of the DB-API, we are going to introduce a +# new method ``.setautocommit(value)``, which will allow setting the +# autocommit mode, and make ``.autocommit`` a read-only attribute. +# Additionally, we are considering to add a new standard keyword +# parameter ``autocommit`` to the Connection constructor. 
Module
+#    authors are encouraged to add these changes in preparation for this
+#    change.
+#
+# Acknowledgements
+# ================
+#
+# Many thanks go to Andrew Kuchling who converted the Python Database
+# API Specification 2.0 from the original HTML format into the PEP
+# format in 2001.
+#
+# Many thanks to James Henstridge for leading the discussion which led to
+# the standardization of the two-phase commit API extensions in 2008.
+#
+# Many thanks to Daniele Varrazzo for converting the specification from
+# text PEP format to ReST PEP format, which allows linking to various
+# parts in 2012.
+#
+# Copyright
+# =========
+#
+# This document has been placed in the Public Domain.
diff --git a/tests/test_plain.py b/tests/test_plain.py
new file mode 100644
index 00000000..3c155fc3
--- /dev/null
+++ b/tests/test_plain.py
@@ -0,0 +1,19 @@
+# Copyright 2024 The Polypheny Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import polypheny
+
+def test_connect_plain():
+    con = polypheny.connect(('127.0.0.1', 20590), username='pa', password='', transport='plain')
+    con.close()
diff --git a/tests/test_serialize.py b/tests/test_serialize.py
new file mode 100644
index 00000000..56a77e44
--- /dev/null
+++ b/tests/test_serialize.py
@@ -0,0 +1,252 @@
+# Copyright 2024 The Polypheny Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import decimal +import math +import os + +import polypheny +import pytest + +from polypheny.interval import IntervalMonthMilliseconds + +from test_helper import con, cur + +def test_serialize_bool(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a BOOLEAN, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (True,)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == True + assert cur.fetchone() is None + +def test_serialize_number(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a BIGINT NOT NULL, PRIMARY KEY(i))') + ints = {1, 2**42, -1, (2**63)-1, -2**63} + for n, i in enumerate(ints): + cur.execute('INSERT INTO t(i, a) VALUES (?, ?)', (n, i,)) + cur.execute('SELECT a FROM t') + res = set(map(lambda x: x[0], cur.fetchall())) + assert ints == res + +def test_serialize_decimal_local(): + decimals = {2**64, -2**64, 0, 0.49, 0.5, 0.51, -0.49, -0.5, -0.51} + for d in map(decimal.Decimal, decimals): + assert polypheny.serialize.proto2py(polypheny.serialize.py2proto(d)) == d + +def test_serialize_floats(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DOUBLE NOT NULL, PRIMARY KEY(i))') + floats = {0, 0.49, 0.5, 0.51, -0.49, -0.5, -0.51} + for i, f in enumerate(floats): + cur.execute('INSERT INTO t(i, a) VALUES (?, ?)', (i, f,)) + cur.execute('SELECT a FROM t WHERE i = ?', (i,)) + assert cur.fetchone()[0] == f + assert cur.fetchone() is None + +def 
test_serialize_decimal(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DECIMAL(3, 2) NOT NULL, PRIMARY KEY(i))') + decimals = {'0', '0.49', '0.5', '0.51', '-0.49', '-0.5', '-0.51'} + for i, d in enumerate(map(decimal.Decimal, decimals)): + cur.execute('INSERT INTO t(i, a) VALUES (?, ?)', (i, d,)) + cur.execute('SELECT a FROM t WHERE i = ?', (i,)) + assert cur.fetchone()[0] == d + assert cur.fetchone() is None + +def test_serialize_string(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a VARCHAR(255) NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', ('Hello World',)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == 'Hello World' + assert cur.fetchone() is None + +def test_serialize_binary(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a FILE NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (b'Hello World',)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == b'Hello World' + assert cur.fetchone() is None + +def test_serialize_date(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DATE NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (datetime.date(2024, 3, 8),)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == datetime.date(2024, 3, 8) + assert cur.fetchone() is None + +def test_serialize_float(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DOUBLE NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (2.71,)) + cur.execute('SELECT a FROM t') + assert math.isclose(cur.fetchone()[0], 2.71) + assert cur.fetchone() is None + +def test_serialize_time(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a TIME NOT NULL, PRIMARY KEY(i))') 
+ cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (datetime.time(15, 19, 10),)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == datetime.time(15, 19, 10) + assert cur.fetchone() is None + +def test_serialize_timestamp(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a TIMESTAMP NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (datetime.datetime(2024, 3, 8, 15, 19, 10),)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == datetime.datetime(2024, 3, 8, 15, 19, 10).astimezone(datetime.timezone.utc) + assert cur.fetchone() is None + +def test_serialize_interval(cur): + cur.execute("SELECT INTERVAL '3' SECOND") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 3000) + cur.execute("SELECT INTERVAL '3:7' MINUTE TO SECOND") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 187000) + cur.execute("SELECT INTERVAL '3' MINUTE") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 180000) + cur.execute("SELECT INTERVAL '3:0:7' HOUR TO SECOND") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 10807000) + cur.execute("SELECT INTERVAL '3:7' HOUR TO MINUTE") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 11220000) + cur.execute("SELECT INTERVAL '3' HOUR") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 10800000) + cur.execute("SELECT INTERVAL '3 0:0:7' DAY TO SECOND") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 259207000) + cur.execute("SELECT INTERVAL '3 0:7' DAY TO MINUTE") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 259620000) + cur.execute("SELECT INTERVAL '3 7' DAY TO HOUR") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 284400000) + cur.execute("SELECT INTERVAL '3' DAY") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(0, 259200000) + cur.execute("SELECT INTERVAL '3' MONTH") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(3, 0) + cur.execute("SELECT INTERVAL 
'3-7' YEAR TO MONTH") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(43, 0) + cur.execute("SELECT INTERVAL '3' YEAR") + assert cur.fetchone()[0] == IntervalMonthMilliseconds(36, 0) + +def test_serialize_null(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a INTEGER, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (None,)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == None + assert cur.fetchone() is None + +def test_serialize_list(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a INTEGER ARRAY(1, 3), PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', ([1, 2, 3],)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == [1, 2, 3] + assert cur.fetchone() is None + +def test_serialize_decimal_large(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DECIMAL(1) NOT NULL, PRIMARY KEY(i))') + with pytest.raises(polypheny.Error): + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (2**77,)) + +def test_serialize_decimal_large2(cur): + if os.getenv('DEFAULT_STORE', '') != 'monetdb': + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a DECIMAL NOT NULL, PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (2**77,)) + cur.execute('SELECT a FROM t') + + assert cur.fetchone()[0] == 151115727451828646838272 + assert cur.fetchone() is None + + +def test_deserialize_number(cur): + cur.execute('SELECT 1') + assert cur.fetchone()[0] == 1 + +def test_deserialize_float(cur): + cur.execute('SELECT CAST(0.05 AS FLOAT)') + assert cur.fetchone()[0] == decimal.Decimal('0.05') + +def test_deserialize_real(cur): + cur.execute('SELECT 0.05') + assert cur.fetchone()[0] == decimal.Decimal('0.05') + +def test_insert_double(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a 
INTEGER)') + cur.execute('INSERT INTO t(id, a) VALUES (1, 2), (?, ?)', (2, 3)) + cur.execute('SELECT id, a FROM t ORDER BY id') + assert cur.fetchone() == [1, 2] + assert cur.fetchone() == [2, 3] + +def test_deserialize_string(cur): + cur.execute("SELECT 'Hello World'") + assert cur.fetchone()[0] == 'Hello World' + +def test_serialize_null_string(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a VARCHAR(255), PRIMARY KEY(i))') + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (None,)) + cur.execute('SELECT a FROM t') + + assert cur.fetchone()[0] == None + assert cur.fetchone() is None + +def test_deserialize_null(cur): + cur.execute("SELECT NULL") + + assert cur.fetchone()[0] == None + +def test_serialize_varbinary(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a BINARY VARYING NOT NULL, PRIMARY KEY(i))') + + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', (b'Hello World',)) + cur.execute('SELECT a FROM t') + assert cur.fetchone()[0] == b'Hello World' + assert cur.fetchone() is None + +def test_serialize_not_implemented(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(i INTEGER NOT NULL, a INTEGER, PRIMARY KEY(i))') + with pytest.raises(NotImplementedError): + cur.execute('INSERT INTO t(i, a) VALUES (0, ?)', ({'a': 'b'},)) + +def test_trailing_semicolon(cur): + cur.execute("SELECT 1;") + +def test_with_superfluous_param(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, a BOOLEAN)') + cur.execute('INSERT INTO t(id, a) VALUES (0, ?)', (True,)) + cur.execute('SELECT a FROM t', (True,)) + assert cur.fetchone()[0] == True + +def test_no_error_when_invalid_create(cur): + cur.execute('DROP TABLE IF EXISTS t') + with pytest.raises(polypheny.Error): + cur.execute('CREATE TABLE t(a BOOLEAN)') + +def test_dynamic_text_parameter(cur): + cur.execute('DROP TABLE IF EXISTS t') + cur.execute('CREATE TABLE 
t(i INTEGER NOT NULL, a TEXT NOT NULL, PRIMARY KEY(i))') + cur.execute('SELECT a FROM t WHERE a = ?', ('Hello World',)) + + assert cur.fetchone() is None diff --git a/tests/test_unix.py b/tests/test_unix.py new file mode 100644 index 00000000..188e958a --- /dev/null +++ b/tests/test_unix.py @@ -0,0 +1,37 @@ +# Copyright 2024 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +import pytest + +import polypheny + +def unix_connect(args): + if args: + con = polypheny.connect(os.path.expanduser("~/.polypheny/polypheny-prism.sock"), username='pa', password='', transport='unix') + else: + con = polypheny.connect() + con.close() + +def test_connect_unix(): + if sys.platform == 'win32': # TODO: Once this works the documentation needs to be updated + with pytest.raises(AttributeError): + unix_connect(False) + with pytest.raises(AttributeError): + unix_connect(True) + else: + unix_connect(True) + unix_connect(False) diff --git a/utils/generate_protobuf.sh b/utils/generate_protobuf.sh deleted file mode 100644 index 1a8a8642..00000000 --- a/utils/generate_protobuf.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# -# ADMIN UTILITY -# -# This script is only used during the development process and aids to recreated -# the protobug related python files which correspond to the RPC's server defined .proto files. - -# You DO NOT need to invoke this script manually. 
-# This is only needed id the proto files on avatica-core-${VERSION}-POLYPHENY have changed - -# Copyright 2019-2021 The Polypheny Project -AUTHOR="Marc Hennemann" - - - -set -e - -# Retrieve latest version -AVATICA_VER="v1.17.2" - - -# Cleanup old environment -rm -rf polypheny-avatica-tmp - -# Recreate new environemnt -mkdir polypheny-avatica-tmp -cd polypheny-avatica-tmp - -# Get latest version of polypheny-avatica -wget -O polypheny-avatica.tar.gz https://github.com/polypheny/Avatica/archive/refs/tags/${AVATICA_VER}.tar.gz -tar -x --strip-components=1 -f polypheny-avatica.tar.gz - - - -rm -f ../polypheny/avatica/protobuf/*_pb2.py -protoc --proto_path=polypheny-avatica-tmp/core/src/main/protobuf/ --python_out=polypheny/avatica/protobuf polypheny-avatica-tmp/core/src/main/protobuf/*.proto -protoc --proto_path=polypheny-avatica-tmp/ --python_out=polypheny/avatica/protobuf polypheny-avatica-tmp/*.proto -sed -i 's/import common_pb2/from . import common_pb2/' ../polypheny/avatica/protobuf/*_pb2.py - -rm -rf polypheny-avatica-tmp