diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index eab067c3..74868c18 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @g-despot @Josipmrden @BorisTasevski @katarinasupe @brunos252 +* @g-despot @Josipmrden @BorisTasevski @katarinasupe @brunos252 @niko4299 diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 3f014924..7e8a4959 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -1,7 +1,7 @@ name: Build and Test env: - MG_VERSION: "2.1.1" + MG_VERSION: "2.3.0" POETRY_VERSION: "1.1.4" on: @@ -23,25 +23,39 @@ jobs: steps: - name: Checkout Repository uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.py-version }} + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} + - name: Set up pip and install packages + run: | + python -m pip install -U pip + sudo -H pip install networkx numpy scipy - name: Setup poetry uses: abatilo/actions-poetry@v2.0.0 with: poetry-version: ${{ env.POETRY_VERSION }} - - name: Run memgraph instance - run: | - docker run -d -p 7687:7687 memgraph/memgraph --telemetry-enabled=False - name: Install Memgraph run: | - curl -O https://download.memgraph.com/memgraph/v2.2.1/ubuntu-20.04/memgraph_2.2.1-1_amd64.deb - sudo dpkg -i memgraph_2.2.1-1_amd64.deb + mkdir /home/runner/memgraph + curl -L https://download.memgraph.com/memgraph/v${{env.MG_VERSION}}/ubuntu-20.04/memgraph_${{env.MG_VERSION}}-1_amd64.deb --output /home/runner/memgraph/memgraph-community.deb + sudo dpkg -i /home/runner/memgraph/memgraph-community.deb + sudo systemctl stop memgraph + sudo runuser -l memgraph -c '/usr/lib/memgraph/memgraph --bolt-port 7687 --bolt-session-inactivity-timeout=300 --data-directory="/var/lib/memgraph/data" --storage-properties-on-edges=true --storage-snapshot-interval-sec=0 --storage-wal-enabled=false --storage-recover-on-startup=false --storage-snapshot-on-exit=false --telemetry-enabled=false --log-level=TRACE --also-log-to-stderr=true --log-file=/var/log/memgraph/memgraph-ubuntu-${{ matrix.python-version }}.log' & + sleep 1 # Wait for Memgraph a bit. 
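The `sleep 1` wait above is a fixed delay; a readiness poll is more robust. A minimal Python sketch of such a check, mirroring the `wait_for_port` helper documented under `gqlalchemy.instance_runner` later in this diff (the host, port, and timing constants here are assumptions):

```python
import socket
import time

def wait_for_bolt_port(host: str = "127.0.0.1", port: int = 7687,
                       delay: float = 0.01, timeout: float = 5.0,
                       backoff: int = 2) -> None:
    """Poll a TCP port with exponential backoff until it accepts connections."""
    start = time.perf_counter()
    while True:
        try:
            # A successful connection means the Bolt server is accepting clients.
            with socket.create_connection((host, port), timeout=delay):
                return
        except OSError:
            if time.perf_counter() - start >= timeout:
                raise TimeoutError(f"Port {port} on {host} not available after {timeout} s.")
            time.sleep(delay)
            delay *= backoff  # exponential backoff, as in the documented helper
```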
+ - name: Install Neo4j + run: | + docker run -p 7474:7474 -p 7688:7687 -d -v $HOME/neo4j/data:/data -v $HOME/neo4j/logs:/logs -v $HOME/neo4j/import:/var/lib/neo4j/import -v $HOME/neo4j/plugins:/plugins --env NEO4J_AUTH=neo4j/test neo4j:4.4.7 - name: Test project run: | poetry install - poetry run pytest -vvv -m "not slow" + poetry run pytest -vvv -m "not slow and not ubuntu and not docker" + - name: Use the Upload Artifact GitHub Action + uses: actions/upload-artifact@v3 + if: always() + with: + name: assets-for-download + path: /var/log/memgraph build_and_test_windows: if: github.event.pull_request.draft == false @@ -53,21 +67,44 @@ jobs: steps: - name: Checkout Repository uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.py-version }} + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} + - name: Set up pip and install packages + run: | + python -m pip install -U pip + python -m pip install networkx numpy scipy - uses: Vampire/setup-wsl@v1 with: distribution: Ubuntu-20.04 - name: Download, install and run Memgraph under WSL shell: wsl-bash {0} # root shell run: | - mkdir ~/memgraph - curl -L https://download.memgraph.com/memgraph/v${{env.MG_VERSION}}/ubuntu-20.04/memgraph_${{env.MG_VERSION}}-1_amd64.deb --output ~/memgraph/memgraph-community.deb - dpkg -i ~/memgraph/memgraph-community.deb - nohup /usr/lib/memgraph/memgraph --bolt-port 7687 --data-directory="~/memgraph/data" --storage-properties-on-edges=true --storage-snapshot-interval-sec=0 --storage-wal-enabled=false --storage-recover-on-startup=false --storage-snapshot-on-exit=false --telemetry-enabled=false --log-file='' & + sudo apt-get update + sudo apt-get -y install python3 python3-pip ipython3 + pip3 install networkx numpy scipy + mkdir /memgraph + curl -L https://download.memgraph.com/memgraph/v${{env.MG_VERSION}}/ubuntu-20.04/memgraph_${{env.MG_VERSION}}-1_amd64.deb --output /memgraph/memgraph-community.deb + dpkg -i /memgraph/memgraph-community.deb + mkdir /mnt/c/memgraph + runuser -l memgraph -c '/usr/lib/memgraph/memgraph --bolt-port 7687 --bolt-session-inactivity-timeout=300 --data-directory="/mnt/c/memgraph/data" --storage-properties-on-edges=true --storage-snapshot-interval-sec=0 --storage-wal-enabled=false --storage-recover-on-startup=false --storage-snapshot-on-exit=false --telemetry-enabled=false --log-level=TRACE --also-log-to-stderr=true --log-file=/mnt/c/memgraph/memgraph-windows-${{ matrix.python-version }}.log' & sleep 1 # Wait for Memgraph a bit. 
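Both CI jobs keep Memgraph on the default Bolt port 7687 and remap Neo4j to 7688 so the two servers can run side by side. A minimal sketch of how a test might address both instances, using the `NEO4J_AUTH=neo4j/test` credentials from the workflow (labels and property values are illustrative):

```python
from gqlalchemy import Memgraph
from neo4j import GraphDatabase

# Memgraph from the step above listens on the default Bolt port.
memgraph = Memgraph(host="127.0.0.1", port=7687)
memgraph.execute("CREATE (:Person {name: 'Leslie'});")

# Neo4j is remapped to 7688 so it does not clash with Memgraph;
# the neo4j/test credentials come from NEO4J_AUTH in the workflow.
driver = GraphDatabase.driver("bolt://127.0.0.1:7688", auth=("neo4j", "test"))
with driver.session() as session:
    session.run("CREATE (:Person {name: 'Ron'});")
driver.close()
```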
+ - name: Download, install and run Neo4j under WSL + shell: wsl-bash {0} # root shell + run: | + curl -fsSL https://debian.neo4j.com/neotechnology.gpg.key | apt-key add - + add-apt-repository "deb https://debian.neo4j.com stable 4.2" + sudo apt-get -y install neo4j=1:4.2.1 + neo4j-admin set-initial-password test + mkdir -p /mnt/c/neo4j/${{ matrix.python-version }} + sed -i 's/#dbms.connector.bolt.listen_address=:7687/dbms.connector.bolt.listen_address=:7688/g' /etc/neo4j/neo4j.conf + sed -i 's/#dbms.connector.bolt.advertised_address=:7687/dbms.connector.bolt.advertised_address=:7688/g' /etc/neo4j/neo4j.conf + sed -i 's/#dbms.default_listen_address=0.0.0.0/dbms.default_listen_address=0.0.0.0/g' /etc/neo4j/neo4j.conf + sed -i 's/dbms.directories.logs=\/var\/log\/neo4j/dbms.directories.logs=\/mnt\/c\/neo4j\/${{ matrix.python-version }}/g' /etc/neo4j/neo4j.conf + cp /etc/neo4j/neo4j.conf /mnt/c/neo4j/${{ matrix.python-version }}/neo4j.conf + sudo service neo4j start + sleep 10 # Wait for Neo4j a bit. - name: Setup poetry uses: abatilo/actions-poetry@v2.0.0 with: @@ -76,3 +113,15 @@ jobs: run: | poetry install poetry run pytest -vvv -m "not slow and not ubuntu and not docker" + - name: Save Memgraph Logs + uses: actions/upload-artifact@v3 + if: always() + with: + name: memgraph-log + path: C:\memgraph + - name: Save Neo4j Logs + uses: actions/upload-artifact@v3 + if: always() + with: + name: neo4j-log + path: C:\neo4j diff --git a/README.md b/README.md index 59d12894..bfee4b62 100644 --- a/README.md +++ b/README.md @@ -91,10 +91,11 @@ When building a Cypher query, you can use a set of methods that are wrappers aro ```python from gqlalchemy import create, match +from gqlalchemy.query_builder import Operator query_create = create() .node(labels="Person", name="Leslie") - .to(edge_label="FRIENDS_WITH") + .to(relationship_type="FRIENDS_WITH") .node(labels="Person", name="Ron") .execute() @@ -102,8 +103,8 @@ query_match = match() .node(labels="Person", variable="p1") .to() .node(labels="Person", variable="p2") - .where(item="p1.name", operator="=", literal="Leslie") - .return_({"p1":"p1"}) + .where(item="p1.name", operator=Operator.EQUAL, literal="Leslie") + .return_(results=["p1", ("p2", "second")]) .execute() ``` diff --git a/docs/reference/gqlalchemy/graph_algorithms/integrated_algorithms.md b/docs/reference/gqlalchemy/graph_algorithms/integrated_algorithms.md new file mode 100644 index 00000000..a71ef17b --- /dev/null +++ b/docs/reference/gqlalchemy/graph_algorithms/integrated_algorithms.md @@ -0,0 +1,166 @@ +--- +sidebar_label: integrated_algorithms +title: gqlalchemy.graph_algorithms.integrated_algorithms +--- + +## IntegratedAlgorithm Objects + +```python +class IntegratedAlgorithm(ABC) +``` + +Abstract class modeling Memgraph's built-in graph algorithms. + +These algorithms are integrated into Memgraph's codebase and are called +within a relationship part of a query. For instance: +MATCH p = (:City {name: "Paris"}) + -[:Road * bfs (r, n | r.length <= 200 AND n.name != "Metz")]-> + (:City {name: "Berlin"}) + +#### \_\_str\_\_ + +```python +@abstractmethod +def __str__() -> str +``` + +Instance of IntegratedAlgorithm subclass is used as a string + +#### to\_cypher\_lambda + +```python +@staticmethod +def to_cypher_lambda(expression: str) -> str +``` + +Method for creating a general lambda expression. + +Variables `r` and `n` stand for relationship and node. The expression is +used e.g. 
for a filter lambda, to use only relationships of length less +than 200: +expression="r.length < 200" +with the filter lambda being: +(r, n | r.length < 200) + +**Arguments**: + +- `expression` - Lambda conditions or statements. + +## BreadthFirstSearch Objects + +```python +class BreadthFirstSearch(IntegratedAlgorithm) +``` + +Build a BFS call for a Cypher query. + +The Breadth-first search can be called in Memgraph with Cypher queries such +as: `MATCH (a {id: 723})-[*BFS ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN *;` +It is called inside the relationship clause, `*BFS` naming the algorithm, +`..10` specifying depth bounds, and `(r, n | <expression>)` is a filter +lambda. + +#### \_\_init\_\_ + +```python +def __init__(lower_bound: int = None, upper_bound: int = None, condition: str = None) -> None +``` + +**Arguments**: + +- `lower_bound` - Lower bound for path depth. Defaults to `None`. +- `upper_bound` - Upper bound for path depth. Defaults to `None`. +- `condition` - Filter through nodes and relationships that pass this + condition. Defaults to `None`. + +#### \_\_str\_\_ + +```python +def __str__() -> str +``` + +Get a Cypher query string for this algorithm. + +#### to\_cypher\_bounds + +```python +def to_cypher_bounds() -> str +``` + +If bounds are specified, returns them in grammar-defined form. + +## DepthFirstSearch Objects + +```python +class DepthFirstSearch(IntegratedAlgorithm) +``` + +Build a DFS call for a Cypher query. +The Depth-First Search can be called in Memgraph with Cypher queries +such as: +MATCH (a {id: 723})-[* ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN *; +It is called inside the relationship clause, "*" naming the algorithm +("*" without "DFS" because it is defined as such in openCypher), +"..10" specifying depth bounds, and "(r, n | <expression>)" is a filter +lambda. + +#### \_\_init\_\_ + +```python +def __init__(lower_bound: int = None, upper_bound: int = None, condition: str = None) -> None +``` + +**Arguments**: + +- `lower_bound` - Lower bound for path depth. Defaults to None. +- `upper_bound` - Upper bound for path depth. Defaults to None. +- `condition` - Filter through nodes and relationships that pass this + condition. Defaults to None. + +#### \_\_str\_\_ + +```python +def __str__() -> str +``` + +Get a Cypher query string for this algorithm. + +#### to\_cypher\_bounds + +```python +def to_cypher_bounds() -> str +``` + +If bounds are specified, returns them in grammar-defined form. + +## WeightedShortestPath Objects + +```python +class WeightedShortestPath(IntegratedAlgorithm) +``` + +Build a Dijkstra shortest path call for a Cypher query. +The weighted shortest path algorithm can be called in Memgraph with Cypher +queries such as: +" MATCH (a {id: 723})-[r *WSHORTEST 10 (r, n | r.weight) weight_sum + (r, n | r.x > 12 AND r.y < 3)]-(b {id: 882}) RETURN * " +It is called inside the relationship clause, "*WSHORTEST" naming the +algorithm, "10" specifying search depth bounds, and "(r, n | <expression>)" +is a filter lambda, used to filter which relationships and nodes to use. + +#### \_\_init\_\_ + +```python +def __init__(upper_bound: int = None, condition: str = None, total_weight_var: str = DEFAULT_TOTAL_WEIGHT, weight_property: str = DEFAULT_WEIGHT_PROPERTY) -> None +``` + +**Arguments**: + +- `upper_bound` - Upper bound for path depth. Defaults to None. +- `condition` - Filter through nodes and relationships that pass this + condition. Defaults to None.
+- `total_weight_var` - Variable defined as the sum of all weights on + path being returned. Defaults to "total_weight". +- `weight_property` - The property used as the weight. Defaults to + "r.weight". + diff --git a/docs/reference/gqlalchemy/graph_algorithms/query_builder.md b/docs/reference/gqlalchemy/graph_algorithms/query_builder.md new file mode 100644 index 00000000..4a668b6e --- /dev/null +++ b/docs/reference/gqlalchemy/graph_algorithms/query_builder.md @@ -0,0 +1,25 @@ +--- +sidebar_label: query_builder +title: gqlalchemy.graph_algorithms.query_builder +--- + +## MemgraphQueryBuilder Objects + +```python +class MemgraphQueryBuilder(QueryBuilder) +``` + +This query builder extends the usual Cypher query builder capabilities with Memgraph's query modules. +With this module, the user gets autocomplete features for graph algorithms. +Documentation on the methods can be found on Memgraph's web page. + +## MageQueryBuilder Objects + +```python +class MageQueryBuilder(MemgraphQueryBuilder) +``` + +This query builder extends the Memgraph query builder with Memgraph MAGE graph algorithm Cypher options. +With this module, the user gets autocomplete features for the graph algorithms written in the MAGE library. +Documentation on the methods can be found on Memgraph's web page. + diff --git a/docs/reference/gqlalchemy/graph_algorithms/query_modules.md b/docs/reference/gqlalchemy/graph_algorithms/query_modules.md new file mode 100644 index 00000000..415f2f2b --- /dev/null +++ b/docs/reference/gqlalchemy/graph_algorithms/query_modules.md @@ -0,0 +1,78 @@ +--- +sidebar_label: query_modules +title: gqlalchemy.graph_algorithms.query_modules +--- + +## QueryModule Objects + +```python +class QueryModule() +``` + +Class representing a single MAGE query module. + +#### set\_argument\_values + +```python +def set_argument_values(**kwargs) -> None +``` + +Set values for QueryModule arguments so the module can be called. + +Kwargs: +Named arguments in self.arguments. + +**Raises**: + +- `KeyError` - Passed an argument not in the self.arguments list. + +#### get\_arguments\_for\_call + +```python +def get_arguments_for_call() -> str +``` + +Return inputs in the form "value1, value2, ..." for the QueryBuilder call() +method. + +**Raises**: + +- `KeyError` - Cannot get all values of arguments because one or more is + not set. + +#### parse\_query\_module\_signature + +```python +def parse_query_module_signature(signature: str) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]] +``` + +Query module signatures received from Memgraph are parsed into a +list of dictionaries. + +One list is for arguments and another for returns. +For instance, if a query module signature is: +dummy_module.dummy(lst :: LIST OF STRING, num = 3 :: NUMBER) :: (ret :: STRING) +the method should return a list of arguments: +[{"name": "lst", "type": "LIST OF STRING"}, {"name": "num", "type": "NUMBER", "default": 3}] +and a list of returns: +[{"name": "ret", "type": "STRING"}] + +Each dictionary consists of the fields: "name" - the argument name, "type" - the +argument's data type, and "default" - the default argument value, if given. + +**Arguments**: + +- `signature` - Module signature as returned by the Cypher CALL operation. + +#### parse\_field + +```python +def parse_field(vars_field: str, name_type_delimiter: str = NAME_TYPE_DELIMITIER, default_value_delimiter: str = EQUALS_DELIMITER) -> List[Dict[str, str]] +``` + +Parse a field of arguments or returns from a query module signature.
+ +**Arguments**: + +- `vars_field` - signature field inside parentheses + diff --git a/docs/reference/gqlalchemy/instance_runner.md b/docs/reference/gqlalchemy/instance_runner.md index 43ae2353..54d4b4cc 100644 --- a/docs/reference/gqlalchemy/instance_runner.md +++ b/docs/reference/gqlalchemy/instance_runner.md @@ -6,7 +6,7 @@ title: gqlalchemy.instance_runner #### wait\_for\_port ```python -def wait_for_port(host: str = "127.0.0.1", port: int = 7687, delay: float = 0.01, timeout: float = 5.0) -> None +def wait_for_port(host: str = LOOPBACK_ADDRESS, port: int = MEMGRAPH_DEFAULT_PORT, delay: float = 0.01, timeout: float = 5.0, backoff: int = 2) -> None ``` Wait for a TCP port to become available. @@ -17,6 +17,7 @@ Wait for a TCP port to become available. - `port` - A string representing the port that is being checked. - `delay` - A float that defines how long to wait between retries. - `timeout` - A float that defines how long to wait for the port. +- `backoff` - An integer used for multiplying the delay. **Raises**: @@ -27,7 +28,7 @@ Wait for a TCP port to become available. #### wait\_for\_docker\_container ```python -def wait_for_docker_container(container: "docker.Container", delay: float = 0.01, timeout: float = 5.0) -> None +def wait_for_docker_container(container: "docker.Container", delay: float = 0.01, timeout: float = 5.0, backoff: int = 2) -> None ``` Wait for a Docker container to enter the status `running`. @@ -37,6 +38,7 @@ Wait for a Docker container to enter the status `running`. - `container` - The Docker container to wait for. - `delay` - A float that defines how long to wait between retries. - `timeout` - A float that defines how long to wait for the status. +- `backoff` - An integer used for multiplying the delay. **Raises**: @@ -44,29 +46,33 @@ Wait for a Docker container to enter the status `running`. - `TimeoutError` - Raises an error when the container isn't running after the timeout period has passed. -## MemgraphInstanceBinary Objects +## MemgraphInstance Objects ```python -class MemgraphInstanceBinary(MemgraphInstance) +class MemgraphInstance(ABC) ``` -A class for managing Memgraph instances started from binary files on Unix -systems. +#### start\_and\_connect + +```python +def start_and_connect(restart: bool = False) -> "Memgraph" +``` + +Start the Memgraph instance and return the +connection object. **Attributes**: -- `binary_path` - A string representing the path to a Memgraph binary - file. -- `user` - A string representing the user that should start the Memgraph - process. +- `restart` - A bool indicating if the instance should be + restarted if it's already running. #### start ```python -def start(restart: bool = False) -> "Memgraph" +def start(restart: bool = False) -> None ``` -Start the Memgraph instance and return the connection object. +Start the Memgraph instance. **Attributes**: @@ -76,11 +82,27 @@ Start the Memgraph instance and return the connection object. #### stop ```python -def stop() -> int +def stop() -> Any ``` Stop the Memgraph instance. +## MemgraphInstanceBinary Objects + +```python +class MemgraphInstanceBinary(MemgraphInstance) +``` + +A class for managing Memgraph instances started from binary files on Unix +systems. + +**Attributes**: + +- `binary_path` - A string representing the path to a Memgraph binary + file. +- `user` - A string representing the user that should start the Memgraph + process. + #### is\_running ```python @@ -103,27 +125,6 @@ A class for managing Memgraph instances started in Docker containers. 
`DockerImage.MEMGRAPH` and `DockerImage.MAGE`. - `docker_image_tag` - A string representing the tag of the Docker image. -#### start - -```python -def start(restart: bool = False) -> "Memgraph" -``` - -Start the Memgraph instance and return the connection object. - -**Attributes**: - -- `restart` - A bool indicating if the instance should be - restarted if it's already running. - -#### stop - -```python -def stop() -> Dict -``` - -Stop the Memgraph instance. - #### is\_running ```python diff --git a/docs/reference/gqlalchemy/loaders.md b/docs/reference/gqlalchemy/loaders.md new file mode 100644 index 00000000..afaa6b5a --- /dev/null +++ b/docs/reference/gqlalchemy/loaders.md @@ -0,0 +1,712 @@ +--- +sidebar_label: loaders +title: gqlalchemy.loaders +--- + +## ForeignKeyMapping Objects + +```python +@dataclass(frozen=True) +class ForeignKeyMapping() +``` + +Class that contains the full description of a single foreign key in a table. + +**Attributes**: + +- `column_name` - Column name that holds the foreign key. +- `reference_table` - Name of a table from which the foreign key is taken. +- `reference_key` - Column name in the referenced table from which the foreign key is taken. + +## OneToManyMapping Objects + +```python +@dataclass(frozen=True) +class OneToManyMapping() +``` + +Class that holds the full description of a single one to many mapping in a table. + +**Attributes**: + +- `foreign_key` - Foreign key used for mapping. +- `label` - Label which will be applied to the relationship created from this object. +- `from_entity` - Direction of the relationship created from the mapping object. +- `parameters` - Parameters that will be added to the relationship created from this object (Optional). + +## ManyToManyMapping Objects + +```python +@dataclass(frozen=True) +class ManyToManyMapping() +``` + +Class that holds the full description of a single many to many mapping in a table. +Many to many mapping is intended to be used in case of associative tables. + +**Attributes**: + +- `foreign_key_from` - Describes the source of the relationship. +- `foreign_key_to` - Describes the destination of the relationship. +- `label` - Label to be applied to the newly created relationship. +- `parameters` - Parameters that will be added to the relationship created from this object (Optional). + +## TableMapping Objects + +```python +@dataclass +class TableMapping() +``` + +Class that holds the full description of all of the mappings for a single table. + +**Attributes**: + +- `table_name` - Name of the table. +- `mapping` - All of the mappings in the table (Optional). +- `indices` - List of the indices to be created for this table (Optional). + +## NameMappings Objects + +```python +@dataclass(frozen=True) +class NameMappings() +``` + +Class that contains new label name and all of the column name mappings for a single table. + +**Attributes**: + +- `label` - New label (Optional). +- `column_names_mapping` - Dictionary containing key-value pairs in form ("column name", "property name") (Optional). + +## NameMapper Objects + +```python +class NameMapper() +``` + +Class that holds all name mappings for all of the collections. + +#### get\_label + +```python +def get_label(collection_name: str) -> str +``` + +Returns label for given collection. + +**Arguments**: + +- `collection_name` - Original collection name. + +#### get\_property\_name + +```python +def get_property_name(collection_name: str, column_name: str) -> str +``` + +Returns property name for column from collection. 
+ +**Arguments**: + +- `collection_name` - Original collection name. +- `column_name` - Original column name. + +## FileSystemHandler Objects + +```python +class FileSystemHandler(ABC) +``` + +Abstract class for defining FileSystemHandler. + +Inherit this class, define a custom data source and initialize the +connection. + +#### get\_path + +```python +@abstractmethod +def get_path(collection_name: str) -> str +``` + +Returns the complete path in the specific file system. Used to read a +specific file from the file system. + +## S3FileSystemHandler Objects + +```python +class S3FileSystemHandler(FileSystemHandler) +``` + +Handles connection to Amazon S3 service via PyArrow. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, **kwargs) +``` + +Initializes connection and data bucket. + +**Arguments**: + +- `bucket_name` - Name of the bucket on S3 from which to read the data. + + Kwargs: +- `access_key` - S3 access key. +- `secret_key` - S3 secret key. +- `region` - S3 region. +- `session_token` - S3 session token (Optional). + + +**Raises**: + +- `KeyError` - kwargs doesn't contain necessary fields. + +#### get\_path + +```python +def get_path(collection_name: str) -> str +``` + +Get file path in file system. + +**Arguments**: + +- `collection_name` - Name of the file to read. + +## AzureBlobFileSystemHandler Objects + +```python +class AzureBlobFileSystemHandler(FileSystemHandler) +``` + +Handles connection to Azure Blob service via adlfs package. + +#### \_\_init\_\_ + +```python +def __init__(container_name: str, **kwargs) -> None +``` + +Initializes connection and data container. + +**Arguments**: + +- `container_name` - Name of the Blob container storing data. + + Kwargs: +- `account_name` - Account name from Azure Blob. +- `account_key` - Account key for Azure Blob (Optional - if using sas_token). +- `sas_token` - Shared access signature token for authentication (Optional). + + +**Raises**: + +- `KeyError` - kwargs doesn't contain necessary fields. + +#### get\_path + +```python +def get_path(collection_name: str) -> str +``` + +Get file path in file system. + +**Arguments**: + +- `collection_name` - Name of the file to read. + +## LocalFileSystemHandler Objects + +```python +class LocalFileSystemHandler(FileSystemHandler) +``` + +Handles a local filesystem. + +#### \_\_init\_\_ + +```python +def __init__(path: str) -> None +``` + +Initializes an fsspec local file system and sets path to data. + +**Arguments**: + +- `path` - Path to the local storage location. + +#### get\_path + +```python +def get_path(collection_name: str) -> str +``` + +Get file path in the local file system. + +**Arguments**: + +- `collection_name` - Name of the file to read. + +## DataLoader Objects + +```python +class DataLoader(ABC) +``` + +Implements loading of a data type from file system service to TableToGraphImporter. + +#### \_\_init\_\_ + +```python +def __init__(file_extension: str, file_system_handler: FileSystemHandler) -> None +``` + +**Arguments**: + +- `file_extension` - File format to be read. +- `file_system_handler` - Object for handling the file system service. + +#### load\_data + +```python +@abstractmethod +def load_data(collection_name: str, is_cross_table: bool = False) -> None +``` + +Override this method in the derived class. Intended to be used for reading data in a specific file format. + +**Arguments**: + +- `collection_name` - Name of the file to read. +- `is_cross_table` - Indicate whether or not the collection contains an associative table (default=False).
+ + +**Raises**: + +- `NotImplementedError` - The method is not implemented in the extended class. + +## PyArrowFileTypeEnum Objects + +```python +class PyArrowFileTypeEnum(Enum) +``` + +Enumerates file types supported by PyArrow. + +## PyArrowDataLoader Objects + +```python +class PyArrowDataLoader(DataLoader) +``` + +Loads data using PyArrow. + +PyArrow currently supports "parquet", "ipc"/"arrow"/"feather", "csv", +and "orc"; see pyarrow.dataset.dataset for up-to-date info. +ds.dataset in load_data accepts any fsspec subclass, making this DataLoader +compatible with fsspec-compatible filesystems. + +#### \_\_init\_\_ + +```python +def __init__(file_extension_enum: PyArrowFileTypeEnum, file_system_handler: FileSystemHandler) -> None +``` + +**Arguments**: + +- `file_extension_enum` - The file format to be read. +- `file_system_handler` - Object for handling the file system service. + +#### load\_data + +```python +def load_data(collection_name: str, is_cross_table: bool = False, columns: Optional[List[str]] = None) -> None +``` + +Generator for loading data. + +**Arguments**: + +- `collection_name` - Name of the file to read. +- `is_cross_table` - Flag signifying whether it is a cross table. +- `columns` - Table columns to read. + +## TableToGraphImporter Objects + +```python +class TableToGraphImporter() +``` + +Implements translation of table data to graph data, and imports it to Memgraph. + +#### \_\_init\_\_ + +```python +def __init__(data_loader: DataLoader, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `data_loader` - Object for loading data. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). + +#### translate + +```python +def translate(drop_database_on_start: bool = True) -> None +``` + +Performs the translations. + +**Arguments**: + +- `drop_database_on_start` - Indicate whether or not the database should be dropped prior to the start of the translations. + +## PyArrowImporter Objects + +```python +class PyArrowImporter(TableToGraphImporter) +``` + +TableToGraphImporter wrapper for use with PyArrow for reading data. + +#### \_\_init\_\_ + +```python +def __init__(file_system_handler: str, file_extension_enum: PyArrowFileTypeEnum, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `file_system_handler` - File system to read from. +- `file_extension_enum` - File format to be read. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). + + +**Raises**: + +- `ValueError` - PyArrow doesn't support ORC on Windows. + +## PyArrowS3Importer Objects + +```python +class PyArrowS3Importer(PyArrowImporter) +``` + +PyArrowImporter wrapper for use with the Amazon S3 File System. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, file_extension_enum: PyArrowFileTypeEnum, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `bucket_name` - Name of the bucket in S3 to read from. +- `file_extension_enum` - File format to be read. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for S3FileSystem. + +## PyArrowAzureBlobImporter Objects + +```python +class PyArrowAzureBlobImporter(PyArrowImporter) +``` + +PyArrowImporter wrapper for use with the Azure Blob File System.
+ +#### \_\_init\_\_ + +```python +def __init__(container_name: str, file_extension_enum: PyArrowFileTypeEnum, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `container_name` - Name of the container in Azure Blob to read from. +- `file_extension_enum` - File format to be read. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for AzureBlobFileSystem. + +## PyArrowLocalFileSystemImporter Objects + +```python +class PyArrowLocalFileSystemImporter(PyArrowImporter) +``` + +PyArrowImporter wrapper for use with the Local File System. + +#### \_\_init\_\_ + +```python +def __init__(path: str, file_extension_enum: PyArrowFileTypeEnum, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `path` - Full path to the directory to read from. +- `file_extension_enum` - File format to be read. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). + +## ParquetS3FileSystemImporter Objects + +```python +class ParquetS3FileSystemImporter(PyArrowS3Importer) +``` + +PyArrowS3Importer wrapper for use with the S3 file system and the parquet file type. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `bucket_name` - Name of the bucket in S3 to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for S3FileSystem. + +## CSVS3FileSystemImporter Objects + +```python +class CSVS3FileSystemImporter(PyArrowS3Importer) +``` + +PyArrowS3Importer wrapper for use with the S3 file system and the CSV file type. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `bucket_name` - Name of the bucket in S3 to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for S3FileSystem. + +## ORCS3FileSystemImporter Objects + +```python +class ORCS3FileSystemImporter(PyArrowS3Importer) +``` + +PyArrowS3Importer wrapper for use with the S3 file system and the ORC file type. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `bucket_name` - Name of the bucket in S3 to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for S3FileSystem. + +## FeatherS3FileSystemImporter Objects + +```python +class FeatherS3FileSystemImporter(PyArrowS3Importer) +``` + +PyArrowS3Importer wrapper for use with the S3 file system and the feather file type. + +#### \_\_init\_\_ + +```python +def __init__(bucket_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `bucket_name` - Name of the bucket in S3 to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for S3FileSystem.
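Before the Azure Blob variants, a usage sketch for one of the S3 importers above; the bucket name, region, credentials, and configuration contents are assumptions, and the exact `data_configuration` schema is what the mapping dataclasses at the top of this page describe:

```python
from gqlalchemy.loaders import ParquetS3FileSystemImporter

# Placeholder: the expected keys are what the mapping dataclasses above
# (TableMapping, OneToManyMapping, ManyToManyMapping, NameMappings) describe.
data_configuration = {}

importer = ParquetS3FileSystemImporter(
    bucket_name="my-data-bucket",   # assumed bucket name
    data_configuration=data_configuration,
    access_key="...",               # S3FileSystem kwargs documented above
    secret_key="...",
    region="eu-west-1",             # assumed region
)
importer.translate(drop_database_on_start=True)  # documented on TableToGraphImporter
```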
+ +## ParquetAzureBlobFileSystemImporter Objects + +```python +class ParquetAzureBlobFileSystemImporter(PyArrowAzureBlobImporter) +``` + +PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the parquet file type. + +#### \_\_init\_\_ + +```python +def __init__(container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `container_name` - Name of the container in Azure Blob storage to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for AzureBlobFileSystem. + +## CSVAzureBlobFileSystemImporter Objects + +```python +class CSVAzureBlobFileSystemImporter(PyArrowAzureBlobImporter) +``` + +PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the CSV file type. + +#### \_\_init\_\_ + +```python +def __init__(container_name: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `container_name` - Name of the container in Azure Blob storage to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for AzureBlobFileSystem. + +## ORCAzureBlobFileSystemImporter Objects + +```python +class ORCAzureBlobFileSystemImporter(PyArrowAzureBlobImporter) +``` + +PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the ORC file type. + +#### \_\_init\_\_ + +```python +def __init__(container_name, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `container_name` - Name of the container in Blob storage to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for AzureBlobFileSystem. + +## FeatherAzureBlobFileSystemImporter Objects + +```python +class FeatherAzureBlobFileSystemImporter(PyArrowAzureBlobImporter) +``` + +PyArrowAzureBlobImporter wrapper for use with the Azure Blob file system and the Feather file type. + +#### \_\_init\_\_ + +```python +def __init__(container_name, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None, **kwargs) -> None +``` + +**Arguments**: + +- `container_name` - Name of the container in Blob storage to read from. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for AzureBlobFileSystem. + +## ParquetLocalFileSystemImporter Objects + +```python +class ParquetLocalFileSystemImporter(PyArrowLocalFileSystemImporter) +``` + +PyArrowLocalFileSystemImporter wrapper for use with the local file system and the parquet file type. + +#### \_\_init\_\_ + +```python +def __init__(path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `path` - Full path to directory. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for LocalFileSystem. + +## CSVLocalFileSystemImporter Objects + +```python +class CSVLocalFileSystemImporter(PyArrowLocalFileSystemImporter) +``` + +PyArrowLocalFileSystemImporter wrapper for use with the local file system and the CSV file type.
+ +#### \_\_init\_\_ + +```python +def __init__(path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `path` - Full path to directory. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for LocalFileSystem. + +## ORCLocalFileSystemImporter Objects + +```python +class ORCLocalFileSystemImporter(PyArrowLocalFileSystemImporter) +``` + +PyArrowLocalFileSystemImporter wrapper for use with the local file system and the ORC file type. + +#### \_\_init\_\_ + +```python +def __init__(path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `path` - Full path to directory. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for LocalFileSystem. + +## FeatherLocalFileSystemImporter Objects + +```python +class FeatherLocalFileSystemImporter(PyArrowLocalFileSystemImporter) +``` + +PyArrowLocalFileSystemImporter wrapper for use with the local file system and the Feather/IPC/Arrow file type. + +#### \_\_init\_\_ + +```python +def __init__(path: str, data_configuration: Dict[str, Any], memgraph: Optional[Memgraph] = None) -> None +``` + +**Arguments**: + +- `path` - Full path to directory. +- `data_configuration` - Configuration for the translations. +- `memgraph` - Connection to Memgraph (Optional). +- `**kwargs` - Specified for LocalFileSystem. + diff --git a/docs/reference/gqlalchemy/models.md b/docs/reference/gqlalchemy/models.md index c3557204..94159d99 100644 --- a/docs/reference/gqlalchemy/models.md +++ b/docs/reference/gqlalchemy/models.md @@ -170,7 +170,7 @@ Returns True if the Node has any unique fields. #### save ```python -def save(db: "Memgraph") -> "Node" +def save(db: "Database") -> "Node" ``` Saves node to Memgraph. @@ -184,7 +184,7 @@ Null properties are ignored. #### load ```python -def load(db: "Memgraph") -> "Node" +def load(db: "Database") -> "Node" ``` Loads a node from Memgraph. @@ -221,7 +221,7 @@ class Relationship(UniqueGraphObject, metaclass=RelationshipMetaclass) #### save ```python -def save(db: "Memgraph") -> "Relationship" +def save(db: "Database") -> "Relationship" ``` Saves a relationship to Memgraph. @@ -234,7 +234,7 @@ relationship, use `load_relationship` first. #### load ```python -def load(db: "Memgraph") -> "Relationship" +def load(db: "Database") -> "Relationship" ``` Returns a relationship loaded from Memgraph. diff --git a/docs/reference/gqlalchemy/query_builder.md b/docs/reference/gqlalchemy/query_builder.md deleted file mode 100644 index f86e2832..00000000 --- a/docs/reference/gqlalchemy/query_builder.md +++ /dev/null @@ -1,613 +0,0 @@ ---- -sidebar_label: query_builder -title: gqlalchemy.query_builder ---- - -## WhereConditionPartialQuery Objects - -```python -class WhereConditionPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Constructs a where partial query. - -## NodePartialQuery Objects - -```python -class NodePartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Constructs a node partial query. - -## EdgePartialQuery Objects - -```python -class EdgePartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Constructs an edge partial query. 
- -## UnwindPartialQuery Objects - -```python -class UnwindPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Constructs an unwind partial query. - -#### dict\_to\_alias\_statement - -```python -def dict_to_alias_statement(alias_dict: Dict[str, str]) -> str -``` - -Creates a string expression of alias statements from a dictionary of -expression, variable name dictionary. - -## WithPartialQuery Objects - -```python -class WithPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a WITH statement Cypher partial query. - -## UnionPartialQuery Objects - -```python -class UnionPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a UNION statement Cypher partial query. - -## DeletePartialQuery Objects - -```python -class DeletePartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a DELETE statement Cypher partial query. - -## RemovePartialQuery Objects - -```python -class RemovePartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a REMOVE statement Cypher partial query. - -## YieldPartialQuery Objects - -```python -class YieldPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a YIELD statement Cypher partial query. - -## ReturnPartialQuery Objects - -```python -class ReturnPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a RETURN statement Cypher partial query. - -## OrderByPartialQuery Objects - -```python -class OrderByPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a ORDER BY statement Cypher partial query. - -## LimitPartialQuery Objects - -```python -class LimitPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a LIMIT statement Cypher partial query. - -## SkipPartialQuery Objects - -```python -class SkipPartialQuery(PartialQuery) -``` - -#### construct\_query - -```python -def construct_query() -> str -``` - -Creates a SKIP statement Cypher partial query. - -## DeclarativeBase Objects - -```python -class DeclarativeBase(ABC) -``` - -#### match - -```python -def match(optional: bool = False) -> "DeclarativeBase" -``` - -Obtain data from the database by matching it to a given pattern. - -**Arguments**: - -- `optional` - A bool indicating if missing parts of the pattern will be - filled with null values. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### merge - -```python -def merge() -> "DeclarativeBase" -``` - -Ensure that a pattern you are looking for exists in the database. -This means that if the pattern is not found, it will be created. In a -way, this clause is like a combination of MATCH and CREATE. - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### create - -```python -def create() -> "DeclarativeBase" -``` - -Create nodes and relationships in a graph. - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### call - -```python -def call(procedure: str, arguments: Optional[str] = None) -> "DeclarativeBase" -``` - -Call a query module procedure. 
- -**Arguments**: - -- `procedure` - A string representing the name of the procedure in the - format `query_module.procedure`. -- `arguments` - A string representing the arguments of the procedure in - text format. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### node - -```python -def node(labels: Union[str, List[str], None] = "", variable: Optional[str] = None, node: Optional["Node"] = None, **kwargs, ,) -> "DeclarativeBase" -``` - -Add a node pattern to the query. - -**Arguments**: - -- `labels` - A string or list of strings representing the labels of the - node. -- `variable` - A string representing the name of the variable for storing - results of the node pattern. -- `node` - A `Node` object to construct the pattern from. -- `**kwargs` - Arguments representing the properties of the node. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### to - -```python -def to(edge_label: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, **kwargs, ,) -> "DeclarativeBase" -``` - -Add a relationship pattern to the query. - -**Arguments**: - -- `edge_label` - A string representing the type of the relationship. -- `directed` - A bool indicating if the relationship is directed. -- `variable` - A string representing the name of the variable for storing - results of the relationship pattern. -- `relationship` - A `Relationship` object to construct the pattern from. -- `**kwargs` - Arguments representing the properties of the relationship. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### from\_ - -```python -def from_(edge_label: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, **kwargs, ,) -> "Match" -``` - -Add a relationship pattern to the query. - -**Arguments**: - -- `edge_label` - A string representing the type of the relationship. -- `directed` - A bool indicating if the relationship is directed. -- `variable` - A string representing the name of the variable for storing - results of the relationship pattern. -- `relationship` - A `Relationship` object to construct the pattern from. -- `**kwargs` - Arguments representing the properties of the relationship. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### where - -```python -def where(item: str, operator: str, value: Any) -> "DeclarativeBase" -``` - -Creates a WHERE statement Cypher partial query. - -#### and\_where - -```python -def and_where(item: str, operator: str, value: Any) -> "DeclarativeBase" -``` - -Creates a AND (expression) statement Cypher partial query. - -#### or\_where - -```python -def or_where(item: str, operator: str, value: Any) -> "DeclarativeBase" -``` - -Creates a OR (expression) statement Cypher partial query. - -#### xor\_where - -```python -def xor_where(item: str, operator: str, value: Any) -> "DeclarativeBase" -``` - -Creates a XOR (expression) statement Cypher partial query. - -#### unwind - -```python -def unwind(list_expression: str, variable: str) -> "DeclarativeBase" -``` - -Unwind a list of values as individual rows. - -**Arguments**: - -- `list_expression` - A list of strings representing the list of values. -- `variable` - A string representing the variable name for unwinding results. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. 
- -#### with\_ - -```python -def with_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" -``` - -Chain together parts of a query, piping the results from one to be -used as starting points or criteria in the next. - -**Arguments**: - -- `results` - A dictionary mapping variables in the first query with - aliases in the second query. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### union - -```python -def union(include_duplicates: Optional[bool] = True) -> "DeclarativeBase" -``` - -Combine the result of multiple queries. - -**Arguments**: - -- `include_duplicates` - A bool indicating if duplicates should be - included. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### delete - -```python -def delete(variable_expressions: List[str], detach: Optional[bool] = False) -> "DeclarativeBase" -``` - -Delete nodes and relationships from the database. - -**Arguments**: - -- `variable_expressions` - A list of strings indicating which nodes - and/or relationships should be removed. -- `detach` - A bool indicating if relationships should be deleted along - with a node. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### remove - -```python -def remove(items: List[str]) -> "DeclarativeBase" -``` - -Remove labels and properties from nodes and relationships. - -**Arguments**: - -- `items` - A list of strings indicating which labels and/or properties - should be removed. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### yield\_ - -```python -def yield_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" -``` - -Yield data from the query. - -**Arguments**: - -- `results` - A dictionary mapping items that are returned with alias - names. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### return\_ - -```python -def return_(results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase" -``` - -Return data from the query. - -**Arguments**: - -- `results` - A dictionary mapping items that are returned with alias - names. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### order\_by - -```python -def order_by(properties: str) -> "DeclarativeBase" -``` - -Order the results of the query. - -**Arguments**: - -- `properties` - A string representing how to order the results. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### limit - -```python -def limit(integer_expression: str) -> "DeclarativeBase" -``` - -Limit the number of records when returning results. - -**Arguments**: - -- `integer_expression` - An integer indicating how many records to limit - the results to. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### skip - -```python -def skip(integer_expression: str) -> "DeclarativeBase" -``` - -Skip a number of records when returning results. - -**Arguments**: - -- `integer_expression` - An integer indicating how many records to skip - in the results. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### add\_custom\_cypher - -```python -def add_custom_cypher(custom_cypher: str) -> "DeclarativeBase" -``` - -Inject custom Cypher code into the query. - -**Arguments**: - -- `custom_cypher` - A string representing the Cypher code to be injected - into the query. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. 
- -#### load\_csv - -```python -def load_csv(path: str, header: bool, row: str) -> "DeclarativeBase" -``` - -Load data from a CSV file by executing a Cypher query for each row. - -**Arguments**: - -- `path` - A string representing the path to the CSV file. -- `header` - A bool indicating if the CSV file starts with a header row. -- `row` - A string representing the name of the variable for iterating - over each row. - - -**Returns**: - - A `DeclarativeBase` instance for constructing queries. - -#### get\_single - -```python -def get_single(retrieve: str) -> Any -``` - -Returns a single result with a `retrieve` variable name. - -**Arguments**: - -- `retrieve` - A string representing the results variable to be returned. - - -**Returns**: - - An iterator of dictionaries containing the results of the query. - -#### execute - -```python -def execute() -> Iterator[Dict[str, Any]] -``` - -Executes the Cypher query and returns the results. - -**Returns**: - - An iterator of dictionaries containing the results of the query. - diff --git a/docs/reference/gqlalchemy/query_builders/declarative_base.md b/docs/reference/gqlalchemy/query_builders/declarative_base.md new file mode 100644 index 00000000..d1012e0f --- /dev/null +++ b/docs/reference/gqlalchemy/query_builders/declarative_base.md @@ -0,0 +1,1115 @@ +--- +sidebar_label: declarative_base +title: gqlalchemy.query_builders.declarative_base +--- + +## WhereConditionPartialQuery Objects + +```python +class WhereConditionPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Constructs a where partial query. + +## NodePartialQuery Objects + +```python +class NodePartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Constructs a node partial query. + +## RelationshipPartialQuery Objects + +```python +class RelationshipPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Constructs a relationship partial query. + +## UnwindPartialQuery Objects + +```python +class UnwindPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Constructs an unwind partial query. + +#### dict\_to\_alias\_statement + +```python +def dict_to_alias_statement(alias_dict: Dict[str, str]) -> str +``` + +Creates a string expression of alias statements from a dictionary of +expression, variable name dictionary. + +## \_ResultPartialQuery Objects + +```python +class _ResultPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a RETURN/YIELD/WITH statement Cypher partial query. + +## UnionPartialQuery Objects + +```python +class UnionPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a UNION statement Cypher partial query. + +## DeletePartialQuery Objects + +```python +class DeletePartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a DELETE statement Cypher partial query. + +## RemovePartialQuery Objects + +```python +class RemovePartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a REMOVE statement Cypher partial query. 
+ +## OrderByPartialQuery Objects + +```python +class OrderByPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a ORDER BY statement Cypher partial query. + +## LimitPartialQuery Objects + +```python +class LimitPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a LIMIT statement Cypher partial query. + +## SkipPartialQuery Objects + +```python +class SkipPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a SKIP statement Cypher partial query. + +## ForeachPartialQuery Objects + +```python +class ForeachPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Creates a FOREACH statement Cypher partial query. + +## SetPartialQuery Objects + +```python +class SetPartialQuery(PartialQuery) +``` + +#### construct\_query + +```python +def construct_query() -> str +``` + +Constructs a set partial query. + +## DeclarativeBase Objects + +```python +class DeclarativeBase(ABC) +``` + +#### match + +```python +def match(optional: bool = False) -> "DeclarativeBase" +``` + +Obtain data from the database by matching it to a given pattern. + +**Arguments**: + +- `optional` - A bool indicating if missing parts of the pattern will be + filled with null values. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. + + +**Examples**: + + Get all nodes with a certain label: + +- `Python` - `match().node(labels='Country', variable='c').return_(results='c').execute()` +- `Cypher` - `MATCH (c:Country) RETURN c;` + + Get a relationship of a certain type that connects two nodes with certain label: + +- `Python` - `match().node(labels='Town', variable='t').to(relationship_type='BELONGS_TO', variable='b').node(labels='Country', variable='c').return_(results='b').execute()` +- `Cypher` - `MATCH (t:Town)-[b:BELONGS_TO]->(c:Country) RETURN b;` + +#### merge + +```python +def merge() -> "DeclarativeBase" +``` + +Ensure that a pattern you are looking for exists in the database. +This means that if the pattern is not found, it will be created. In a +way, this clause is like a combination of MATCH and CREATE. + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. + + +**Example**: + + Merge node with properties: + +- `Python` - `merge().node(variable='city').where(item='city.name', operator=Operator.EQUAL, literal='London').return_(results='city').execute()` +- `Cypher` - `MERGE (city) WHERE city.name = 'London' RETURN city;` + +#### create + +```python +def create() -> "DeclarativeBase" +``` + +Create nodes and relationships in a graph. + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. + + +**Example**: + + Create a single node: + +- `Python` - `create().node(labels='Person', variable='p').return_(results='p').execute()` +- `Cypher` - `CREATE (p:Person) RETURN p;` + +#### call + +```python +def call(procedure: str, arguments: Optional[Union[str, Tuple[Union[str, int, float]]]] = None) -> "DeclarativeBase" +``` + +Call a query module procedure. + +**Arguments**: + +- `procedure` - A string representing the name of the procedure in the + format `query_module.procedure`. +- `arguments` - A string representing the arguments of the procedure in + text format. + + +**Returns**: + + A `DeclarativeBase` instance for constructing queries. 
+
+
+**Examples**:
+
+  Call procedure with no arguments:
+
+- `Python` - `call('pagerank.get').yield_().return_().execute()`
+- `Cypher` - `CALL pagerank.get() YIELD * RETURN *;`
+
+  Call procedure with arguments:
+
+- `Python` - `call('json_util.load_from_url', 'https://some-url.com').yield_('objects').return_(results='objects').execute()`
+- `Cypher` - `CALL json_util.load_from_url('https://some-url.com') YIELD objects RETURN objects;`
+
+#### node
+
+```python
+def node(labels: Union[str, List[str], None] = "", variable: Optional[str] = None, node: Optional["Node"] = None, **kwargs) -> "DeclarativeBase"
+```
+
+Add a node pattern to the query.
+
+**Arguments**:
+
+- `labels` - A string or list of strings representing the labels of the
+  node.
+- `variable` - A string representing the name of the variable for storing
+  results of the node pattern.
+- `node` - A `Node` object to construct the pattern from.
+- `**kwargs` - Arguments representing the properties of the node.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Create a node and return it:
+
+- `Python` - `create().node(labels='Person', variable='n', first_name='Kate').return_(results='n').execute()`
+- `Cypher` - `CREATE (n:Person {first_name: 'Kate'}) RETURN n;`
+
+#### to
+
+```python
+def to(relationship_type: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, algorithm: Optional[IntegratedAlgorithm] = None, **kwargs) -> "DeclarativeBase"
+```
+
+Add a relationship pattern to the query.
+
+**Arguments**:
+
+- `relationship_type` - A string representing the type of the relationship.
+- `directed` - A bool indicating if the relationship is directed.
+- `variable` - A string representing the name of the variable for storing
+  results of the relationship pattern.
+- `relationship` - A `Relationship` object to construct the pattern from.
+- `algorithm` - An algorithm object to use over graph data.
+- `**kwargs` - Arguments representing the properties of the relationship.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Match and return a relationship:
+
+- `Python` - `match().node(labels='Town', variable='t').to(relationship_type='BELONGS_TO', variable='b').node(labels='Country', variable='c').return_(results='b').execute()`
+- `Cypher` - `MATCH (t:Town)-[b:BELONGS_TO]->(c:Country) RETURN b;`
+
+#### from\_
+
+```python
+def from_(relationship_type: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, algorithm: Optional[IntegratedAlgorithm] = None, **kwargs) -> "Match"
+```
+
+Add a relationship pattern to the query.
+
+**Arguments**:
+
+- `relationship_type` - A string representing the type of the relationship.
+- `directed` - A bool indicating if the relationship is directed.
+- `variable` - A string representing the name of the variable for storing
+  results of the relationship pattern.
+- `relationship` - A `Relationship` object to construct the pattern from.
+- `algorithm` - An algorithm object to use over graph data.
+- `**kwargs` - Arguments representing the properties of the relationship.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Match and return a relationship:
+
+- `Python` - `match().node(labels='Country', variable='c').from_(relationship_type='BELONGS_TO', variable='b').node(labels='Town', variable='t').return_(results='b').execute()`
+- `Cypher` - `MATCH (c:Country)<-[b:BELONGS_TO]-(t:Town) RETURN b;`
+
+#### where
+
+```python
+def where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates a WHERE statement Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Raises**:
+
+- `GQLAlchemyLiteralAndExpressionMissing` - Raises an error when neither literal nor expression keyword arguments were provided.
+- `GQLAlchemyExtraKeywordArguments` - Raises an error when both literal and expression keyword arguments were provided.
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by the equality of `name` properties of two connected nodes.
+
+- `Python` - `match().node(variable='n').to().node(variable='m').where(item='n.name', operator=Operator.EQUAL, expression='m.name').return_()`
+- `Cypher` - `MATCH (n)-[]->(m) WHERE n.name = m.name RETURN *;`
+
+  Filtering query results by the node label.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').return_()`
+- `Cypher` - `MATCH (n) WHERE n:User RETURN *;`
+
+  Filtering query results by the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n.age > 18 RETURN *;`
+
+#### where\_not
+
+```python
+def where_not(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates a WHERE NOT statement Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Raises**:
+
+- `GQLAlchemyLiteralAndExpressionMissing` - Raises an error when neither literal nor expression keyword arguments were provided.
+- `GQLAlchemyExtraKeywordArguments` - Raises an error when both literal and expression keyword arguments were provided.
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by the equality of `name` properties of two connected nodes.
+
+- `Python` - `match().node(variable='n').to().node(variable='m').where_not(item='n.name', operator=Operator.EQUAL, expression='m.name').return_()`
+- `Cypher` - `MATCH (n)-[]->(m) WHERE NOT n.name = m.name RETURN *;`
+
+#### and\_where
+
+```python
+def and_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an AND statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').and_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User AND n.age > 18 RETURN *;`
+
+#### and\_not\_where
+
+```python
+def and_not_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an AND NOT statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').and_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User AND NOT n.age > 18 RETURN *;`
+
+#### or\_where
+
+```python
+def or_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an OR statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').or_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User OR n.age > 18 RETURN *;`
+
+#### or\_not\_where
+
+```python
+def or_not_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an OR NOT statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').or_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User OR NOT n.age > 18 RETURN *;`
+
+#### xor\_where
+
+```python
+def xor_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an XOR statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').xor_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User XOR n.age > 18 RETURN *;`
+
+#### xor\_not\_where
+
+```python
+def xor_not_where(item: str, operator: Operator, **kwargs) -> "DeclarativeBase"
+```
+
+Creates an XOR NOT statement as a part of WHERE Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An `Operator` enum or a string representing the operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Filtering query results by node label or the comparison of node property and literal.
+
+- `Python` - `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').xor_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
+- `Cypher` - `MATCH (n) WHERE n:User XOR NOT n.age > 18 RETURN *;`
+
+#### unwind
+
+```python
+def unwind(list_expression: str, variable: str) -> "DeclarativeBase"
+```
+
+Unwind a list of values as individual rows.
+
+**Arguments**:
+
+- `list_expression` - A string expression representing the list of values.
+- `variable` - A string representing the variable name for unwinding results.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+- `Python` - `unwind(list_expression="[1, 2, 3, null]", variable="x").return_(results=["x", ("'val'", "y")]).execute()`
+- `Cypher` - `UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y;`
+
+#### with\_
+
+```python
+def with_(results: Optional[
+    Union[
+        str,
+        Tuple[str, str],
+        Dict[str, str],
+        List[Union[str, Tuple[str, str]]],
+        Set[Union[str, Tuple[str, str]]],
+    ]
+] = None) -> "DeclarativeBase"
+```
+
+Chain together parts of a query, piping the results from one to be
+used as starting points or criteria in the next.
+
+**Arguments**:
+
+- `results` - A dictionary mapping variables in the first query to
+  aliases in the second query.
+
+
+**Raises**:
+
+- `GQLAlchemyResultQueryTypeError` - Raises an error when the provided argument is of wrong type.
+- `GQLAlchemyTooLargeTupleInResultQuery` - Raises an error when the given tuple has length larger than 2.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Pipe the result from the first part of the query for further use:
+
+- `Python` - `match().node(variable='n').with_('n').execute()`
+- `Cypher` - `MATCH (n) WITH n;`
+
+#### union
+
+```python
+def union(include_duplicates: Optional[bool] = True) -> "DeclarativeBase"
+```
+
+Combine the results of multiple queries.
+
+**Arguments**:
+
+- `include_duplicates` - A bool indicating if duplicates should be
+  included.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Examples**:
+
+  Combine queries and retain duplicates:
+
+- `Python` - `match().node(variable="c", labels="Country").return_(results=("c.name", "columnName")).union().match().node(variable="p", labels="Person").return_(results=("p.name", "columnName")).execute()`
+- `Cypher` - `MATCH (c:Country) RETURN c.name AS columnName UNION ALL MATCH (p:Person) RETURN p.name AS columnName;`
+
+  Combine queries and remove duplicates:
+
+- `Python` - `match().node(variable="c", labels="Country").return_(results=("c.name", "columnName")).union(include_duplicates=False).match().node(variable="p", labels="Person").return_(results=("p.name", "columnName")).execute()`
+- `Cypher` - `MATCH (c:Country) RETURN c.name AS columnName UNION MATCH (p:Person) RETURN p.name AS columnName;`
+
+#### delete
+
+```python
+def delete(variable_expressions: Union[str, List[str]], detach: Optional[bool] = False) -> "DeclarativeBase"
+```
+
+Delete nodes and relationships from the database.
+
+**Arguments**:
+
+- `variable_expressions` - A string or list of strings indicating which node(s)
+  and/or relationship(s) should be removed.
+- `detach` - A bool indicating if relationships should be deleted along
+  with a node.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Delete a node:
+
+- `Python` - `match().node(labels='Node1', variable='n1').delete(variable_expressions='n1').execute()`
+- `Cypher` - `MATCH (n1:Node1) DELETE n1;`
+
+#### remove
+
+```python
+def remove(items: Union[str, List[str]]) -> "DeclarativeBase"
+```
+
+Remove labels and properties from nodes and relationships.
+
+**Arguments**:
+
+- `items` - A string or list of strings indicating which label(s) and/or properties
+  should be removed.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Remove a property from a node:
+
+- `Python` - `match().node(labels='Country', variable='n', name='United Kingdom').remove(items='n.name').return_(results='n').execute()`
+- `Cypher` - `MATCH (n:Country {name: 'United Kingdom'}) REMOVE n.name RETURN n;`
+
+#### yield\_
+
+```python
+def yield_(results: Optional[
+    Union[
+        str,
+        Tuple[str, str],
+        Dict[str, str],
+        List[Union[str, Tuple[str, str]]],
+        Set[Union[str, Tuple[str, str]]],
+    ]
+] = None) -> "DeclarativeBase"
+```
+
+Yield data from the query.
+
+**Arguments**:
+
+- `results` - A dictionary mapping the returned items to alias names.
+
+
+**Raises**:
+
+- `GQLAlchemyResultQueryTypeError` - Raises an error when the provided argument is of wrong type.
+- `GQLAlchemyTooLargeTupleInResultQuery` - Raises an error when the given tuple has length larger than 2.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Examples**:
+
+  Yield all data from a query:
+
+- `Python` - `call(procedure='pagerank.get').yield_().return_().execute()`
+- `Cypher` - `CALL pagerank.get() YIELD * RETURN *;`
+
+  Yield some data from a query:
+
+- `Python` - `call(procedure='pagerank.get').yield_(results=['node', 'rank']).return_(results=['node','rank']).execute()`
+- `Cypher` - `CALL pagerank.get() YIELD node, rank RETURN node, rank;`
+
+#### return\_
+
+```python
+def return_(results: Optional[
+    Union[
+        str,
+        Tuple[str, str],
+        Dict[str, str],
+        List[Union[str, Tuple[str, str]]],
+        Set[Union[str, Tuple[str, str]]],
+    ]
+] = None) -> "DeclarativeBase"
+```
+
+Return data from the query.
+
+**Arguments**:
+
+- `results` - An optional string, tuple or iterable of strings and tuples for alias names.
+
+
+**Raises**:
+
+- `GQLAlchemyResultQueryTypeError` - Raises an error when the provided argument is of wrong type.
+- `GQLAlchemyTooLargeTupleInResultQuery` - Raises an error when the given tuple has length larger than 2.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Examples**:
+
+  Return all variables from a query:
+
+- `Python` - `match().node(labels='Person', variable='p').return_().execute()`
+- `Cypher` - `MATCH (p:Person) RETURN *;`
+
+  Return specific variables from a query:
+
+- `Python` - `match().node(labels='Person', variable='p1').to().node(labels='Person', variable='p2').return_(results=[('p1','first'), 'p2']).execute()`
+- `Cypher` - `MATCH (p1:Person)-[]->(p2:Person) RETURN p1 AS first, p2;`
+
+#### order\_by
+
+```python
+def order_by(properties: Union[str, Tuple[str, Order], List[Union[str, Tuple[str, Order]]]]) -> "DeclarativeBase"
+```
+
+Creates an ORDER BY statement Cypher partial query.
+
+**Arguments**:
+
+- `properties` - Properties and order (DESC/DESCENDING/ASC/ASCENDING) by which the query results will be ordered.
+
+
+**Raises**:
+
+- `GQLAlchemyOrderByTypeError` - Raises an error when the given property is neither string nor tuple.
+- `GQLAlchemyMissingOrder` - Raises an error when the given tuple is missing a valid ordering.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Examples**:
+
+  Ordering query results by the property `n.name` in ascending order
+  and by the property `n.last_name` in descending order:
+
+- `Python` - `match().node(variable='n').return_().order_by(properties=['n.name', ('n.last_name', Order.DESC)]).execute()`
+- `Cypher` - `MATCH (n) RETURN * ORDER BY n.name, n.last_name DESC;`
+
+#### limit
+
+```python
+def limit(integer_expression: Union[str, int]) -> "DeclarativeBase"
+```
+
+Limit the number of records when returning results.
+
+**Arguments**:
+
+- `integer_expression` - An integer indicating how many records to limit
+  the results to.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Limit the number of returned results:
+
+- `Python` - `match().node(labels='Person', variable='p').return_().limit(integer_expression='10').execute()`
+- `Cypher` - `MATCH (p:Person) RETURN * LIMIT 10;`
+
+#### skip
+
+```python
+def skip(integer_expression: Union[str, int]) -> "DeclarativeBase"
+```
+
+Skip a number of records when returning results.
+
+**Arguments**:
+
+- `integer_expression` - An integer indicating how many records to skip
+  in the results.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  Skip the first result:
+
+- `Python` - `match().node(variable='n').return_(results='n').skip(integer_expression='1').execute()`
+- `Cypher` - `MATCH (n) RETURN n SKIP 1;`
+
+#### add\_custom\_cypher
+
+```python
+def add_custom_cypher(custom_cypher: str) -> "DeclarativeBase"
+```
+
+Inject custom Cypher code into the query.
+
+**Arguments**:
+
+- `custom_cypher` - A string representing the Cypher code to be injected
+  into the query.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+#### get\_single
+
+```python
+def get_single(retrieve: str) -> Any
+```
+
+Returns a single result with a `retrieve` variable name.
+
+**Arguments**:
+
+- `retrieve` - A string representing the results variable to be returned.
+
+
+**Returns**:
+
+  An iterator of dictionaries containing the results of the query.
+
+#### foreach
+
+```python
+def foreach(variable: str, expression: str, update_clause: Union[str, List[str], Set[str]]) -> "DeclarativeBase"
+```
+
+Iterate over a list of elements and, for every iteration, run every update clause.
+
+**Arguments**:
+
+- `variable` - The variable name that stores each element.
+- `expression` - Any expression that results in a list.
+- `update_clause` - One or more Cypher update clauses:
+  SET, REMOVE, CREATE, MERGE, DELETE, FOREACH.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Example**:
+
+  For each number in a list, create a node:
+
+- `Python` - `update_clause = QueryBuilder().create().node(variable="n", id=PropertyVariable("i"))`
+  `query_builder = QueryBuilder().foreach("i", "[1, 2, 3]", update_clause.construct_query())`
+- `Cypher` - `FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) )`
+
+#### set\_
+
+```python
+def set_(item: str, operator: Operator, **kwargs)
+```
+
+Creates a SET statement Cypher partial query.
+
+**Arguments**:
+
+- `item` - A string representing a variable or a property.
+- `operator` - An assignment, increment or label filter operator.
+
+  Kwargs:
+- `literal` - A value that will be converted to a Cypher value, such as an int, float or string.
+- `expression` - A node label or property that won't be converted to a Cypher value (no additional quotes will be added).
+
+
+**Raises**:
+
+- `GQLAlchemyLiteralAndExpressionMissing` - Raises an error when neither literal nor expression keyword arguments were provided.
+- `GQLAlchemyExtraKeywordArguments` - Raises an error when both literal and expression keyword arguments were provided.
+
+
+**Returns**:
+
+- `self` - A partial Cypher query built from the given parameters.
+
+
+**Examples**:
+
+  Set or update a property.
+
+- `Python` - `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n.population', operator=Operator.ASSIGNMENT, literal=83000001).return_().execute()`
+- `Cypher` - `MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 RETURN *;`
+
+  Set or update multiple properties.
+
+- `Python` - `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n.population', operator=Operator.ASSIGNMENT, literal=83000001).set_(item='n.capital', operator=Operator.ASSIGNMENT, literal='Berlin').return_().execute()`
+- `Cypher` - `MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 SET n.capital = 'Berlin' RETURN *;`
+
+  Set node label.
+
+- `Python` - `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n', operator=Operator.LABEL_FILTER, expression='Land').return_().execute()`
+- `Cypher` - `MATCH (n) WHERE n.name = 'Germany' SET n:Land RETURN *;`
+
+  Replace all properties using map.
+
+- `Python` - `match().node(variable='c', labels='Country').where(item='c.name', operator=Operator.EQUAL, literal='Germany').set_(item='c', operator=Operator.ASSIGNMENT, literal={'name': 'Germany', 'population': '85000000'}).return_().execute()`
+- `Cypher` - `MATCH (c:Country) WHERE c.name = 'Germany' SET c = {name: 'Germany', population: '85000000'} RETURN *;`
+
+  Update all properties using map.
+
+- `Python` - `match().node(variable='c', labels='Country').where(item='c.name', operator=Operator.EQUAL, literal='Germany').set_(item='c', operator=Operator.INCREMENT, literal={'name': 'Germany', 'population': '85000000'}).return_().execute()`
+- `Cypher` - `MATCH (c:Country) WHERE c.name = 'Germany' SET c += {name: 'Germany', population: '85000000'} RETURN *;`
+
+#### execute
+
+```python
+def execute() -> Iterator[Dict[str, Any]]
+```
+
+Executes the Cypher query and returns the results.
+
+**Returns**:
+
+  An iterator of dictionaries containing the results of the query.
+
diff --git a/docs/reference/gqlalchemy/query_builders/memgraph_query_builder.md b/docs/reference/gqlalchemy/query_builders/memgraph_query_builder.md
new file mode 100644
index 00000000..35e6e127
--- /dev/null
+++ b/docs/reference/gqlalchemy/query_builders/memgraph_query_builder.md
@@ -0,0 +1,44 @@
+---
+sidebar_label: memgraph_query_builder
+title: gqlalchemy.query_builders.memgraph_query_builder
+---
+
+## QueryBuilder Objects
+
+```python
+class QueryBuilder(DeclarativeBase)
+```
+
+#### load\_csv
+
+```python
+def load_csv(path: str, header: bool, row: str) -> "DeclarativeBase"
+```
+
+Load data from a CSV file by executing a Cypher query for each row.
+
+**Arguments**:
+
+- `path` - A string representing the path to the CSV file.
+- `header` - A bool indicating if the CSV file starts with a header row.
+- `row` - A string representing the name of the variable for iterating
+  over each row.
+
+
+**Returns**:
+
+  A `DeclarativeBase` instance for constructing queries.
+
+
+**Examples**:
+
+  Load CSV with header:
+
+- `Python` - `load_csv(path="path/to/my/file.csv", header=True, row="row").return_().execute()`
+- `Cypher` - `LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN *;`
+
+  Load CSV without header:
+
+- `Python` - `load_csv(path='path/to/my/file.csv', header=False, row='row').return_().execute()`
+- `Cypher` - `LOAD CSV FROM 'path/to/my/file.csv' NO HEADER AS row RETURN *;`
+
diff --git a/docs/reference/gqlalchemy/utilities.md b/docs/reference/gqlalchemy/utilities.md
index 87ec246d..3e362cb9 100644
--- a/docs/reference/gqlalchemy/utilities.md
+++ b/docs/reference/gqlalchemy/utilities.md
@@ -27,3 +27,20 @@
 def to_cypher_labels(labels: Union[str, List[str], None]) -> str
 ```
 
 Converts labels to a Cypher label definition.
 
+#### to\_cypher\_qm\_arguments
+
+```python
+def to_cypher_qm_arguments(arguments: Optional[Union[str, Tuple[Union[str, int, float]]]]) -> str
+```
+
+Converts query module arguments to a valid Cypher string of query module arguments.
+
+## PropertyVariable Objects
+
+```python
+class PropertyVariable()
+```
+
+Class that supports using a variable as a node or edge property. Used
+to avoid the quotes that would otherwise be added around property values.
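+
+A brief illustration of the difference, reusing the `foreach` example from the
+query-builder reference (the rendered Cypher shown in the comment is indicative):
+
+```python
+from gqlalchemy import QueryBuilder
+from gqlalchemy.utilities import PropertyVariable
+
+# With a plain value, `id` would be rendered as a quoted literal;
+# PropertyVariable("i") renders it verbatim, so it can reference the
+# FOREACH iteration variable `i`.
+update_clause = QueryBuilder().create().node(variable="n", id=PropertyVariable("i"))
+query = QueryBuilder().foreach("i", "[1, 2, 3]", update_clause.construct_query())
+# -> FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) )
+```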
+
diff --git a/docs/reference/gqlalchemy/memgraph.md b/docs/reference/gqlalchemy/vendors/database_client.md
similarity index 51%
rename from docs/reference/gqlalchemy/memgraph.md
rename to docs/reference/gqlalchemy/vendors/database_client.md
index a968f7e2..d0f00322 100644
--- a/docs/reference/gqlalchemy/memgraph.md
+++ b/docs/reference/gqlalchemy/vendors/database_client.md
@@ -1,12 +1,12 @@
 ---
-sidebar_label: memgraph
-title: gqlalchemy.memgraph
+sidebar_label: database_client
+title: gqlalchemy.vendors.database_client
 ---
 
-## Memgraph Objects
+## DatabaseClient Objects
 
 ```python
-class Memgraph()
+class DatabaseClient(ABC)
 ```
 
 #### execute\_and\_fetch
@@ -28,98 +28,77 @@
 Executes Cypher query without returning any results.
 
 #### create\_index
 
 ```python
-def create_index(index: MemgraphIndex) -> None
+def create_index(index: Index) -> None
 ```
 
-Creates an index (label or label-property type) in the database
+Creates an index (label or label-property type) in the database.
 
 #### drop\_index
 
 ```python
-def drop_index(index: MemgraphIndex) -> None
+def drop_index(index: Index) -> None
 ```
 
-Drops an index (label or label-property type) in the database
+Drops an index (label or label-property type) in the database.
 
 #### get\_indexes
 
 ```python
-def get_indexes() -> List[MemgraphIndex]
+@abstractmethod
+def get_indexes() -> List[Index]
 ```
 
-Returns a list of all database indexes (label and label-property types)
+Returns a list of all database indexes (label and label-property types).
 
 #### ensure\_indexes
 
 ```python
-def ensure_indexes(indexes: List[MemgraphIndex]) -> None
+@abstractmethod
+def ensure_indexes(indexes: List[Index]) -> None
 ```
 
-Ensures that database indexes match input indexes
+Ensures that database indexes match input indexes.
 
-#### create\_constraint
-
-```python
-def create_constraint(index: MemgraphConstraint) -> None
-```
-
-Creates a constraint (label or label-property type) in the database
-
-#### drop\_constraint
+#### drop\_indexes
 
 ```python
-def drop_constraint(index: MemgraphConstraint) -> None
+def drop_indexes() -> None
 ```
 
-Drops a constraint (label or label-property type) in the database
+Drops all indexes in the database.
 
-#### get\_constraints
-
-```python
-def get_constraints() -> List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]]
-```
-
-Returns a list of all database constraints (label and label-property types)
-
-#### ensure\_constraints
-
-```python
-def ensure_constraints(constraints: List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]]) -> None
-```
-
-Ensures that database constraints match input constraints
-
-#### create\_stream
+#### create\_constraint
 
 ```python
-def create_stream(stream: MemgraphStream) -> None
+def create_constraint(index: Constraint) -> None
 ```
 
-Create a stream
+Creates a constraint (label or label-property type) in the database.
 
-#### start\_stream
+#### drop\_constraint
 
 ```python
-def start_stream(stream: MemgraphStream) -> None
+def drop_constraint(index: Constraint) -> None
 ```
 
-Start a stream
+Drops a constraint (label or label-property type) in the database.
 
-#### get\_streams
+#### get\_constraints
 
 ```python
-def get_streams() -> List[str]
+@abstractmethod
+def get_constraints() -> List[Constraint]
 ```
 
-Returns a list of all streams
+Returns a list of all database constraints (label and label-property types).
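+
+The index and constraint API above is easiest to see end to end with the concrete
+`Memgraph` client documented below. A minimal sketch; the
+`MemgraphIndex("Person", "name")` label/property constructor and the default
+connection parameters are assumptions, not confirmed by this page:
+
+```python
+from gqlalchemy import Memgraph, MemgraphIndex
+
+db = Memgraph()  # assumed default connection: localhost:7687
+
+# Label-property index; MemgraphIndex("Person") alone would be a label index.
+index = MemgraphIndex("Person", "name")
+db.create_index(index)
+print(db.get_indexes())  # the new index should now be listed
+db.drop_index(index)
+```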
-#### drop\_stream
+#### ensure\_constraints
 
 ```python
-def drop_stream(stream: MemgraphStream) -> None
+def ensure_constraints(constraints: List[Constraint]) -> None
 ```
 
-Drop a stream
+Ensures that database constraints match input constraints.
 
 #### drop\_database
@@ -127,65 +106,16 @@
 def drop_database()
 ```
 
-Drops database by removing all nodes and edges
-
-#### create\_trigger
-
-```python
-def create_trigger(trigger: MemgraphTrigger) -> None
-```
-
-Creates a trigger
-
-#### get\_triggers
-
-```python
-def get_triggers() -> List[str]
-```
-
-Returns a list of all database triggers
-
-#### drop\_trigger
-
-```python
-def drop_trigger(trigger) -> None
-```
-
-Drop a trigger
-
-#### drop\_triggers
-
-```python
-def drop_triggers() -> None
-```
-
-Drops all triggers in the database
+Drops the database by removing all nodes and edges.
 
 #### new\_connection
 
 ```python
+@abstractmethod
 def new_connection() -> Connection
 ```
 
-Creates new Memgraph connection
-
-#### init\_disk\_storage
-
-```python
-def init_disk_storage(on_disk_db: OnDiskPropertyDatabase) -> None
-```
-
-Adds and OnDiskPropertyDatabase to Memgraph so that any property
-that has a Field(on_disk=True) can be stored to and loaded from
-an OnDiskPropertyDatabase.
-
-#### remove\_on\_disk\_storage
-
-```python
-def remove_on_disk_storage() -> None
-```
-
-Removes the OnDiskPropertyDatabase from Memgraph
+Creates a new database connection.
 
 #### get\_variable\_assume\_one
@@ -203,19 +133,20 @@
 If there is more than one result, raises a GQLAlchemyError.
 
 def create_node(node: Node) -> Optional[Node]
 ```
 
-Creates a node in Memgraph from the `node` object.
+Creates a node in the database from the `node` object.
 
 #### save\_node
 
 ```python
+@abstractmethod
 def save_node(node: Node) -> Node
 ```
 
-Saves node to Memgraph.
-If the node._id is not None it fetches the node with the same id from
-Memgraph and updates it's fields.
+Saves node to the database.
+If the node._id is not None, it fetches the node with the same id from
+the database and updates its fields.
 If the node has unique fields it fetches the nodes with the same unique
-fields from Memgraph and updates it's fields.
+fields from the database and updates its fields.
 Otherwise it creates a new node with the same properties.
 Null properties are ignored.
@@ -225,7 +156,7 @@
 def save_nodes(nodes: List[Node]) -> None
 ```
 
-Saves a list of nodes to Memgraph.
+Saves a list of nodes to the database.
 
 #### save\_node\_with\_id
@@ -233,20 +164,21 @@
 def save_node_with_id(node: Node) -> Optional[Node]
 ```
 
-Saves a node in Memgraph using the internal Memgraph id.
+Saves a node to the database using the internal id.
 
 #### load\_node
 
 ```python
+@abstractmethod
 def load_node(node: Node) -> Optional[Node]
 ```
 
-Loads a node from Memgraph.
-If the node._id is not None it fetches the node from Memgraph with that
+Loads a node from the database.
+If the node._id is not None, it fetches the node from the database with that
 internal id.
-If the node has unique fields it fetches the node from Memgraph with
+If the node has unique fields it fetches the node from the database with
 those unique fields set.
-Otherwise it tries to find any node in Memgraph that has all properties
+Otherwise it tries to find any node in the database that has all properties
 set to exactly the same values.
 If no node is found or no properties are set it raises a GQLAlchemyError.
@@ -256,7 +188,7 @@
 def load_node_with_all_properties(node: Node) -> Optional[Node]
 ```
 
-Loads a node from Memgraph with all equal property values.
+Loads a node from the database with all equal property values.
 
 #### load\_node\_with\_id
@@ -264,22 +196,23 @@
 def load_node_with_id(node: Node) -> Optional[Node]
 ```
 
-Loads a node with the same internal Memgraph id.
+Loads a node with the same internal database id.
 
 #### load\_relationship
 
 ```python
+@abstractmethod
 def load_relationship(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Returns a relationship loaded from Memgraph.
-If the relationship._id is not None it fetches the relationship from
-Memgraph that has the same internal id.
+Returns a relationship loaded from the database.
+If the relationship._id is not None, it fetches the relationship from
+the database that has the same internal id.
 Otherwise it returns the relationship whose relationship._start_node_id
 and relationship._end_node_id and all relationship properties that
-are not None match the relationship in Memgraph.
-If there is no relationship like that in Memgraph, or if there are
-multiple relationships like that in Memgraph, throws GQLAlchemyError.
+are not None match the relationship in the database.
+If there is no relationship like that in the database, or if there are
+multiple relationships like that in the database, throws GQLAlchemyError.
 
 #### load\_relationship\_with\_id
@@ -287,7 +220,7 @@
 def load_relationship_with_id(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Loads a relationship from Memgraph using the internal id.
+Loads a relationship from the database using the internal id.
 
 #### load\_relationship\_with\_start\_node\_id\_and\_end\_node\_id
@@ -295,17 +228,18 @@
 def load_relationship_with_start_node_id_and_end_node_id(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Loads a relationship from Memgraph using start node and end node id
+Loads a relationship from the database using start node and end node id
 for which all properties of the relationship that are not None match.
 
 #### save\_relationship
 
 ```python
+@abstractmethod
 def save_relationship(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Saves a relationship to Memgraph.
-If relationship._id is not None it finds the relationship in Memgraph
+Saves a relationship to the database.
+If relationship._id is not None it finds the relationship in the database
 and updates it's properties with the values in `relationship`.
 If relationship._id is None, it creates a new relationship.
 If you want to set a relationship._id instead of creating a new
 relationship, use `load_relationship` first.
@@ -317,7 +251,7 @@
 def save_relationships(relationships: List[Relationship]) -> None
 ```
 
-Saves a list of relationships to Memgraph.
+Saves a list of relationships to the database.
 
 #### save\_relationship\_with\_id
@@ -325,7 +259,7 @@
 def save_relationship_with_id(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Saves a relationship in Memgraph using the relationship._id.
+Saves a relationship to the database using the relationship._id.
 
 #### create\_relationship
@@ -333,5 +267,5 @@
 def create_relationship(relationship: Relationship) -> Optional[Relationship]
 ```
 
-Creates a new relationship in Memgraph.
+Creates a new relationship in the database.
 
diff --git a/docs/reference/gqlalchemy/vendors/memgraph.md b/docs/reference/gqlalchemy/vendors/memgraph.md
new file mode 100644
index 00000000..16b902a7
--- /dev/null
+++ b/docs/reference/gqlalchemy/vendors/memgraph.md
@@ -0,0 +1,246 @@
+---
+sidebar_label: memgraph
+title: gqlalchemy.vendors.memgraph
+---
+
+## Memgraph Objects
+
+```python
+class Memgraph(DatabaseClient)
+```
+
+#### get\_indexes
+
+```python
+def get_indexes() -> List[MemgraphIndex]
+```
+
+Returns a list of all database indexes (label and label-property types).
+
+#### ensure\_indexes
+
+```python
+def ensure_indexes(indexes: List[MemgraphIndex]) -> None
+```
+
+Ensures that database indexes match input indexes.
+
+#### get\_constraints
+
+```python
+def get_constraints() -> List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]]
+```
+
+Returns a list of all database constraints (label and label-property types).
+
+#### new\_connection
+
+```python
+def new_connection() -> Connection
+```
+
+Creates a new Memgraph connection.
+
+#### create\_stream
+
+```python
+def create_stream(stream: MemgraphStream) -> None
+```
+
+Create a stream.
+
+#### start\_stream
+
+```python
+def start_stream(stream: MemgraphStream) -> None
+```
+
+Start a stream.
+
+#### get\_streams
+
+```python
+def get_streams() -> List[str]
+```
+
+Returns a list of all streams.
+
+#### drop\_stream
+
+```python
+def drop_stream(stream: MemgraphStream) -> None
+```
+
+Drop a stream.
+
+#### create\_trigger
+
+```python
+def create_trigger(trigger: MemgraphTrigger) -> None
+```
+
+Creates a trigger.
+
+#### get\_triggers
+
+```python
+def get_triggers() -> List[str]
+```
+
+Returns a list of all database triggers.
+
+#### drop\_trigger
+
+```python
+def drop_trigger(trigger: MemgraphTrigger) -> None
+```
+
+Drop a trigger.
+
+#### drop\_triggers
+
+```python
+def drop_triggers() -> None
+```
+
+Drops all triggers in the database.
+
+#### init\_disk\_storage
+
+```python
+def init_disk_storage(on_disk_db: OnDiskPropertyDatabase) -> None
+```
+
+Adds an OnDiskPropertyDatabase to the database so that any property
+that has a Field(on_disk=True) can be stored to and loaded from
+an OnDiskPropertyDatabase.
+
+#### remove\_on\_disk\_storage
+
+```python
+def remove_on_disk_storage() -> None
+```
+
+Removes the OnDiskPropertyDatabase from the database.
+
+#### save\_node
+
+```python
+def save_node(node: Node) -> Node
+```
+
+Saves node to the database.
+If the node._id is not None it fetches the node with the same id from
+the database and updates its fields.
+If the node has unique fields it fetches the nodes with the same unique
+fields from the database and updates its fields.
+Otherwise it creates a new node with the same properties.
+Null properties are ignored.
+
+#### load\_node
+
+```python
+def load_node(node: Node) -> Optional[Node]
+```
+
+Loads a node from the database.
+If the node._id is not None it fetches the node from the database with that
+internal id.
+If the node has unique fields it fetches the node from the database with
+those unique fields set.
+Otherwise it tries to find any node in the database that has all properties
+set to exactly the same values.
+If no node is found or no properties are set it raises a GQLAlchemyError.
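+
+To make the save/load contract above concrete, a small sketch. The `Person`
+subclass is hypothetical, and the default `Memgraph()` connection parameters
+are assumed:
+
+```python
+from typing import Optional
+from gqlalchemy import Memgraph, Node
+
+class Person(Node):
+    name: Optional[str]
+
+db = Memgraph()
+
+# No _id and no unique fields are set, so save_node creates a new node ...
+saved = db.save_node(Person(name="Kate"))
+# ... and load_node then finds it by matching all properties that are set.
+loaded = db.load_node(Person(name="Kate"))
+assert loaded._id == saved._id
+```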
+
+#### load\_relationship
+
+```python
+def load_relationship(relationship: Relationship) -> Optional[Relationship]
+```
+
+Returns a relationship loaded from the database.
+If the relationship._id is not None it fetches the relationship from
+the database that has the same internal id.
+Otherwise it returns the relationship whose relationship._start_node_id
+and relationship._end_node_id and all relationship properties that
+are not None match the relationship in the database.
+If there is no relationship like that in the database, or if there are
+multiple relationships like that in the database, throws GQLAlchemyError.
+
+#### save\_relationship
+
+```python
+def save_relationship(relationship: Relationship) -> Optional[Relationship]
+```
+
+Saves a relationship to the database.
+If relationship._id is not None it finds the relationship in the database
+and updates its properties with the values in `relationship`.
+If relationship._id is None, it creates a new relationship.
+If you want to set a relationship._id instead of creating a new
+relationship, use `load_relationship` first.
+
+#### get\_procedures
+
+```python
+def get_procedures(starts_with: Optional[str] = None, update: bool = False) -> List["QueryModule"]
+```
+
+Return query procedures.
+
+Maintains a list of query modules in the Memgraph object. If starts_with
+is defined, then only the modules that start with the starts_with string are returned.
+
+**Arguments**:
+
+- `starts_with` - Return those modules that start with this string.
+  (Optional)
+- `update` - Whether to update the list of modules in
+  self.query_modules. (Optional)
+
+#### add\_query\_module
+
+```python
+def add_query_module(file_path: str, module_name: str) -> "Memgraph"
+```
+
+Function for adding a query module written in Python to Memgraph.
+Examples can be found in the functions below (with_kafka_stream, with_power_bi).
+
+The module is then synced dynamically with the database to enable higher processing
+capabilities.
+
+**Arguments**:
+
+- `file_path` _str_ - path to the file containing the module.
+- `module_name` _str_ - name of the module.
+
+
+**Returns**:
+
+- `Memgraph` - Memgraph object.
+
+#### with\_kafka\_stream
+
+```python
+def with_kafka_stream() -> "Memgraph"
+```
+
+Load the Kafka stream query module.
+
+**Returns**:
+
+- `Memgraph` - Memgraph instance.
+
+#### with\_power\_bi
+
+```python
+def with_power_bi() -> "Memgraph"
+```
+
+Load the power_bi stream query module.
+
+**Returns**:
+
+- `Memgraph` - Memgraph instance.
+
diff --git a/docs/reference/gqlalchemy/vendors/neo4j.md b/docs/reference/gqlalchemy/vendors/neo4j.md
new file mode 100644
index 00000000..b94e9d0d
--- /dev/null
+++ b/docs/reference/gqlalchemy/vendors/neo4j.md
@@ -0,0 +1,100 @@
+---
+sidebar_label: neo4j
+title: gqlalchemy.vendors.neo4j
+---
+
+## Neo4j Objects
+
+```python
+class Neo4j(DatabaseClient)
+```
+
+#### get\_indexes
+
+```python
+def get_indexes() -> List[Neo4jIndex]
+```
+
+Returns a list of all database indexes (label and label-property types).
+
+#### ensure\_indexes
+
+```python
+def ensure_indexes(indexes: List[Neo4jIndex]) -> None
+```
+
+Ensures that database indexes match input indexes.
+
+#### get\_constraints
+
+```python
+def get_constraints() -> List[Union[Neo4jConstraintExists, Neo4jConstraintUnique]]
+```
+
+Returns a list of all database constraints (label and label-property types).
+
+#### new\_connection
+
+```python
+def new_connection() -> Connection
+```
+
+Creates a new Neo4j connection.
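+
+A hedged connection sketch; the constructor arguments shown (host, port,
+username, password) are assumptions mirroring common client parameters and are
+not confirmed by this reference:
+
+```python
+from gqlalchemy import Neo4j
+
+# Parameters are illustrative assumptions; adjust them to your deployment.
+db = Neo4j(host="localhost", port=7687, username="neo4j", password="test")
+
+# The inherited DatabaseClient API documented above works unchanged:
+for record in db.execute_and_fetch("MATCH (n) RETURN n LIMIT 5"):
+    print(record["n"])
+```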
+
+#### save\_node
+
+```python
+def save_node(node: Node) -> Node
+```
+
+Saves node to the database.
+If the node._id is not None it fetches the node with the same id from
+the database and updates its fields.
+If the node has unique fields it fetches the nodes with the same unique
+fields from the database and updates its fields.
+Otherwise it creates a new node with the same properties.
+Null properties are ignored.
+
+#### load\_node
+
+```python
+def load_node(node: Node) -> Optional[Node]
+```
+
+Loads a node from the database.
+If the node._id is not None it fetches the node from the database with that
+internal id.
+If the node has unique fields it fetches the node from the database with
+those unique fields set.
+Otherwise it tries to find any node in the database that has all properties
+set to exactly the same values.
+If no node is found or no properties are set it raises a GQLAlchemyError.
+
+#### load\_relationship
+
+```python
+def load_relationship(relationship: Relationship) -> Optional[Relationship]
+```
+
+Returns a relationship loaded from the database.
+If the relationship._id is not None it fetches the relationship from
+the database that has the same internal id.
+Otherwise it returns the relationship whose relationship._start_node_id
+and relationship._end_node_id and all relationship properties that
+are not None match the relationship in the database.
+If there is no relationship like that in the database, or if there are
+multiple relationships like that in the database, throws GQLAlchemyError.
+
+#### save\_relationship
+
+```python
+def save_relationship(relationship: Relationship) -> Optional[Relationship]
+```
+
+Saves a relationship to the database.
+If relationship._id is not None it finds the relationship in the database
+and updates its properties with the values in `relationship`.
+If relationship._id is None, it creates a new relationship.
+If you want to set a relationship._id instead of creating a new
+relationship, use `load_relationship` first.
+
diff --git a/docs/reference/sidebar.json b/docs/reference/sidebar.json
index 8d75be5f..573947dc 100644
--- a/docs/reference/sidebar.json
+++ b/docs/reference/sidebar.json
@@ -2,11 +2,36 @@
   "items": [
     {
       "items": [
+        {
+          "items": [
+            "reference/gqlalchemy/graph_algorithms/integrated_algorithms",
+            "reference/gqlalchemy/graph_algorithms/query_builder",
+            "reference/gqlalchemy/graph_algorithms/query_modules"
+          ],
+          "label": "gqlalchemy.graph_algorithms",
+          "type": "category"
+        },
+        {
+          "items": [
+            "reference/gqlalchemy/query_builders/declarative_base",
+            "reference/gqlalchemy/query_builders/memgraph_query_builder"
+          ],
+          "label": "gqlalchemy.query_builders",
+          "type": "category"
+        },
+        {
+          "items": [
+            "reference/gqlalchemy/vendors/database_client",
+            "reference/gqlalchemy/vendors/memgraph",
+            "reference/gqlalchemy/vendors/neo4j"
+          ],
+          "label": "gqlalchemy.vendors",
+          "type": "category"
+        },
         "reference/gqlalchemy/disk_storage",
         "reference/gqlalchemy/instance_runner",
-        "reference/gqlalchemy/memgraph",
+        "reference/gqlalchemy/loaders",
         "reference/gqlalchemy/models",
-        "reference/gqlalchemy/query_builder",
         "reference/gqlalchemy/transformations",
         "reference/gqlalchemy/utilities"
       ],
diff --git a/gqlalchemy/__init__.py b/gqlalchemy/__init__.py
index 172e2a03..56beb29a 100644
--- a/gqlalchemy/__init__.py
+++ b/gqlalchemy/__init__.py
@@ -12,41 +12,55 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .memgraph import Memgraph # noqa F401 -from .models import ( # noqa F401 +import warnings + +from pydantic import validator # noqa F401 + +from gqlalchemy.models import ( # noqa F401 MemgraphConstraintExists, MemgraphConstraintUnique, MemgraphIndex, MemgraphKafkaStream, MemgraphPulsarStream, MemgraphTrigger, + Neo4jConstraintUnique, + Neo4jIndex, Node, Path, Relationship, + Field, +) +from gqlalchemy.disk_storage import SQLitePropertyDatabase # noqa F401 +from gqlalchemy.instance_runner import ( # noqa F401 + DockerImage, + MemgraphInstanceBinary, + MemgraphInstanceDocker, + wait_for_docker_container, + wait_for_port, ) -from .disk_storage import SQLitePropertyDatabase # noqa F401 -from .query_builder import ( # noqa F401 +from gqlalchemy.exceptions import GQLAlchemyError, GQLAlchemyWarning # noqa F401 + +from gqlalchemy.query_builders import ( # noqa F401 + neo4j_query_builder, + memgraph_query_builder, + memgraph_query_builder as query_builder, +) +from gqlalchemy.query_builders.declarative_base import ( # noqa F401 Call, Create, + Foreach, InvalidMatchChainException, + Return, Match, Merge, NoVariablesMatchedException, - QueryBuilder, Unwind, With, ) -from .instance_runner import ( # noqa F401 - DockerImage, - MemgraphInstanceBinary, - MemgraphInstanceDocker, - wait_for_docker_container, - wait_for_port, -) - -from .exceptions import GQLAlchemyWarning, GQLAlchemyError # noqa F401 -from pydantic import Field, validator # noqa F401 -import warnings +from gqlalchemy.query_builders.memgraph_query_builder import LoadCsv, QueryBuilder +from gqlalchemy.query_builders.neo4j_query_builder import Neo4jQueryBuilder # noqa F401 +from gqlalchemy.vendors.memgraph import Memgraph # noqa F401 +from gqlalchemy.vendors.neo4j import Neo4j # noqa F401 warnings.filterwarnings("once", category=GQLAlchemyWarning) __all__ = ["Memgraph"] @@ -57,3 +71,7 @@ merge = Merge unwind = Unwind with_ = With +foreach = Foreach +return_ = Return +load_csv = LoadCsv +MemgraphQueryBuilder = QueryBuilder diff --git a/gqlalchemy/connection.py b/gqlalchemy/connection.py index 7c223e04..bf3c41e1 100644 --- a/gqlalchemy/connection.py +++ b/gqlalchemy/connection.py @@ -16,8 +16,13 @@ from typing import Any, Dict, Iterator, Optional import mgclient +from neo4j import GraphDatabase +from neo4j.graph import Node as Neo4jNode +from neo4j.graph import Path as Neo4jPath +from neo4j.graph import Relationship as Neo4jRelationship -from .models import Node, Path, Relationship +from gqlalchemy.exceptions import database_error_handler, connection_handler +from gqlalchemy.models import Node, Path, Relationship __all__ = ("Connection",) @@ -51,14 +56,9 @@ def execute_and_fetch(self, query: str) -> Iterator[Dict[str, Any]]: @abstractmethod def is_active(self) -> bool: - """Returns True if connection is active and can be used""" + """Returns True if connection is active and can be used.""" pass - @staticmethod - def create(**kwargs) -> "Connection": - """Creates an instance of a connection.""" - return MemgraphConnection(**kwargs) - class MemgraphConnection(Connection): def __init__( @@ -69,18 +69,22 @@ def __init__( password: str, encrypted: bool, client_name: Optional[str] = None, - lazy: bool = True, + lazy: bool = False, ): - super().__init__(host, port, username, password, encrypted, client_name=client_name) + super().__init__( + host=host, port=port, username=username, password=password, encrypted=encrypted, client_name=client_name + ) self.lazy = lazy self._connection = self._create_connection() + @database_error_handler def 
execute(self, query: str) -> None: """Executes Cypher query without returning any results.""" cursor = self._connection.cursor() cursor.execute(query) cursor.fetchall() + @database_error_handler def execute_and_fetch(self, query: str) -> Iterator[Dict[str, Any]]: """Executes Cypher query and returns iterator of results.""" cursor = self._connection.cursor() @@ -92,13 +96,14 @@ def execute_and_fetch(self, query: str) -> Iterator[Dict[str, Any]]: yield {dsc.name: _convert_memgraph_value(row[index]) for index, dsc in enumerate(cursor.description)} def is_active(self) -> bool: - """Returns True if connection is active and can be used""" + """Returns True if connection is active and can be used.""" return self._connection is not None and self._connection.status == mgclient.CONN_STATUS_READY + @connection_handler def _create_connection(self) -> Connection: """Creates and returns a connection with Memgraph.""" sslmode = mgclient.MG_SSLMODE_REQUIRE if self.encrypted else mgclient.MG_SSLMODE_DISABLE - return mgclient.connect( + connection = mgclient.connect( host=self.host, port=self.port, username=self.username, @@ -107,10 +112,12 @@ def _create_connection(self) -> Connection: lazy=self.lazy, client_name=self.client_name, ) + connection.autocommit = True + return connection def _convert_memgraph_value(value: Any) -> Any: - """Converts Memgraph objects to custom Node/Relationship objects""" + """Converts Memgraph objects to custom Node/Relationship objects.""" if isinstance(value, mgclient.Relationship): return Relationship.parse_obj( { @@ -140,3 +147,76 @@ def _convert_memgraph_value(value: Any) -> Any: ) return value + + +class Neo4jConnection(Connection): + def __init__( + self, + host: str, + port: int, + username: str, + password: str, + encrypted: bool, + client_name: Optional[str] = None, + lazy: bool = True, + ): + super().__init__( + host=host, port=port, username=username, password=password, encrypted=encrypted, client_name=client_name + ) + self.lazy = lazy + self._connection = self._create_connection() + + def execute(self, query: str) -> None: + """Executes Cypher query without returning any results.""" + with self._connection.session() as session: + session.run(query) + + def execute_and_fetch(self, query: str) -> Iterator[Dict[str, Any]]: + """Executes Cypher query and returns iterator of results.""" + with self._connection.session() as session: + results = session.run(query) + columns = results.keys() + for result in results: + yield {column: _convert_neo4j_value(result[column]) for column in columns} + + def is_active(self) -> bool: + """Returns True if connection is active and can be used.""" + return self._connection is not None + + def _create_connection(self): + return GraphDatabase.driver( + f"bolt://{self.host}:{self.port}", auth=(self.username, self.password), encrypted=self.encrypted + ) + + +def _convert_neo4j_value(value: Any) -> Any: + """Converts Neo4j objects to custom Node/Relationship objects.""" + if isinstance(value, Neo4jRelationship): + return Relationship.parse_obj( + { + "_type": value.type, + "_id": value.id, + "_start_node_id": value.start_node.id, + "_end_node_id": value.end_node.id, + **dict(value.items()), + } + ) + + if isinstance(value, Neo4jNode): + return Node.parse_obj( + { + "_id": value.id, + "_labels": set(value.labels), + **dict(value.items()), + } + ) + + if isinstance(value, Neo4jPath): + return Path.parse_obj( + { + "_nodes": list([_convert_neo4j_value(node) for node in value.nodes]), + "_relationships": list([_convert_neo4j_value(rel) for rel 
in value.relationships]),
+            }
+        )
+
+    return value
diff --git a/gqlalchemy/disk_storage.py b/gqlalchemy/disk_storage.py
index 27eeb5db..d335a83c 100644
--- a/gqlalchemy/disk_storage.py
+++ b/gqlalchemy/disk_storage.py
@@ -14,7 +14,6 @@
 import sqlite3
 import contextlib
-
 from abc import ABC
 from typing import Optional, List
diff --git a/gqlalchemy/exceptions.py b/gqlalchemy/exceptions.py
index cab8fa26..99b493c8 100644
--- a/gqlalchemy/exceptions.py
+++ b/gqlalchemy/exceptions.py
@@ -12,6 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from enum import Enum
+import time
+
 DATABASE_MISSING_IN_FIELD_ERROR_MESSAGE = """
 Can't have an index on a property without providing the database `db` object.
 Define your property as:
@@ -48,15 +51,46 @@
 TypeError: The argument provided is of wrong type.
 Please provide str, tuple[str, str] or list[tuple[str, str]].
 """
-LITERAL_AND_EXPRESSION_MISSING_IN_WHERE = """
-Can't create WHERE query without providing either 'literal' or 'expression' keyword arguments, that can be literals, labels or properties.
+LITERAL_AND_EXPRESSION_MISSING = """
+Can't create {clause} query without providing either 'literal' or 'expression' keyword arguments,
+that can be literals, labels or properties.
+"""
+
+EXTRA_KEYWORD_ARGUMENTS = """
+Can't create {clause} query with extra keyword arguments:
+Please provide a value to either 'literal' or 'expression' keyword arguments.
+"""
+
+RESULT_QUERY_TYPE_ERROR = """
+Can't create {clause} query:
+The argument provided is of wrong type. Please provide str, tuple[str, str], list[Union[tuple[str, str], str]] or set[Union[tuple[str, str], str]].
+"""
+
+INSTANTIATION_ERROR = """
+{class_name} class shouldn't be instantiated!
 """
-EXTRA_KEYWORD_ARGUMENTS_IN_WHERE = """
-Can't create WHERE query with extra keyword arguments:
-Please provide a value to either 'literal' or 'expression' keyword arguments."
+TOO_LARGE_TUPLE_IN_RESULT_QUERY = """
+Tuple argument in {clause} clause only has two arguments - variable name and alias.
 """
+FILE_NOT_FOUND = """
+File with path {path} not found.
+"""
+
+OPERATOR_TYPE_ERROR = """
+Operator argument in {clause} clause that is a string must be a valid operator.
+"""
+
+TIMEOUT_ERROR_MESSAGE = "Waited too long for the port {port} on host {host} to start accepting connections."
+DOCKER_TIMEOUT_ERROR_MESSAGE = "Waited too long for the Docker container to start."
+MEMGRAPH_CONNECTION_ERROR_MESSAGE = "The Memgraph process probably died."
+ + +class QueryClause(Enum): + WHERE = "WHERE" + SET = "SET" + class GQLAlchemyWarning(Warning): pass @@ -77,7 +111,6 @@ class GQLAlchemyUniquenessConstraintError(GQLAlchemyError): class GQLAlchemyDatabaseMissingInFieldError(GQLAlchemyError): def __init__(self, constraint: str, field: str, field_type: str): - super().__init__() self.message = DATABASE_MISSING_IN_FIELD_ERROR_MESSAGE.format( constraint=constraint, field=field, @@ -87,35 +120,120 @@ def __init__(self, constraint: str, field: str, field_type: str): class GQLAlchemyDatabaseMissingInNodeClassError(GQLAlchemyError): def __init__(self, cls): - super().__init__() self.message = DATABASE_MISSING_IN_NODE_CLASS_ERROR_MESSAGE.format(cls=cls) class GQLAlchemyOnDiskPropertyDatabaseNotDefinedError(GQLAlchemyError): def __init__(self): - super().__init__() self.message = ON_DISK_PROPERTY_DATABASE_NOT_DEFINED_ERROR class GQLAlchemyMissingOrder(GQLAlchemyError): def __init__(self): - super().__init__() self.message = MISSING_ORDER -class GQLAlchemyOrderByTypeError(TypeError): +class GQLAlchemyOrderByTypeError(GQLAlchemyError): def __init__(self): - super().__init__() self.message = ORDER_BY_TYPE_ERROR -class GQLAlchemyLiteralAndExpressionMissingInWhere(GQLAlchemyError): +class GQLAlchemyLiteralAndExpressionMissing(GQLAlchemyError): + def __init__(self, clause: str): + self.message = LITERAL_AND_EXPRESSION_MISSING.format(clause=clause) + + +class GQLAlchemyExtraKeywordArguments(GQLAlchemyError): + def __init__(self, clause: str): + self.message = EXTRA_KEYWORD_ARGUMENTS.format(clause=clause) + + +class GQLAlchemyTooLargeTupleInResultQuery(GQLAlchemyError): + def __init__(self, clause) -> None: + self.message = TOO_LARGE_TUPLE_IN_RESULT_QUERY.format(clause=clause) + + +class GQLAlchemyResultQueryTypeError(GQLAlchemyError): + def __init__(self, clause): + self.message = RESULT_QUERY_TYPE_ERROR.format(clause=clause) + + +class GQLAlchemyInstantiationError(GQLAlchemyError): + def __init__(self, class_name) -> None: + self.message = INSTANTIATION_ERROR.format(class_name=class_name) + + +class GQLAlchemyDatabaseError(GQLAlchemyError): + def __init__(self, message): + self.message = message + + +class GQLAlchemyOperatorTypeError(GQLAlchemyError): + def __init__(self, clause) -> None: + self.message = OPERATOR_TYPE_ERROR.format(clause=clause) + + +class GQLAlchemyTimeoutError(GQLAlchemyError): + def __init__(self, message): + self.message = message + + +class GQLAlchemyWaitForPortError(GQLAlchemyTimeoutError): + def __init__(self, port, host): + super().__init__(message=TIMEOUT_ERROR_MESSAGE.format(port=port, host=host)) + + +class GQLAlchemyWaitForDockerError(GQLAlchemyTimeoutError): def __init__(self): - super().__init__() - self.message = LITERAL_AND_EXPRESSION_MISSING_IN_WHERE + super().__init__(message=DOCKER_TIMEOUT_ERROR_MESSAGE) -class GQLAlchemyExtraKeywordArgumentsInWhere(GQLAlchemyError): +class GQLAlchemyWaitForConnectionError(GQLAlchemyTimeoutError): def __init__(self): + super().__init__(message=MEMGRAPH_CONNECTION_ERROR_MESSAGE) + + +class GQLAlchemyFileNotFoundError(GQLAlchemyError): + def __init__(self, path): super().__init__() - self.message = EXTRA_KEYWORD_ARGUMENTS_IN_WHERE + self.message = FILE_NOT_FOUND.format(path=path) + + +def database_error_handler(func): + def inner_function(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + raise GQLAlchemyDatabaseError(e) + + return inner_function + + +def connection_handler(func, delay: float = 0.01, timeout: float = 5.0, backoff: int = 2): + """Wrapper 
for a wait on the connection. + + Args: + func: A function that tries to create the connection + delay: A float that defines how long to wait between retries. + timeout: A float that defines how long to wait for the port. + backoff: An integer used for multiplying the delay. + + Raises: + GQLAlchemyWaitForConnectionError: Raises an error + after the timeout period has passed. + """ + + def _handler(*args, **kwargs): + start_time = time.perf_counter() + current_delay = delay + while True: + try: + return func(*args, **kwargs) + except Exception as ex: + time.sleep(current_delay) + if time.perf_counter() - start_time >= timeout: + raise GQLAlchemyWaitForConnectionError(ex) + + current_delay *= backoff + + return _handler diff --git a/gqlalchemy/graph_algorithms/__init__.py b/gqlalchemy/graph_algorithms/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/gqlalchemy/graph_algorithms/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/gqlalchemy/graph_algorithms/integrated_algorithms.py b/gqlalchemy/graph_algorithms/integrated_algorithms.py new file mode 100644 index 00000000..cfda0966 --- /dev/null +++ b/gqlalchemy/graph_algorithms/integrated_algorithms.py @@ -0,0 +1,203 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod + +BFS_EXPANSION = " *BFS" +DFS_EXPANSION = " *" +WSHORTEST_EXPANSION = " *WSHORTEST" + +DEFAULT_TOTAL_WEIGHT = "total_weight" +DEFAULT_WEIGHT_PROPERTY = "r.weight" + + +class IntegratedAlgorithm(ABC): + """Abstract class modeling Memgraph's built-in graph algorithms. + + These algorithms are integrated into Memgraph's codebase and are called + within a relationship part of a query. For instance: + MATCH p = (:City {name: "Paris"}) + -[:Road * bfs (r, n | r.length <= 200 AND n.name != "Metz")]-> + (:City {name: "Berlin"}) + """ + + @abstractmethod + def __str__(self) -> str: + """Instance of IntegratedAlgorithm subclass is used as a string""" + pass + + @staticmethod + def to_cypher_lambda(expression: str) -> str: + """Method for creating a general lambda expression. + + Variables `r` and `n` stand for relationship and node. The expression is + used e.g. 
for a filter lambda, to use only relationships of length less + than 200: + expression="r.length < 200" + with the filter lambda being: + (r, n | r.length < 200) + + Args: + expression: Lambda conditions or statements. + """ + return "" if expression is None else f"(r, n | {expression})" + + +class BreadthFirstSearch(IntegratedAlgorithm): + """Build a BFS call for a Cypher query. + + The Breadth-first search can be called in Memgraph with Cypher queries such + as: `MATCH (a {id: 723})-[*BFS ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN *;` + It is called inside the relationship clause, `*BFS` naming the algorithm, + `..10` specifying depth bounds, and `(r, n | <lambda>)` is a filter + lambda. + """ + + def __init__( + self, + lower_bound: int = None, + upper_bound: int = None, + condition: str = None, + ) -> None: + """ + Args: + lower_bound: Lower bound for path depth. Defaults to `None`. + upper_bound: Upper bound for path depth. Defaults to `None`. + condition: Filter through nodes and relationships that pass this + condition. Defaults to `None`. + """ + super().__init__() + self.lower_bound = str(lower_bound) if lower_bound is not None else "" + self.upper_bound = str(upper_bound) if upper_bound is not None else "" + self.condition = condition + + def __str__(self) -> str: + """Get a Cypher query string for this algorithm.""" + algo_str = BFS_EXPANSION + + bounds = self.to_cypher_bounds() + if bounds != "": + algo_str = f"{algo_str} {bounds}" + + filter_lambda = super().to_cypher_lambda(self.condition) + if filter_lambda != "": + algo_str = f"{algo_str} {filter_lambda}" + + return algo_str + + def to_cypher_bounds(self) -> str: + """If bounds are specified, returns them in grammar-defined form.""" + if self.lower_bound == "" and self.upper_bound == "": + return "" + + return f"{self.lower_bound}..{self.upper_bound}" + + +class DepthFirstSearch(IntegratedAlgorithm): + """Build a DFS call for a Cypher query. + The Depth-First Search can be called in Memgraph with Cypher queries + such as: + MATCH (a {id: 723})-[* ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN *; + It is called inside the relationship clause, "*" naming the algorithm + ("*" without "DFS" because it is defined as such in openCypher), + "..10" specifying depth bounds, and "(r, n | <lambda>)" is a filter + lambda. + """ + + def __init__( + self, + lower_bound: int = None, + upper_bound: int = None, + condition: str = None, + ) -> None: + """ + Args: + lower_bound: Lower bound for path depth. Defaults to None. + upper_bound: Upper bound for path depth. Defaults to None. + condition: Filter through nodes and relationships that pass this + condition. Defaults to None.
+ """ + super().__init__() + self.lower_bound = str(lower_bound) if lower_bound is not None else "" + self.upper_bound = str(upper_bound) if upper_bound is not None else "" + self.condition = condition + + def __str__(self) -> str: + """Get a Cypher query string for this algorithm.""" + algo_str = DFS_EXPANSION + + bounds = self.to_cypher_bounds() + if bounds != "": + algo_str = f"{algo_str} {bounds}" + + filter_lambda = super().to_cypher_lambda(self.condition) + if filter_lambda != "": + algo_str = f"{algo_str} {filter_lambda}" + + return algo_str + + def to_cypher_bounds(self) -> str: + """If bounds are specified, returns them in grammar-defined form.""" + if self.lower_bound == "" and self.upper_bound == "": + return "" + + return f"{self.lower_bound}..{self.upper_bound}" + + +class WeightedShortestPath(IntegratedAlgorithm): + """Build a Dijkstra shortest path call for a Cypher query. + The weighted shortest path algorithm can be called in Memgraph with Cypher + queries such as: + " MATCH (a {id: 723})-[r *WSHORTEST 10 (r, n | r.weight) weight_sum + (r, n | r.x > 12 AND r.y < 3)]-(b {id: 882}) RETURN * " + It is called inside the relationship clause, "*WSHORTEST" naming the + algorithm, "10" specifying search depth bounds, and "(r, n | <lambda>)" + is a filter lambda, used to filter which relationships and nodes to use. + """ + + def __init__( + self, + upper_bound: int = None, + condition: str = None, + total_weight_var: str = DEFAULT_TOTAL_WEIGHT, + weight_property: str = DEFAULT_WEIGHT_PROPERTY, + ) -> None: + """ + Args: + upper_bound: Upper bound for path depth. Defaults to None. + condition: Filter through nodes and relationships that pass this + condition. Defaults to None. + total_weight_var: Variable defined as the sum of all weights on + path being returned. Defaults to "total_weight". + weight_property: Property being used as weight. Defaults to + "r.weight". + """ + super().__init__() + self.weight_property = f"r.{weight_property}" if "." not in weight_property else weight_property + self.total_weight_var = total_weight_var + self.condition = condition + self.upper_bound = str(upper_bound) if upper_bound is not None else "" + + def __str__(self) -> str: + algo_str = WSHORTEST_EXPANSION + if self.upper_bound != "": + algo_str = f"{algo_str} {self.upper_bound}" + + algo_str = f"{algo_str} {super().to_cypher_lambda(self.weight_property)} {self.total_weight_var}" + + filter_lambda = super().to_cypher_lambda(self.condition) + if filter_lambda != "": + algo_str = f"{algo_str} {filter_lambda}" + + return algo_str diff --git a/gqlalchemy/graph_algorithms/query_builder.py b/gqlalchemy/graph_algorithms/query_builder.py new file mode 100644 index 00000000..22fed3b7 --- /dev/null +++ b/gqlalchemy/graph_algorithms/query_builder.py @@ -0,0 +1,762 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
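Before the new query-builder module below, a short usage sketch for the expansion builders defined above. The import path follows the layout of this diff; the printed strings are exactly what the __str__ implementations render for the relationship part of a Cypher pattern:

from gqlalchemy.graph_algorithms.integrated_algorithms import (
    BreadthFirstSearch,
    WeightedShortestPath,
)

# Rendered inside a relationship clause,
# e.g. -[:Road *BFS 1..10 (r, n | r.length <= 200)]->
bfs = BreadthFirstSearch(lower_bound=1, upper_bound=10, condition="r.length <= 200")
print(str(bfs))  # ' *BFS 1..10 (r, n | r.length <= 200)'

wsp = WeightedShortestPath(upper_bound=10, condition="r.x > 12")
print(str(wsp))  # ' *WSHORTEST 10 (r, n | r.weight) total_weight (r, n | r.x > 12)'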
+ +from typing import Any, Dict, List, Optional, Union + +from gqlalchemy.query_builders.declarative_base import DeclarativeBase +from gqlalchemy.query_builders.memgraph_query_builder import QueryBuilder +from gqlalchemy.vendors.memgraph import Connection, Memgraph + + +class MemgraphQueryBuilder(QueryBuilder): + """ + This query builder extends the usual Cypher query builder capabilities with Memgraph's query modules. + With this module, the user gets autocomplete features for graph algorithms. + Documentation on the methods can be found on Memgraph's web page. + """ + + def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): + super().__init__(connection) + + def example_procedure(self, required_arg: Any, optional_arg=None) -> DeclarativeBase: + return self.call("example.procedure", (required_arg, optional_arg)) + + def example_write_procedure(self, required_arg: str) -> DeclarativeBase: + return self.call("example.write_procedure", (required_arg)) + + def graph_analyzer_analyze(self, analyses: Optional[List[str]] = None) -> DeclarativeBase: + return self.call("graph_analyzer.analyze", (analyses)) + + def graph_analyzer_analyze_subgraph( + self, vertices: List[Any], edges: List[Any], analyses: Optional[List[str]] = None + ) -> DeclarativeBase: + return self.call("graph_analyzer.analyze_subgraph", (vertices, edges, analyses)) + + def graph_analyzer_help(self, name: str, value: str) -> DeclarativeBase: + return self.call("graph_analyzer.help", (name, value)) + + def mg_create_module_file(self, filename: str, content: str) -> DeclarativeBase: + return self.call("mg.create_module_file", (filename, content)) + + def mg_delete_module_file(self, path: str) -> DeclarativeBase: + return self.call("mg.delete_module_file", (path)) + + def mg_functions(self) -> DeclarativeBase: + return self.call("mg.functions") + + def mg_get_module_file(self, path: str) -> DeclarativeBase: + return self.call("mg.get_module_file", (path)) + + def mg_get_module_files(self, is_editable: bool, path: str) -> DeclarativeBase: + return self.call("mg.get_module_files", (is_editable, path)) + + def mg_kafka_set_stream_offset(self, stream_name: str, offset: int) -> DeclarativeBase: + return self.call("mg.kafka_set_stream_offset", (stream_name, offset)) + + def mg_kafka_stream_info(self, stream_name: str) -> DeclarativeBase: + return self.call("mg.kafka_stream_info", (stream_name)) + + def mg_load(self, module_name: str) -> DeclarativeBase: + return self.call("mg.load", (module_name)) + + def mg_load_all(self) -> DeclarativeBase: + return self.call("mg.load_all") + + def mg_procedures(self) -> DeclarativeBase: + return self.call("mg.procedures") + + def mg_pulsar_stream_info(self, stream_name: str) -> DeclarativeBase: + return self.call("mg.pulsar_stream_info", (stream_name)) + + def mg_transformations(self) -> DeclarativeBase: + return self.call("mg.transformations") + + def mg_update_module_file(self, path: str, content: str) -> DeclarativeBase: + return self.call("mg.update_module_file", (path, content)) + + def nxalg_all_shortest_paths( + self, source: Any, target: Any, weight: Optional[str] = None, method: str = "dijkstra" + ) -> DeclarativeBase: + return self.call("nxalg.all_shortest_paths", (source, target, weight, method)) + + def nxalg_all_simple_paths(self, source: Any, target: Any, cutoff: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.all_simple_paths", (source, target, cutoff)) + + def nxalg_ancestors(self, source: Any) -> DeclarativeBase: + return self.call("nxalg.ancestors",
(source)) + + def nxalg_betweenness_centrality( + self, + k: Optional[int] = None, + normalized: bool = True, + weight: Optional[str] = None, + endpoints: bool = False, + seed: Optional[int] = None, + ) -> DeclarativeBase: + return self.call("nxalg.betweenness_centrality", (k, normalized, weight, endpoints, seed)) + + def nxalg_bfs_edges(self, source: Any, reverse: bool = False, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.bfs_edges", (source, reverse, depth_limit)) + + def nxalg_bfs_predecessors(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.bfs_predecessors", (source, depth_limit)) + + def nxalg_bfs_successors(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.bfs_successors", (source, depth_limit)) + + def nxalg_bfs_tree(self, source: Any, reverse: bool = False, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.bfs_tree", (source, reverse, depth_limit)) + + def nxalg_biconnected_components(self) -> DeclarativeBase: + return self.call("nxalg.biconnected_components") + + def nxalg_bridges(self, root: Any) -> DeclarativeBase: + return self.call("nxalg.bridges", (root)) + + def nxalg_center(self) -> DeclarativeBase: + return self.call("nxalg.center") + + def nxalg_chain_decomposition(self, root: Any) -> DeclarativeBase: + return self.call("nxalg.chain_decomposition", (root)) + + def nxalg_check_planarity(self) -> DeclarativeBase: + return self.call("nxalg.check_planarity") + + def nxalg_clustering(self, nodes: Optional[List[Any]] = None, weight: Optional[str] = None) -> DeclarativeBase: + return self.call("nxalg.clustering", (nodes, weight)) + + def nxalg_communicability(self) -> DeclarativeBase: + return self.call("nxalg.communicability") + + def nxalg_core_number(self) -> DeclarativeBase: + return self.call("nxalg.core_number") + + def nxalg_degree_assortativity_coefficient( + self, x: str = "out", y: str = "in", weight: Optional[str] = None, nodes: Optional[List[Any]] = None + ) -> DeclarativeBase: + return self.call("nxalg.degree_assortativity_coefficient", (x, y, weight, nodes)) + + def nxalg_descendants(self, source: Any) -> DeclarativeBase: + return self.call("nxalg.descendants", (source)) + + def nxalg_dfs_postorder_nodes(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.dfs_postorder_nodes", (source, depth_limit)) + + def nxalg_dfs_predecessors(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.dfs_predecessors", (source, depth_limit)) + + def nxalg_dfs_preorder_nodes(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.dfs_preorder_nodes", (source, depth_limit)) + + def nxalg_dfs_successors(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.dfs_successors", (source, depth_limit)) + + def nxalg_dfs_tree(self, source: Any, depth_limit: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.dfs_tree", (source, depth_limit)) + + def nxalg_diameter(self) -> DeclarativeBase: + return self.call("nxalg.diameter") + + def nxalg_dominance_frontiers(self, start: Any) -> DeclarativeBase: + return self.call("nxalg.dominance_frontiers", (start)) + + def nxalg_dominating_set(self, start: Any) -> DeclarativeBase: + return self.call("nxalg.dominating_set", (start)) + + def nxalg_edge_bfs(self, source: Optional[Any],
orientation: Optional[str] = None) -> DeclarativeBase: + return self.call("nxalg.edge_bfs", (source, orientation)) + + def nxalg_edge_dfs(self, source: Optional[Any], orientation: Optional[str] = None) -> DeclarativeBase: + return self.call("nxalg.edge_dfs", (source, orientation)) + + def nxalg_find_cliques(self) -> DeclarativeBase: + return self.call("nxalg.find_cliques") + + def nxalg_find_cycle( + self, source: Optional[List[Any]] = None, orientation: Optional[str] = None + ) -> DeclarativeBase: + return self.call("nxalg.find_cycle", (source, orientation)) + + def nxalg_flow_hierarchy(self, weight: Optional[str] = None) -> DeclarativeBase: + return self.call("nxalg.flow_hierarchy", (weight)) + + def nxalg_global_efficiency(self) -> DeclarativeBase: + return self.call("nxalg.global_efficiency") + + def nxalg_greedy_color(self, strategy: str = "largest_first", interchange: bool = False) -> DeclarativeBase: + return self.call("nxalg.greedy_color", (strategy, interchange)) + + def nxalg_has_eulerian_path(self) -> DeclarativeBase: + return self.call("nxalg.has_eulerian_path") + + def nxalg_has_path(self, source: Any, target: Any) -> DeclarativeBase: + return self.call("nxalg.has_path", (source, target)) + + def nxalg_immediate_dominators(self, start: Any) -> DeclarativeBase: + return self.call("nxalg.immediate_dominators", (start)) + + def nxalg_is_arborescence(self) -> DeclarativeBase: + return self.call("nxalg.is_arborescence") + + def nxalg_is_at_free(self) -> DeclarativeBase: + return self.call("nxalg.is_at_free") + + def nxalg_is_bipartite(self) -> DeclarativeBase: + return self.call("nxalg.is_bipartite") + + def nxalg_is_branching(self) -> DeclarativeBase: + return self.call("nxalg.is_branching") + + def nxalg_is_chordal(self) -> DeclarativeBase: + return self.call("nxalg.is_chordal") + + def nxalg_is_distance_regular(self) -> DeclarativeBase: + return self.call("nxalg.is_distance_regular") + + def nxalg_is_edge_cover(self, cover: List[Any]) -> DeclarativeBase: + return self.call("nxalg.is_edge_cover", (cover)) + + def nxalg_is_eulerian(self) -> DeclarativeBase: + return self.call("nxalg.is_eulerian") + + def nxalg_is_forest(self) -> DeclarativeBase: + return self.call("nxalg.is_forest") + + def nxalg_is_isolate(self, n: Any) -> DeclarativeBase: + return self.call("nxalg.is_isolate", (n)) + + def nxalg_is_isomorphic( + self, nodes1: List[Any], edges1: List[Any], nodes2: List[Any], edges2: List[Any] + ) -> DeclarativeBase: + return self.call("nxalg.is_isomorphic", (nodes1, edges1, nodes2, edges2)) + + def nxalg_is_semieulerian(self) -> DeclarativeBase: + return self.call("nxalg.is_semieulerian") + + def nxalg_is_simple_path(self, nodes: List[Any]) -> DeclarativeBase: + return self.call("nxalg.is_simple_path", (nodes)) + + def nxalg_is_strongly_regular(self) -> DeclarativeBase: + return self.call("nxalg.is_strongly_regular") + + def nxalg_is_tournament(self) -> DeclarativeBase: + return self.call("nxalg.is_tournament") + + def nxalg_is_tree(self) -> DeclarativeBase: + return self.call("nxalg.is_tree") + + def nxalg_isolates(self) -> DeclarativeBase: + return self.call("nxalg.isolates") + + def nxalg_jaccard_coefficient(self, ebunch: Optional[List[Any]] = None) -> DeclarativeBase: + return self.call("nxalg.jaccard_coefficient", (ebunch)) + + def nxalg_k_clique_communities(self, k: int, cliques: List[List[Any]] = None) -> DeclarativeBase: + return self.call("nxalg.k_clique_communities", (k, cliques)) + + def nxalg_k_components(self, density: float = 0.95) -> DeclarativeBase: + return 
self.call("nxalg.k_components", (density)) + + def nxalg_k_edge_components(self, k: int) -> DeclarativeBase: + return self.call("nxalg.k_edge_components", (k)) + + def nxalg_local_efficiency(self) -> DeclarativeBase: + return self.call("nxalg.local_efficiency") + + def nxalg_lowest_common_ancestor(self, node1: Any, node2: Any) -> DeclarativeBase: + return self.call("nxalg.lowest_common_ancestor", (node1, node2)) + + def nxalg_maximal_matching(self) -> DeclarativeBase: + return self.call("nxalg.maximal_matching") + + def nxalg_minimum_spanning_tree( + self, weight: str = "weight", algorithm: str = "kruskal", ignore_nan: bool = False + ) -> DeclarativeBase: + return self.call("nxalg.minimum_spanning_tree", (weight, algorithm, ignore_nan)) + + def nxalg_multi_source_dijkstra_path( + self, sources: List[Any], cutoff: Optional[int] = None, weight: str = "weight" + ) -> DeclarativeBase: + return self.call("nxalg.multi_source_dijkstra_path", (sources, cutoff, weight)) + + def nxalg_multi_source_dijkstra_path_length( + self, sources: List[Any], cutoff: Optional[int] = None, weight: str = "weight" + ) -> DeclarativeBase: + return self.call("nxalg.multi_source_dijkstra_path_length", (sources, cutoff, weight)) + + def nxalg_node_boundary(self, nbunch1: List[Any], nbunch2: List[Any] = None) -> DeclarativeBase: + return self.call("nxalg.node_boundary", (nbunch1, nbunch2)) + + def nxalg_node_connectivity(self, source: List[Any] = None, target: List[Any] = None) -> DeclarativeBase: + return self.call("nxalg.node_connectivity", (source, target)) + + def nxalg_node_expansion(self, s: List[Any]) -> DeclarativeBase: + return self.call("nxalg.node_expansion", (s)) + + def nxalg_non_randomness(self, k: Optional[int] = None) -> DeclarativeBase: + return self.call("nxalg.non_randomness", (k)) + + def nxalg_pagerank( + self, + alpha: float = 0.85, + personalization: Optional[str] = None, + max_iter: int = 100, + tol: float = 1e-06, + nstart: Optional[str] = None, + weight: Optional[str] = "weight", + dangling: Optional[str] = None, + ) -> DeclarativeBase: + return self.call("nxalg.pagerank", (alpha, personalization, max_iter, tol, nstart, weight, dangling)) + + def nxalg_reciprocity(self, nodes: List[Any] = None) -> DeclarativeBase: + return self.call("nxalg.reciprocity", (nodes)) + + def nxalg_shortest_path( + self, + source: Optional[Any] = None, + target: Optional[Any] = None, + weight: Optional[str] = None, + method: str = "dijkstra", + ) -> DeclarativeBase: + return self.call("nxalg.shortest_path", (source, target, weight, method)) + + def nxalg_shortest_path_length( + self, + source: Optional[Any] = None, + target: Optional[Any] = None, + weight: Optional[str] = None, + method: str = "dijkstra", + ) -> DeclarativeBase: + return self.call("nxalg.shortest_path_length", (source, target, weight, method)) + + def nxalg_simple_cycles(self) -> DeclarativeBase: + return self.call("nxalg.simple_cycles") + + def nxalg_strongly_connected_components(self) -> DeclarativeBase: + return self.call("nxalg.strongly_connected_components") + + def nxalg_topological_sort(self) -> DeclarativeBase: + return self.call("nxalg.topological_sort") + + def nxalg_triadic_census(self) -> DeclarativeBase: + return self.call("nxalg.triadic_census") + + def nxalg_voronoi_cells(self, center_nodes: List[Any], weight: str = "weight") -> DeclarativeBase: + return self.call("nxalg.voronoi_cells", (center_nodes, weight)) + + def nxalg_wiener_index(self, weight: Optional[str] = None) -> DeclarativeBase: + return self.call("nxalg.wiener_index", 
(weight)) + + def py_example_procedure(self, required_arg: Optional[Any], optional_arg: Optional[Any] = None) -> DeclarativeBase: + return self.call("py_example.procedure", (required_arg, optional_arg)) + + def py_example_write_procedure(self, property_name: str, property_value: Optional[Any]) -> DeclarativeBase: + return self.call("py_example.write_procedure", (property_name, property_value)) + + def wcc_get_components(self, vertices: List[Any], edges: List[Any]) -> DeclarativeBase: + return self.call("wcc.get_components", (vertices, edges)) + + +class MageQueryBuilder(MemgraphQueryBuilder): + """ + This query builder extends the Memgraph query builder with Memgraph MAGE graph algorithm Cypher options. + With this module, the user gets autocomplete features for the graph algorithms written in the MAGE library. + Documentation on the methods can be found on Memgraph's web page. + """ + + def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): + super().__init__(connection) + + def betweenness_centrality_get( + self, directed: bool = True, normalized: bool = True, threads: int = 8 + ) -> DeclarativeBase: + return self.call("betweenness_centrality.get", (directed, normalized, threads)) + + def betweenness_centrality_online_get(self, normalize: bool = True) -> DeclarativeBase: + return self.call("betweenness_centrality_online.get", (normalize)) + + def betweenness_centrality_online_reset(self) -> DeclarativeBase: + return self.call("betweenness_centrality_online.reset") + + def betweenness_centrality_online_set(self, normalize: bool = True, threads: int = 8) -> DeclarativeBase: + return self.call("betweenness_centrality_online.set", (normalize, threads)) + + def betweenness_centrality_online_update( + self, + created_vertices: List[Any] = [], + created_edges: List[Any] = [], + deleted_vertices: List[Any] = [], + deleted_edges: List[Any] = [], + normalize: bool = True, + threads: int = 8, + ) -> DeclarativeBase: + return self.call( + "betweenness_centrality_online.update", + (created_vertices, created_edges, deleted_vertices, deleted_edges, normalize, threads), + ) + + def biconnected_components_get(self) -> DeclarativeBase: + return self.call("biconnected_components.get") + + def bipartite_matching_max(self) -> DeclarativeBase: + return self.call("bipartite_matching.max") + + def bridges_get(self) -> DeclarativeBase: + return self.call("bridges.get") + + def community_detection_get( + self, + weight: str = "weight", + coloring: bool = False, + min_graph_shrink: int = 100000, + community_alg_threshold: float = 1e-06, + coloring_alg_threshold: float = 0.01, + ) -> DeclarativeBase: + return self.call( + "community_detection.get", + (weight, coloring, min_graph_shrink, community_alg_threshold, coloring_alg_threshold), + ) + + def community_detection_online_get(self) -> DeclarativeBase: + return self.call("community_detection_online.get") + + def community_detection_online_reset(self) -> DeclarativeBase: + return self.call("community_detection_online.reset") + + def community_detection_online_set( + self, + directed: bool = False, + weighted: bool = False, + similarity_threshold: float = 0.7, + exponent: float = 4, + min_value: float = 0.1, + weight_property: str = "weight", + w_selfloop: float = 1.0, + max_iterations: int = 100, + max_updates: int = 5, + ) -> DeclarativeBase: + return self.call( + "community_detection_online.set", + ( + directed, + weighted, + similarity_threshold, + exponent, + min_value, + weight_property, + w_selfloop, + max_iterations, + max_updates, + ), + ) + + def
community_detection_online_update( + self, + createdVertices: List[Any] = [], + createdEdges: List[Any] = [], + updatedVertices: List[Any] = [], + updatedEdges: List[Any] = [], + deletedVertices: List[Any] = [], + deletedEdges: List[Any] = [], + ) -> DeclarativeBase: + return self.call( + "community_detection_online.update", + (createdVertices, createdEdges, updatedVertices, updatedEdges, deletedVertices, deletedEdges), + ) + + def cycles_get(self) -> DeclarativeBase: + return self.call("cycles.get") + + def distance_calculator_multiple( + self, start_points: List[Any], end_points: List[Any], metrics: str = "m" + ) -> DeclarativeBase: + return self.call("distance_calculator.multiple", (start_points, end_points, metrics)) + + def distance_calculator_single( + self, start: Optional[Any], end: Optional[Any], metrics: str = "m" + ) -> DeclarativeBase: + return self.call("distance_calculator.single", (start, end, metrics)) + + def export_util_json(self, path: str) -> DeclarativeBase: + return self.call("export_util.json", (path)) + + def graph_coloring_color_graph( + self, parameters: Dict[str, Union[str, int]], edge_property: str = "weight" + ) -> DeclarativeBase: + return self.call("graph_coloring.color_graph", (parameters, edge_property)) + + def graph_coloring_color_subgraph( + self, + vertices: List[Any], + edges: List[Any], + parameters: Dict[str, Union[str, int]], + edge_property: str = "weight", + ) -> DeclarativeBase: + return self.call("graph_coloring.color_subgraph", (vertices, edges, parameters, edge_property)) + + def import_util_json(self, path: str) -> DeclarativeBase: + return self.call("import_util.json", (path)) + + def json_util_load_from_path(self, path: str) -> DeclarativeBase: + return self.call("json_util.load_from_path", (path)) + + def json_util_load_from_url(self, url: str) -> DeclarativeBase: + return self.call("json_util.load_from_url", (url)) + + def katz_centrality_get(self, alpha: float = 0.2, epsilon: float = 0.01) -> DeclarativeBase: + return self.call("katz_centrality.get", (alpha, epsilon)) + + def katz_centrality_online_get(self) -> DeclarativeBase: + return self.call("katz_centrality_online.get") + + def katz_centrality_online_reset(self) -> DeclarativeBase: + return self.call("katz_centrality_online.reset") + + def katz_centrality_online_set(self, alpha: float = 0.2, epsilon: float = 0.01) -> DeclarativeBase: + return self.call("katz_centrality_online.set", (alpha, epsilon)) + + def katz_centrality_online_update( + self, + created_vertices: List[Any] = [], + created_edges: List[Any] = [], + deleted_vertices: List[Any] = [], + deleted_edges: List[Any] = [], + ) -> DeclarativeBase: + return self.call( + "katz_centrality_online.update", (created_vertices, created_edges, deleted_vertices, deleted_edges) + ) + + def max_flow_get_flow(self, start_v: Any, end_v: Any, edge_property: str = "weight") -> DeclarativeBase: + return self.call("max_flow.get_flow", (start_v, end_v, edge_property)) + + def max_flow_get_paths(self, start_v: Any, end_v: Any, edge_property: str = "weight") -> DeclarativeBase: + return self.call("max_flow.get_paths", (start_v, end_v, edge_property)) + + def node2vec_get_embeddings( + self, + is_directed: bool = False, + p: Optional[Any] = 2, + q: Optional[Any] = 0.5, + num_walks: Optional[Any] = 4, + walk_length: Optional[Any] = 5, + vector_size: Optional[Any] = 100, + alpha: Optional[Any] = 0.025, + window: Optional[Any] = 5, + min_count: Optional[Any] = 1, + seed: Optional[Any] = 1, + workers: Optional[Any] = 1, + min_alpha: Optional[Any] = 
0.0001, + sg: Optional[Any] = 1, + hs: Optional[Any] = 0, + negative: Optional[Any] = 5, + epochs: Optional[Any] = 5, + edge_weight_property: Optional[Any] = "weight", + ) -> DeclarativeBase: + return self.call( + "node2vec.get_embeddings", + ( + is_directed, + p, + q, + num_walks, + walk_length, + vector_size, + alpha, + window, + min_count, + seed, + workers, + min_alpha, + sg, + hs, + negative, + epochs, + edge_weight_property, + ), + ) + + def node2vec_help(self) -> DeclarativeBase: + return self.call("node2vec.help") + + def node2vec_set_embeddings( + self, + is_directed: bool = False, + p: Optional[Any] = 2, + q: Optional[Any] = 0.5, + num_walks: Optional[Any] = 4, + walk_length: Optional[Any] = 5, + vector_size: Optional[Any] = 100, + alpha: Optional[Any] = 0.025, + window: Optional[Any] = 5, + min_count: Optional[Any] = 1, + seed: Optional[Any] = 1, + workers: Optional[Any] = 1, + min_alpha: Optional[Any] = 0.0001, + sg: Optional[Any] = 1, + hs: Optional[Any] = 0, + negative: Optional[Any] = 5, + epochs: Optional[Any] = 5, + edge_weight_property: Optional[Any] = "weight", + ) -> DeclarativeBase: + return self.call( + "node2vec.set_embeddings", + ( + is_directed, + p, + q, + num_walks, + walk_length, + vector_size, + alpha, + window, + min_count, + seed, + workers, + min_alpha, + sg, + hs, + negative, + epochs, + edge_weight_property, + ), + ) + + def node2vec_online_get(self) -> DeclarativeBase: + return self.call("node2vec_online.get") + + def node2vec_online_help(self) -> DeclarativeBase: + return self.call("node2vec_online.help") + + def node2vec_online_reset(self) -> DeclarativeBase: + return self.call("node2vec_online.reset") + + def node2vec_online_set_streamwalk_updater( + self, + half_life: int = 7200, + max_length: int = 3, + beta: float = 0.9, + cutoff: int = 604800, + sampled_walks: int = 4, + full_walks: bool = False, + ) -> DeclarativeBase: + return self.call( + "node2vec_online.set_streamwalk_updater", (half_life, max_length, beta, cutoff, sampled_walks, full_walks) + ) + + def node2vec_online_set_word2vec_learner( + self, + embedding_dimension: int = 128, + learning_rate: float = 0.01, + skip_gram: bool = True, + negative_rate: float = 10, + threads: Optional[int] = None, + ) -> DeclarativeBase: + return self.call( + "node2vec_online.set_word2vec_learner", + (embedding_dimension, learning_rate, skip_gram, negative_rate, threads), + ) + + def node2vec_online_update(self, edges: List[Any]) -> DeclarativeBase: + return self.call("node2vec_online.update", (edges)) + + def node_similarity_cosine(self, node1: Any, node2: Any, mode: str = "cartesian") -> DeclarativeBase: + return self.call("node_similarity.cosine", (node1, node2, mode)) + + def node_similarity_jaccard(self, node1: Any, node2: Any, mode: str = "cartesian") -> DeclarativeBase: + return self.call("node_similarity.jaccard", (node1, node2, mode)) + + def node_similarity_overlap(self, node1: Any, node2: Any, mode: str = "cartesian") -> DeclarativeBase: + return self.call("node_similarity.overlap", (node1, node2, mode)) + + def pagerank_get( + self, max_iterations: int = 100, damping_factor: float = 0.85, stop_epsilon: float = 1e-05 + ) -> DeclarativeBase: + return self.call("pagerank.get", (max_iterations, damping_factor, stop_epsilon)) + + def pagerank_online_get(self) -> DeclarativeBase: + return self.call("pagerank_online.get") + + def pagerank_online_reset(self) -> DeclarativeBase: + return self.call("pagerank_online.reset") + + def pagerank_online_set(self, walks_per_node: int = 10, walk_stop_epsilon: float 
= 0.1) -> DeclarativeBase: + return self.call("pagerank_online.set", (walks_per_node, walk_stop_epsilon)) + + def pagerank_online_update( + self, + created_vertices: List[Any] = [], + created_edges: List[Any] = [], + deleted_vertices: List[Any] = [], + deleted_edges: List[Any] = [], + ) -> DeclarativeBase: + return self.call("pagerank_online.update", (created_vertices, created_edges, deleted_vertices, deleted_edges)) + + def rust_example_basic(self, input_string: str, optional_input_int: int = 0) -> DeclarativeBase: + return self.call("rust_example.basic", (input_string, optional_input_int)) + + def rust_example_test_procedure(self) -> DeclarativeBase: + return self.call("rust_example.test_procedure") + + def set_cover_cp_solve(self, element_vertexes: List[Any], set_vertexes: List[Any]) -> DeclarativeBase: + return self.call("set_cover.cp_solve", (element_vertexes, set_vertexes)) + + def set_cover_greedy(self, element_vertexes: List[Any], set_vertexes: List[Any]) -> DeclarativeBase: + return self.call("set_cover.greedy", (element_vertexes, set_vertexes)) + + def tgn_get(self) -> DeclarativeBase: + return self.call("tgn.get") + + def tgn_get_results(self) -> DeclarativeBase: + return self.call("tgn.get_results") + + def tgn_predict_link_score(self, src: Any, dest: Any) -> DeclarativeBase: + return self.call("tgn.predict_link_score", (src, dest)) + + def tgn_reset(self) -> DeclarativeBase: + return self.call("tgn.reset") + + def tgn_revert_from_database(self) -> DeclarativeBase: + return self.call("tgn.revert_from_database") + + def tgn_save_tgn_params(self) -> DeclarativeBase: + return self.call("tgn.save_tgn_params") + + def tgn_set_eval(self) -> DeclarativeBase: + return self.call("tgn.set_eval") + + def tgn_set_params(self, params: Dict[str, Union[int, str]]) -> DeclarativeBase: + return self.call("tgn.set_params", (params)) + + def tgn_train_and_eval(self, num_epochs: int) -> DeclarativeBase: + return self.call("tgn.train_and_eval", (num_epochs)) + + def tgn_update(self, edges: List[Any]) -> DeclarativeBase: + return self.call("tgn.update", (edges)) + + def tsp_solve(self, points: List[Any], method: str = "1.5_approx") -> DeclarativeBase: + return self.call("tsp.solve", (points, method)) + + def union_find_connected( + self, nodes1: Optional[Any], nodes2: Optional[Any], mode: str = "pairwise", update: bool = True + ) -> DeclarativeBase: + return self.call("union_find.connected", (nodes1, nodes2, mode, update)) + + def uuid_generator_get(self) -> DeclarativeBase: + return self.call("uuid_generator.get") + + def vrp_route(self, depot_node: Any, number_of_vehicles: Optional[int] = None) -> DeclarativeBase: + return self.call("vrp.route", (depot_node, number_of_vehicles)) + + def weakly_connected_components_get(self) -> DeclarativeBase: + return self.call("weakly_connected_components.get") diff --git a/gqlalchemy/graph_algorithms/query_modules.py b/gqlalchemy/graph_algorithms/query_modules.py new file mode 100644 index 00000000..2bbf0de2 --- /dev/null +++ b/gqlalchemy/graph_algorithms/query_modules.py @@ -0,0 +1,166 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, Tuple, List + +QM_KEY_NAME = "name" +QM_KEY_VALUE = "value" +QM_KEY_DEFAULT = "default" +QM_KEY_TYPE = "type" + +QM_FIELD_NAME = "name" +QM_FIELD_IS_EDITABLE = "is_editable" +QM_FIELD_IS_WRITE = "is_write" +QM_FIELD_PATH = "path" +QM_FIELD_SIGNATURE = "signature" + +LEFT_PARANTHESES = "(" +RIGHT_PARANTHESES = ")" +EQUALS_DELIMITER = " = " +NAME_TYPE_DELIMITIER = " :: " +COMMA_SEP = ", " +STRING_TYPE = "STRING" +QUOTATION_MARK = '"' + + +class QueryModule: + """Class representing a single MAGE query module.""" + + def __init__(self, **kwargs) -> None: + arguments, returns = parse_query_module_signature(kwargs[QM_FIELD_SIGNATURE]) + + self.name = kwargs[QM_FIELD_NAME] + self.is_editable = kwargs[QM_FIELD_IS_EDITABLE] + self.is_write = kwargs[QM_FIELD_IS_WRITE] + self.path = kwargs[QM_FIELD_PATH] + self.signature = kwargs[QM_FIELD_SIGNATURE] + self.arguments = arguments + self.returns = returns + + def __str__(self) -> str: + return self.name + + def set_argument_values(self, **kwargs) -> None: + """Set values for QueryModule arguments so the module can be called. + + Kwargs: + Named arguments in self.arguments. + + Raises: + KeyError: Passed an argument not in the self.arguments list. + """ + for argument_name in kwargs: + has_arg = False + for argument_dict in self.arguments: + if argument_dict[QM_KEY_NAME] == argument_name: + argument_dict[QM_KEY_VALUE] = str(kwargs[argument_name]) + has_arg = True + break + if not has_arg: + raise KeyError(f"{argument_name} is not an argument in this query module.") + + def get_arguments_for_call(self) -> str: + """Return inputs in the form "value1, value2, ..." for the QueryBuilder call() + method. + + Raises: + KeyError: Cannot get all values of arguments because one or more is + not set. + """ + arguments_str = "" + + for argument_dict in self.arguments: + if QM_KEY_VALUE in argument_dict: + val = argument_dict[QM_KEY_VALUE] + elif QM_KEY_DEFAULT in argument_dict: + val = argument_dict[QM_KEY_DEFAULT] + else: + raise KeyError(f"{argument_dict[QM_KEY_NAME]} has no value set.") + + if argument_dict[QM_KEY_TYPE] == STRING_TYPE: + arguments_str += QUOTATION_MARK + val + QUOTATION_MARK + else: + arguments_str += val + + arguments_str += COMMA_SEP + + return arguments_str[:-2] + + +def parse_query_module_signature(signature: str) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + """Query module signatures received from Memgraph are parsed into two + lists of dictionaries. + + One list is for arguments and the other for returns.
+ For instance, if a query module signature is: + dummy_module.dummy(lst :: LIST OF STRING, num = 3 :: NUMBER) :: (ret :: STRING) + the method should return a list of arguments: + [{"name": "lst", "type": "LIST OF STRING"}, {"name": "num", "type": "NUMBER", "default": 3}] + and a list of returns: + [{"name": "ret", "type": "STRING"}] + + Each dictionary consists of the fields: "name" - the argument name, "type" - the data + type of the argument and "default" - the default argument value, if one is given. + + Args: + signature: Module signature as returned by the Cypher CALL operation. + """ + end_arguments_parentheses = signature.index(RIGHT_PARANTHESES) + arguments_field = signature[signature.index(LEFT_PARANTHESES) + 1 : end_arguments_parentheses] + returns_field = signature[ + signature.index(LEFT_PARANTHESES, end_arguments_parentheses) + + 1 : signature.index(RIGHT_PARANTHESES, end_arguments_parentheses + 1) + ] + + arguments = parse_field( + vars_field=arguments_field.strip(), + name_type_delimiter=NAME_TYPE_DELIMITIER, + default_value_delimiter=EQUALS_DELIMITER, + ) + returns = parse_field( + vars_field=returns_field.strip(), + name_type_delimiter=NAME_TYPE_DELIMITIER, + default_value_delimiter=EQUALS_DELIMITER, + ) + + return arguments, returns + + +def parse_field( + vars_field: str, name_type_delimiter: str = NAME_TYPE_DELIMITIER, default_value_delimiter: str = EQUALS_DELIMITER +) -> List[Dict[str, str]]: + """Parse a field of arguments or returns from a query module signature. + + Args: + vars_field: Signature field inside the parentheses. + """ + if len(vars_field) == 0: + return [] + + vars = [] + + for var in vars_field.split(COMMA_SEP): + var_dict = {} + sides = var.split(name_type_delimiter) + var_dict[QM_KEY_TYPE] = sides[1] + if default_value_delimiter in sides[0]: + splt = sides[0].split(default_value_delimiter) + var_dict[QM_KEY_NAME] = splt[0] + var_dict[QM_KEY_DEFAULT] = splt[1].strip(QUOTATION_MARK) + else: + var_dict[QM_KEY_NAME] = sides[0] + + vars.append(var_dict) + + return vars diff --git a/gqlalchemy/instance_runner.py b/gqlalchemy/instance_runner.py index ed447658..f4d679e6 100644 --- a/gqlalchemy/instance_runner.py +++ b/gqlalchemy/instance_runner.py @@ -12,16 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -import docker -import os -import psutil + import socket import subprocess import time from abc import ABC, abstractmethod from enum import Enum from typing import Any, Dict, Union -from .memgraph import Memgraph + +import docker +import psutil + +from gqlalchemy.exceptions import ( + GQLAlchemyWaitForConnectionError, + GQLAlchemyWaitForDockerError, + GQLAlchemyWaitForPortError, +) +from gqlalchemy.vendors.memgraph import Memgraph MEMGRAPH_DEFAULT_BINARY_PATH = "/usr/lib/memgraph/memgraph" @@ -32,10 +39,6 @@ LOOPBACK_ADDRESS = "127.0.0.1" WILDCARD_ADDRESS = "0.0.0.0" -TIMEOUT_ERROR_MESSAGE = "Waited too long for the port {port} on host {host} to start accepting connections." -DOCKER_TIMEOUT_ERROR_MESSAGE = "Waited too long for the Docker container to start." -MEMGRAPH_CONNECTION_ERROR_MESSAGE = "The Memgraph process probably died."
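One concrete round trip through the signature parser added above, using the example from its docstring. A sketch, assuming the module path from this diff; note that, unlike the docstring's illustration, the parser keeps default values as strings (e.g. "3"), since it only slices the raw signature text:

from gqlalchemy.graph_algorithms.query_modules import parse_query_module_signature

signature = "dummy_module.dummy(lst :: LIST OF STRING, num = 3 :: NUMBER) :: (ret :: STRING)"
arguments, returns = parse_query_module_signature(signature)

# arguments == [{"type": "LIST OF STRING", "name": "lst"},
#               {"type": "NUMBER", "name": "num", "default": "3"}]
# returns == [{"type": "STRING", "name": "ret"}]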
- class DockerImage(Enum): MEMGRAPH = "memgraph/memgraph" @@ -50,7 +53,11 @@ class DockerContainerStatus(Enum): def wait_for_port( - host: str = LOOPBACK_ADDRESS, port: int = MEMGRAPH_DEFAULT_PORT, delay: float = 0.01, timeout: float = 5.0 + host: str = LOOPBACK_ADDRESS, + port: int = MEMGRAPH_DEFAULT_PORT, + delay: float = 0.01, + timeout: float = 5.0, + backoff: int = 2, ) -> None: """Wait for a TCP port to become available. @@ -59,13 +66,13 @@ def wait_for_port( port: A string representing the port that is being checked. delay: A float that defines how long to wait between retries. timeout: A float that defines how long to wait for the port. + backoff: An integer used for multiplying the delay. Raises: TimeoutError: Raises an error when the host and port are not accepting connections after the timeout period has passed. """ start_time = time.perf_counter() - time.sleep(delay) while True: try: with socket.create_connection((host, port), timeout=timeout): @@ -73,33 +80,35 @@ def wait_for_port( except OSError as ex: time.sleep(delay) if time.perf_counter() - start_time >= timeout: - raise TimeoutError(TIMEOUT_ERROR_MESSAGE.format(port=port, host=host)) from ex + raise GQLAlchemyWaitForPortError(port=port, host=host) from ex - delay *= 2 + delay *= backoff -def wait_for_docker_container(container: "docker.Container", delay: float = 0.01, timeout: float = 5.0) -> None: +def wait_for_docker_container( + container: "docker.Container", delay: float = 0.01, timeout: float = 5.0, backoff: int = 2 +) -> None: """Wait for a Docker container to enter the status `running`. Args: container: The Docker container to wait for. delay: A float that defines how long to wait between retries. timeout: A float that defines how long to wait for the status. + backoff: An integer used for multiplying the delay. Raises: TimeoutError: Raises an error when the container isn't running after the timeout period has passed. """ start_time = time.perf_counter() - time.sleep(delay) - container.reload() while container.status != DockerContainerStatus.RUNNING.value: + container.reload() time.sleep(delay) + if time.perf_counter() - start_time >= timeout: - raise TimeoutError(DOCKER_TIMEOUT_ERROR_MESSAGE) + raise GQLAlchemyWaitForDockerError - container.reload() - delay *= 2 + delay *= backoff class MemgraphInstance(ABC): @@ -115,31 +124,66 @@ def __init__( self.proc_mg = None self.config[MEMGRAPH_CONFIG_BOLT_PORT] = self.port self.config[MEMGRAPH_CONFIG_BOLT_ADDRESS] = self.host + self._memgraph = None + + @property + def memgraph(self) -> Memgraph: + if self._memgraph is None: + self._memgraph = Memgraph(self.host, self.port) + + return self._memgraph def set_config(self, config: Dict[str, Union[str, int, bool]]) -> None: self.config.update(config) def connect(self) -> "Memgraph": - self.memgraph = Memgraph(self.host, self.port) if not self.is_running(): - raise ConnectionError(MEMGRAPH_CONNECTION_ERROR_MESSAGE) + raise GQLAlchemyWaitForConnectionError return self.memgraph - @abstractmethod + def start_and_connect(self, restart: bool = False) -> "Memgraph": + """Start the Memgraph instance and return the + connection object. + + Attributes: + restart: A bool indicating if the instance should be + restarted if it's already running. + """ + self.start(restart=restart) + + return self.connect() + def start(self, restart: bool = False) -> None: - pass + """Start the Memgraph instance. + + Attributes: + restart: A bool indicating if the instance should be + restarted if it's already running. 
+ """ + if not restart and self.is_running(): + return + + self.stop() + self._start_instance() + + def stop(self) -> Any: + """Stop the Memgraph instance.""" + if not self.is_running(): + return + + self._stop_instance() @abstractmethod - def start_and_connect(self, restart: bool = False) -> "Memgraph": + def is_running(self) -> bool: pass @abstractmethod - def stop(self) -> Any: + def _start_instance(self) -> None: pass @abstractmethod - def is_running(self) -> bool: + def _stop_instance(self) -> Any: pass @@ -159,60 +203,28 @@ def __init__(self, binary_path: str = MEMGRAPH_DEFAULT_BINARY_PATH, user: str = self.binary_path = binary_path self.user = user - def start(self, restart: bool = False) -> None: - """Start the Memgraph instance from a binary file. - - Attributes: - restart: A bool indicating if the instance should be - restarted if it's already running. - """ - if not restart and self.is_running(): - return - - self.stop() + def _start_instance(self) -> None: args_mg = f"{self.binary_path } " + (" ").join([f"{k}={v}" for k, v in self.config.items()]) if self.user != "": - args_mg = f"sudo runuser -l {self.user} -c '{args_mg}'" + args_mg = f"runuser -l {self.user} -c '{args_mg}'" self.proc_mg = subprocess.Popen(args_mg, shell=True) wait_for_port(self.host, self.port) - def start_and_connect(self, restart: bool = False) -> "Memgraph": - """Start the Memgraph instance from a binary file and return the - connection object. - - Attributes: - restart: A bool indicating if the instance should be - restarted if it's already running. - """ - self.start(restart=restart) - - return self.connect() - - def stop(self) -> None: - """Stop the Memgraph instance.""" - if not self.is_running(): - return - + def _stop_instance(self) -> None: procs = set() process = psutil.Process(self.proc_mg.pid) procs.add(process) for proc in process.children(recursive=True): procs.add(proc) - os.system(f"sudo kill {proc.pid}") + proc.kill() process.kill() psutil.wait_procs(procs) def is_running(self) -> bool: """Check if the Memgraph instance is still running.""" - if self.proc_mg is None: - return False - - if self.proc_mg.poll() is not None: - return False - - return True + return self.proc_mg is not None and self.proc_mg.poll() is None class MemgraphInstanceDocker(MemgraphInstance): @@ -233,42 +245,16 @@ def __init__( self._client = docker.from_env() self._container = None - def start(self, restart: bool = False) -> None: - """Start the Memgraph instance in a Docker container. - - Attributes: - restart: A bool indicating if the instance should be - restarted if it's already running. - """ - if not restart and self.is_running(): - return - - self.stop() + def _start_instance(self) -> None: self._container = self._client.containers.run( image=f"{self.docker_image.value}:{self.docker_image_tag}", command=f"{MEMGRAPH_DEFAULT_BINARY_PATH} {(' ').join([f'{k}={v}' for k, v in self.config.items()])}", detach=True, ports={f"{self.port}/tcp": self.port}, ) - wait_for_docker_container(self._container, delay=1) - - def start_and_connect(self, restart: bool = False) -> "Memgraph": - """Start the Memgraph instance in a Docker container and return the - connection object. - - Attributes: - restart: A bool indicating if the instance should be - restarted if it's already running. 
- """ - self.start(restart=restart) - - return self.connect() - - def stop(self) -> Dict: - """Stop the Memgraph instance.""" - if not self.is_running(): - return + wait_for_docker_container(self._container) + def _stop_instance(self) -> Dict: self._container.stop() return self._container.wait() @@ -279,7 +265,5 @@ def is_running(self) -> bool: return False self._container.reload() - if self._container.status == DockerContainerStatus.RUNNING.value: - return True - return False + return self._container.status == DockerContainerStatus.RUNNING.value diff --git a/gqlalchemy/loaders.py b/gqlalchemy/loaders.py index 2b2ac70e..ab17131c 100644 --- a/gqlalchemy/loaders.py +++ b/gqlalchemy/loaders.py @@ -12,27 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. +import platform +from abc import ABC, abstractmethod +from enum import Enum +from dataclasses import dataclass, field from string import Template +from typing import List, Dict, Any, Optional, Union + +import adlfs +import pyarrow.dataset as ds +from pyarrow import fs +from dacite import from_dict -from . import Memgraph -from .query_builder import QueryBuilder, Unwind -from .models import ( +from gqlalchemy import Memgraph +from gqlalchemy.models import ( MemgraphIndex, MemgraphTrigger, TriggerEventObject, TriggerEventType, TriggerExecutionPhase, ) +from gqlalchemy.query_builders.memgraph_query_builder import Operator, QueryBuilder, Unwind -from abc import ABC, abstractmethod -from enum import Enum -from dataclasses import dataclass, field -from dacite import from_dict -from pyarrow import fs -from typing import List, Dict, Any, Optional, Union -import pyarrow.dataset as ds -import adlfs -import platform NAME_MAPPINGS_KEY = "name_mappings" ONE_TO_MANY_RELATIONS_KEY = "one_to_many_relations" @@ -52,15 +53,15 @@ FEATHER_EXTENSION = "feather" ARROW_EXTENSION = "arrow" -BLOB_ACCOUNT_NAME = "blob_account_name" -BLOB_ACCOUNT_KEY = "blob_account_key" -BLOB_SAS_TOKEN = "blob_sas_token" +BLOB_ACCOUNT_NAME = "account_name" +BLOB_ACCOUNT_KEY = "account_key" +BLOB_SAS_TOKEN = "sas_token" BLOB_CONTAINER_NAME_KEY = "container_name" -S3_REGION = "s3_region" -S3_ACCESS_KEY = "s3_access_key" -S3_SECRET_KEY = "s3_secret_key" -S3_SESSION_TOKEN = "s3_session_token" +S3_REGION = "region" +S3_ACCESS_KEY = "access_key" +S3_SECRET_KEY = "secret_key" +S3_SESSION_TOKEN = "session_token" S3_BUCKET_NAME_KEY = "bucket_name" LOCAL_STORAGE_PATH = "local_storage_path" @@ -209,10 +210,10 @@ def __init__(self, bucket_name: str, **kwargs): bucket_name: Name of the bucket on S3 from which to read the data Kwargs: - s3_access_key: S3 access key. - s3_secret_key: S3 secret key. - s3_region: S3 region. - s3_session_token: S3 session token (Optional). + access_key: S3 access key. + secret_key: S3 secret key. + region: S3 region. + session_token: S3 session token (Optional). Raises: KeyError: kwargs doesn't contain necessary fields. @@ -244,9 +245,9 @@ def __init__(self, container_name: str, **kwargs) -> None: container_name: Name of the Blob container storing data. Kwargs: - blob_account_name: Account name from Azure Blob. - blob_account_key: Account key for Azure Blob (Optional - if using sas_token). - blob_sas_token: Shared access signature token for authentification (Optional). + account_name: Account name from Azure Blob. + account_key: Account key for Azure Blob (Optional - if using sas_token). + sas_token: Shared access signature token for authentication (Optional).
Raises: KeyError: kwargs doesn't contain necessary fields. @@ -382,18 +383,20 @@ class TableToGraphImporter: _TriggerQueryTemplate = Template( Unwind(list_expression="createdVertices", variable="$node_a") .with_(results={"$node_a": ""}) - .where(item="$node_a:$label_2", operator="MATCH", expression="($node_b:$label_1)") - .where(item="$node_b.$property_1", operator="=", expression="$node_a.$property_2") + .where(item="$node_a", operator=Operator.LABEL_FILTER, expression="$label_2") + .match() + .node(labels="$label_1", variable="$node_b") + .where(item="$node_b.$property_1", operator=Operator.EQUAL, expression="$node_a.$property_2") .create() .node(variable="$from_node") - .to(edge_label="$edge_type") + .to(relationship_type="$relationship_type") .node(variable="$to_node") .construct_query() ) @staticmethod def _create_trigger_cypher_query( - label1: str, label2: str, property1: str, property2: str, edge_type: str, from_entity: bool + label1: str, label2: str, property1: str, property2: str, relationship_type: str, from_entity: bool ) -> str: """Creates a Cypher query for the translation trigger. @@ -402,7 +405,7 @@ def _create_trigger_cypher_query( label2: Label of the second node. property1: Property of the first node. property2: Property of the second node. - edge_type: Label for the relationship that the trigger creates. + relationship_type: Label for the relationship that the trigger creates. from_entity: Indicate whether the relationship goes from or to the first entity. """ from_node, to_node = TableToGraphImporter._DIRECTION[from_entity] @@ -416,7 +419,7 @@ def _create_trigger_cypher_query( property_2=property2, from_node=from_node, to_node=to_node, - edge_type=edge_type, + relationship_type=relationship_type, ) def __init__( @@ -497,7 +500,7 @@ def _create_triggers(self) -> None: property2 = self._name_mapper.get_property_name( collection_name=one_to_many_mapping.table_name, column_name=mapping.foreign_key.reference_key ) - edge_type = mapping.label + relationship_type = mapping.label from_entity = mapping.from_entity self._create_trigger( @@ -505,7 +508,7 @@ def _create_triggers(self) -> None: label2=label2, property1=property1, property2=property2, - edge_type=edge_type, + relationship_type=relationship_type, from_entity=from_entity, ) self._create_trigger( @@ -513,12 +516,12 @@ def _create_triggers(self) -> None: label2=label1, property1=property2, property2=property1, - edge_type=edge_type, + relationship_type=relationship_type, from_entity=not from_entity, ) def _create_trigger( - self, label1: str, label2: str, property1: str, property2: str, edge_type: str, from_entity: bool + self, label1: str, label2: str, property1: str, property2: str, relationship_type: str, from_entity: bool ) -> None: """Creates a translation trigger in Memgraph. @@ -527,7 +530,7 @@ def _create_trigger( label2: Label of the second node. property1: Property of the first node. property2: Property of the second node. - edge_type: Label for the relationship that the trigger creates. + relationship_type: Label for the relationship that the trigger creates. from_entity: Indicate whether the relationship goes from or to the first entity. 
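The rewritten template splits the old overloaded where() calls into an explicit label filter (Operator.LABEL_FILTER) plus a separate MATCH clause. For a one-to-many mapping, the static helper should therefore emit Cypher of roughly the following shape; the labels and properties below are illustrative:

from gqlalchemy.loaders import TableToGraphImporter

# Private helper, called here only to illustrate the generated trigger body.
query = TableToGraphImporter._create_trigger_cypher_query(
    label1="Person",
    label2="City",
    property1="city_id",
    property2="id",
    relationship_type="LIVES_IN",
    from_entity=True,
)
# Expected shape (variable names depend on _DIRECTION, defined outside this hunk):
# UNWIND createdVertices AS <a> WITH <a> WHERE <a>:City
# MATCH (<b>:Person) WHERE <b>.city_id = <a>.id
# CREATE (<from>)-[:LIVES_IN]->(<to>)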
""" trigger_name = "__".join([label1, property1, label2, property2]) @@ -538,7 +541,7 @@ def _create_trigger( event_object=TriggerEventObject.NODE, execution_phase=TriggerExecutionPhase.BEFORE, statement=TableToGraphImporter._create_trigger_cypher_query( - label1, label2, property1, property2, edge_type, from_entity + label1, label2, property1, property2, relationship_type, from_entity ), ) diff --git a/gqlalchemy/models.py b/gqlalchemy/models.py index 86b3b72b..3fbe488d 100644 --- a/gqlalchemy/models.py +++ b/gqlalchemy/models.py @@ -11,22 +11,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -import warnings import datetime - +import warnings from abc import ABC, abstractmethod from collections import defaultdict from dataclasses import dataclass -from typing import Any, Dict, List, Iterable, Optional, Set, Tuple, Union -from pydantic import BaseModel, PrivateAttr, Extra +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union + +from pydantic import BaseModel, Extra, Field, PrivateAttr # noqa F401 -from .exceptions import ( +from gqlalchemy.exceptions import ( GQLAlchemyError, GQLAlchemySubclassNotFoundWarning, GQLAlchemyDatabaseMissingInFieldError, GQLAlchemyDatabaseMissingInNodeClassError, ) +# Suppress the warning GQLAlchemySubclassNotFoundWarning +IGNORE_SUBCLASSNOTFOUNDWARNING = False + class TriggerEventType: """An enum representing types of trigger events.""" @@ -78,17 +81,27 @@ def list(cls): @dataclass(frozen=True, eq=True) -class MemgraphIndex: +class Index(ABC): label: str property: Optional[str] = None def to_cypher(self) -> str: - property_cypher = f"({self.property})" if self.property else "" - return f":{self.label}{property_cypher}" + return f":{self.label}{f'({self.property})' if self.property else ''}" + + +@dataclass(frozen=True, eq=True) +class MemgraphIndex(Index): + pass @dataclass(frozen=True, eq=True) -class MemgraphConstraint(ABC): +class Neo4jIndex(Index): + type: Optional[str] = None + uniqueness: Optional[str] = None + + +@dataclass(frozen=True, eq=True) +class Constraint(ABC): label: str @abstractmethod @@ -97,7 +110,7 @@ def to_cypher(self) -> str: @dataclass(frozen=True, eq=True) -class MemgraphConstraintUnique(MemgraphConstraint): +class MemgraphConstraintUnique(Constraint): property: Union[str, Tuple] def to_cypher(self) -> str: @@ -110,7 +123,28 @@ def to_cypher(self) -> str: @dataclass(frozen=True, eq=True) -class MemgraphConstraintExists(MemgraphConstraint): +class MemgraphConstraintExists(Constraint): + property: str + + def to_cypher(self) -> str: + return f"(n:{self.label}) ASSERT EXISTS (n.{self.property})" + + +@dataclass(frozen=True, eq=True) +class Neo4jConstraintUnique(Constraint): + property: Union[str, Tuple] + + def to_cypher(self) -> str: + properties_str = "" + if isinstance(self.property, (tuple, set, list)): + properties_str = ", ".join([f"n.{prop}" for prop in self.property]) + else: + properties_str = f"n.{self.property}" + return f"(n:{self.label}) ASSERT {properties_str} IS UNIQUE" + + +@dataclass(frozen=True, eq=True) +class Neo4jConstraintExists(Constraint): property: str def to_cypher(self) -> str: @@ -283,7 +317,9 @@ def _convert_to_real_type_(cls, data): if sub is None: types = data.get("_type", data.get("_labels")) - warnings.warn(GQLAlchemySubclassNotFoundWarning(types, cls)) + if not IGNORE_SUBCLASSNOTFOUNDWARNING: + warnings.warn(GQLAlchemySubclassNotFoundWarning(types, cls)) + sub = cls return sub(**data) @@ -522,7 +558,7 @@ def 
has_unique_fields(self) -> bool:
     def _label(self) -> str:
         return ":".join(sorted(self._labels))

-    def save(self, db: "Memgraph") -> "Node":  # noqa F821
+    def save(self, db: "Database") -> "Node":  # noqa F821
         """Saves node to Memgraph.
         If the node._id is not None it fetches the node with the same id from
         Memgraph and updates its fields.
@@ -537,7 +573,7 @@ def save(self, db: "Memgraph") -> "Node":  # noqa F821
             self._id = node._id
         return self

-    def load(self, db: "Memgraph") -> "Node":  # noqa F821
+    def load(self, db: "Database") -> "Node":  # noqa F821
         """Loads a node from Memgraph.
         If the node._id is not None it fetches the node from Memgraph with that
         internal id.
@@ -596,7 +632,7 @@ def __str__(self) -> str:
             )
         )

-    def save(self, db: "Memgraph") -> "Relationship":  # noqa F821
+    def save(self, db: "Database") -> "Relationship":  # noqa F821
         """Saves a relationship to Memgraph.
         If relationship._id is not None it finds the relationship in Memgraph
         and updates its properties with the values in `relationship`.
@@ -610,7 +646,7 @@ def save(self, db: "Memgraph") -> "Relationship":  # noqa F821
         self._id = relationship._id
         return self

-    def load(self, db: "Memgraph") -> "Relationship":  # noqa F821
+    def load(self, db: "Database") -> "Relationship":  # noqa F821
         """Returns a relationship loaded from Memgraph.
         If the relationship._id is not None it fetches the relationship from
         Memgraph that has the same internal id.
diff --git a/gqlalchemy/query_builders/__init__.py b/gqlalchemy/query_builders/__init__.py
new file mode 100644
index 00000000..34ce70e6
--- /dev/null
+++ b/gqlalchemy/query_builders/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/gqlalchemy/query_builder.py b/gqlalchemy/query_builders/declarative_base.py
similarity index 53%
rename from gqlalchemy/query_builder.py
rename to gqlalchemy/query_builders/declarative_base.py
index ffc5a204..5ae50f8c 100644
--- a/gqlalchemy/query_builder.py
+++ b/gqlalchemy/query_builders/declarative_base.py
@@ -12,35 +12,43 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
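With save()/load() retyped from "Memgraph" to "Database", the same model classes can be persisted through any database client. A sketch, assuming the Neo4j client's constructor takes the usual connection parameters (its signature is not part of this diff):

from gqlalchemy import Memgraph, Node
from gqlalchemy.vendors.neo4j import Neo4j

class Person(Node):
    name: str

# The same model now saves through either backend.
Person(name="Alice").save(db=Memgraph())
Person(name="Alice").save(
    db=Neo4j(host="localhost", port=7688, username="neo4j", password="test")  # arguments assumed
)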
-from enum import Enum import re from abc import ABC, abstractmethod -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union - -from .memgraph import Connection, Memgraph -from .utilities import to_cypher_labels, to_cypher_properties, to_cypher_value -from .models import Node, Relationship -from .exceptions import ( - GQLAlchemyLiteralAndExpressionMissingInWhere, - GQLAlchemyExtraKeywordArgumentsInWhere, +from enum import Enum +from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, Union + +from gqlalchemy.exceptions import ( + GQLAlchemyExtraKeywordArguments, + GQLAlchemyInstantiationError, + GQLAlchemyLiteralAndExpressionMissing, GQLAlchemyMissingOrder, + GQLAlchemyOperatorTypeError, GQLAlchemyOrderByTypeError, + GQLAlchemyResultQueryTypeError, + GQLAlchemyTooLargeTupleInResultQuery, ) +from gqlalchemy.graph_algorithms.integrated_algorithms import IntegratedAlgorithm +from gqlalchemy.vendors.memgraph import Memgraph +from gqlalchemy.models import Node, Relationship +from gqlalchemy.utilities import to_cypher_labels, to_cypher_properties, to_cypher_value, to_cypher_qm_arguments +from gqlalchemy.vendors.database_client import DatabaseClient class DeclarativeBaseTypes: CALL = "CALL" CREATE = "CREATE" DELETE = "DELETE" - EDGE = "EDGE" + FOREACH = "FOREACH" LIMIT = "LIMIT" LOAD_CSV = "LOAD_CSV" MATCH = "MATCH" MERGE = "MERGE" NODE = "NODE" ORDER_BY = "ORDER BY" + RELATIONSHIP = "RELATIONSHIP" REMOVE = "REMOVE" RETURN = "RETURN" + SET = "SET" SKIP = "SKIP" UNION = "UNION" UNWIND = "UNWIND" @@ -59,6 +67,12 @@ class MatchConstants: VARIABLE = "variable" +class Result(Enum): + RETURN = 1 + YIELD = 2 + WITH = 3 + + class Where(Enum): WHERE = 1 AND = 2 @@ -67,6 +81,19 @@ class Where(Enum): NOT = 5 +class Operator(Enum): + ASSIGNMENT = "=" + EQUAL = "=" + GEQ_THAN = ">=" + GREATER_THAN = ">" + INEQUAL = "<>" + LABEL_FILTER = ":" + LESS_THAN = "<" + LEQ_THAN = "<=" + NOT_EQUAL = "!=" + INCREMENT = "+=" + + class Order(Enum): ASC = 1 ASCENDING = 2 @@ -95,17 +122,6 @@ def construct_query(self) -> str: pass -class LoadCsvPartialQuery(PartialQuery): - def __init__(self, path: str, header: bool, row: str): - super().__init__(DeclarativeBaseTypes.LOAD_CSV) - self.path = path - self.header = header - self.row = row - - def construct_query(self) -> str: - return f" LOAD CSV FROM '{self.path}' " + ("WITH" if self.header else "NO") + f" HEADER AS {self.row} " - - class MatchPartialQuery(PartialQuery): def __init__(self, optional: bool): super().__init__(DeclarativeBaseTypes.MATCH) @@ -136,11 +152,11 @@ def construct_query(self) -> str: class CallPartialQuery(PartialQuery): - def __init__(self, procedure: str, arguments: str): + def __init__(self, procedure: str, arguments: Optional[Union[str, Tuple[Union[str, int, float]]]]): super().__init__(DeclarativeBaseTypes.CALL) self.procedure = procedure - self.arguments = arguments + self.arguments = to_cypher_qm_arguments(arguments) def construct_query(self) -> str: return f" CALL {self.procedure}({self.arguments if self.arguments else ''}) " @@ -149,9 +165,8 @@ def construct_query(self) -> str: class WhereConditionPartialQuery(PartialQuery): _LITERAL = "literal" _EXPRESSION = "expression" - _LABEL_FILTER = ":" - def __init__(self, item: str, operator: str, keyword: Where = Where.WHERE, is_negated: bool = False, **kwargs): + def __init__(self, item: str, operator: Operator, keyword: Where = Where.WHERE, is_negated: bool = False, **kwargs): super().__init__(type=keyword.name if not is_negated else f"{keyword.name} {Where.NOT.name}") 
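The new Operator enum replaces the bare operator strings used before. Note that ASSIGNMENT and EQUAL share the value "=", so EQUAL is an enum alias of ASSIGNMENT, and the _value2member_map_ check below accepts raw strings as well as members:

from gqlalchemy.query_builders.declarative_base import Operator

assert Operator.EQUAL.value == "="
assert Operator.EQUAL is Operator.ASSIGNMENT      # alias: both map to "="
assert "=" in Operator._value2member_map_         # raw strings still validate
assert Operator(">=") is Operator.GEQ_THAN        # lookup by value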
self.query = self._build_where_query(item=item, operator=operator, **kwargs) @@ -159,109 +174,131 @@ def construct_query(self) -> str: """Constructs a where partial query.""" return f" {self.type} {self.query} " - def _build_where_query(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + def _build_where_query(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase": """Builds parts of a WHERE Cypher query divided by the boolean operators.""" literal = kwargs.get(WhereConditionPartialQuery._LITERAL) value = kwargs.get(WhereConditionPartialQuery._EXPRESSION) + operator_str = operator.value if isinstance(operator, Operator) else operator + + if operator_str not in Operator._value2member_map_: + raise GQLAlchemyOperatorTypeError(clause=self.type) + if value is None: if literal is None: - raise GQLAlchemyLiteralAndExpressionMissingInWhere + raise GQLAlchemyLiteralAndExpressionMissing(clause=self.type) value = to_cypher_value(literal) elif literal is not None: - raise GQLAlchemyExtraKeywordArgumentsInWhere - - return ("" if operator == WhereConditionPartialQuery._LABEL_FILTER else " ").join([item, operator, value]) + raise GQLAlchemyExtraKeywordArguments(clause=self.type) + + return ("" if operator_str == Operator.LABEL_FILTER.value else " ").join( + [ + item, + operator_str, + value, + ] + ) class WhereNotConditionPartialQuery(WhereConditionPartialQuery): - def __init__(self, item: str, operator: str, keyword: Where = Where.WHERE, **kwargs): + def __init__(self, item: str, operator: Operator, keyword: Where = Where.WHERE, **kwargs): super().__init__(item=item, operator=operator, keyword=keyword, is_negated=True, **kwargs) class AndWhereConditionPartialQuery(WhereConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.AND, **kwargs) class AndNotWhereConditionPartialQuery(WhereNotConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.AND, **kwargs) class OrWhereConditionPartialQuery(WhereConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.OR, **kwargs) class OrNotWhereConditionPartialQuery(WhereNotConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.OR, **kwargs) class XorWhereConditionPartialQuery(WhereConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.XOR, **kwargs) class XorNotWhereConditionPartialQuery(WhereNotConditionPartialQuery): - def __init__(self, item: str, operator: str, **kwargs): + def __init__(self, item: str, operator: Operator, **kwargs): super().__init__(item=item, operator=operator, keyword=Where.XOR, **kwargs) class NodePartialQuery(PartialQuery): - def __init__(self, variable: str, labels: str, properties: str): + def __init__(self, variable: Optional[str], labels: str, properties: str): super().__init__(DeclarativeBaseTypes.NODE) - self._variable = variable + self._variable = "" if variable is None 
else variable self._labels = labels self._properties = properties @property def variable(self) -> str: - return self._variable if self._variable is not None else "" + return self._variable @property def labels(self) -> str: - return self._labels if self._labels is not None else "" + return self._labels @property def properties(self) -> str: - return self._properties if self._properties is not None else "" + return self._properties def construct_query(self) -> str: """Constructs a node partial query.""" return f"({self.variable}{self.labels}{' ' + self.properties if self.properties else ''})" -class EdgePartialQuery(PartialQuery): +class RelationshipPartialQuery(PartialQuery): def __init__( - self, variable: Optional[str], labels: Optional[str], properties: Optional[str], directed: bool, from_: bool + self, + variable: Optional[str], + relationship_type: str, + algorithm: str, + properties: str, + directed: bool, + from_: bool, ): - super().__init__(DeclarativeBaseTypes.EDGE) + super().__init__(DeclarativeBaseTypes.RELATIONSHIP) self.directed = directed - self._variable = variable - self._labels = labels + self._variable = "" if variable is None else variable + self._relationship_type = relationship_type + self._algorithm = algorithm self._properties = properties self._from = from_ @property def variable(self) -> str: - return self._variable if self._variable is not None else "" + return self._variable @property - def labels(self) -> str: - return self._labels if self._labels is not None else "" + def relationship_type(self) -> str: + return self._relationship_type + + @property + def algorithm(self) -> str: + return self._algorithm @property def properties(self) -> str: - return self._properties if self._properties is not None else "" + return self._properties def construct_query(self) -> str: - """Constructs an edge partial query.""" - relationship_query = f"{self.variable}{self.labels}{self.properties}" + """Constructs a relationship partial query.""" + relationship_query = f"{self.variable}{self.relationship_type}{self.algorithm}{self.properties}" if not self.directed: relationship_query = f"-[{relationship_query}]-" @@ -296,21 +333,89 @@ def dict_to_alias_statement(alias_dict: Dict[str, str]) -> str: ) -class WithPartialQuery(PartialQuery): - def __init__(self, results: Dict[str, str]): - super().__init__(DeclarativeBaseTypes.WITH) +class _ResultPartialQuery(PartialQuery): + def __init__( + self, + keyword: Result, + results: Optional[ + Union[ + str, + Tuple[str, str], + Dict[str, str], + List[Union[str, Tuple[str, str]]], + Set[Union[str, Tuple[str, str]]], + ] + ] = None, + ): + if type(self) is _ResultPartialQuery: + raise GQLAlchemyInstantiationError(class_name=type(self).__name__) + + super().__init__(type=keyword.name) + + if results is None: + self.query = None + elif isinstance(results, str): + self.query = results + elif isinstance(results, dict): + self.query = self._return_read_dict(results) + elif isinstance(results, tuple): + self.query = self._return_read_tuple(results) + elif isinstance(results, (list, set)): + self.query = self._return_read_iterable(results) + else: + raise GQLAlchemyResultQueryTypeError(clause=self.type) - self._results = results + def construct_query(self) -> str: + """Creates a RETURN/YIELD/WITH statement Cypher partial query.""" + if self.query is None: + return f" {self.type} * " - @property - def results(self) -> str: - return self._results if self._results is not None else "" + return f" {self.type} {self.query} " - def construct_query(self) -> 
str: - """Creates a WITH statement Cypher partial query.""" - if len(self.results) == 0: - return " WITH * " - return f" WITH {dict_to_alias_statement(self.results)} " + def _return_read_iterable( + self, iterable: Union[List[Union[str, Tuple[str, str]]], Set[Union[str, Tuple[str, str]]]] + ): + return ", ".join(self._return_read_item(item=item) for item in iterable) + + def _return_read_item(self, item: Union[str, Tuple]) -> str: + if isinstance(item, str): + return item + + if isinstance(item, tuple): + return f"{self._return_read_tuple(item)}" + + raise GQLAlchemyResultQueryTypeError(clause=self.type) + + def _return_read_tuple(self, tuple: Tuple[str, str]) -> str: + if len(tuple) > 2: + raise GQLAlchemyTooLargeTupleInResultQuery(clause=self.type) + + if not isinstance(tuple[0], str) or not isinstance(tuple[1], str): + raise GQLAlchemyResultQueryTypeError(clause=self.type) + + if tuple[0] == tuple[1] or tuple[1] == "": + return f"{tuple[0]}" + + return f"{tuple[0]} AS {tuple[1]}" + + def _return_read_dict(self, results: Dict[str, str]): + return f"{dict_to_alias_statement(results)}" + + +class WithPartialQuery(_ResultPartialQuery): + def __init__( + self, + results: Optional[ + Union[ + str, + Tuple[str, str], + Dict[str, str], + List[Union[str, Tuple[str, str]]], + Set[Union[str, Tuple[str, str]]], + ] + ] = None, + ): + super().__init__(keyword=Result.WITH, results=results) class UnionPartialQuery(PartialQuery): @@ -325,7 +430,7 @@ def construct_query(self) -> str: class DeletePartialQuery(PartialQuery): - def __init__(self, variable_expressions: List[str], detach: bool): + def __init__(self, variable_expressions: Union[str, List[str]], detach: bool): super().__init__(DeclarativeBaseTypes.DELETE) self._variable_expressions = variable_expressions @@ -333,60 +438,58 @@ def __init__(self, variable_expressions: List[str], detach: bool): @property def variable_expressions(self) -> str: - return self._variable_expressions if self._variable_expressions is not None else "" + return self._variable_expressions def construct_query(self) -> str: """Creates a DELETE statement Cypher partial query.""" - return f" {'DETACH' if self.detach else ''} DELETE {', '.join(self.variable_expressions)} " + return f" {'DETACH' if self.detach else ''} DELETE {', '.join(self.variable_expressions) if isinstance(self.variable_expressions, list) else self.variable_expressions} " class RemovePartialQuery(PartialQuery): - def __init__(self, items: List[str]): + def __init__(self, items: Union[str, List[str]]): super().__init__(DeclarativeBaseTypes.REMOVE) self._items = items @property def items(self) -> str: - return self._items if self._items is not None else "" + return self._items def construct_query(self) -> str: """Creates a REMOVE statement Cypher partial query.""" - return f" REMOVE {', '.join(self.items)} " - - -class YieldPartialQuery(PartialQuery): - def __init__(self, results: Dict[str, str]): - super().__init__(DeclarativeBaseTypes.YIELD) - - self._results = results - - @property - def results(self) -> str: - return self._results if self._results is not None else "" - - def construct_query(self) -> str: - """Creates a YIELD statement Cypher partial query.""" - if len(self.results) == 0: - return " YIELD * " - return f" YIELD {dict_to_alias_statement(self.results)} " + return f" REMOVE {', '.join(self.items) if isinstance(self.items, list) else self.items} " -class ReturnPartialQuery(PartialQuery): - def __init__(self, results: Dict[str, str]): - super().__init__(DeclarativeBaseTypes.RETURN) - - 
self._results = results +class YieldPartialQuery(_ResultPartialQuery): + def __init__( + self, + results: Optional[ + Union[ + str, + Tuple[str, str], + Dict[str, str], + List[Union[str, Tuple[str, str]]], + Set[Union[str, Tuple[str, str]]], + ] + ] = None, + ): + super().__init__(keyword=Result.YIELD, results=results) - @property - def results(self) -> str: - return self._results if self._results is not None else "" - def construct_query(self) -> str: - """Creates a RETURN statement Cypher partial query.""" - if len(self.results) == 0: - return " RETURN * " - return f" RETURN {dict_to_alias_statement(self.results)} " +class ReturnPartialQuery(_ResultPartialQuery): + def __init__( + self, + results: Optional[ + Union[ + str, + Tuple[str, str], + Dict[str, str], + List[Union[str, Tuple[str, str]]], + Set[Union[str, Tuple[str, str]]], + ] + ] = None, + ): + super().__init__(keyword=Result.RETURN, results=results) class OrderByPartialQuery(PartialQuery): @@ -405,7 +508,7 @@ def construct_query(self) -> str: def _order_by_read_item(self, item: Union[str, Tuple[str, Order]]) -> str: if isinstance(item, str): - return f"{self._order_by_read_str(item)}" + return item elif isinstance(item, tuple): return f"{self._order_by_read_tuple(item)}" else: @@ -414,9 +517,6 @@ def _order_by_read_item(self, item: Union[str, Tuple[str, Order]]) -> str: def _order_by_read_list(self, property: List[Union[str, Tuple[str, Order]]]): return ", ".join(self._order_by_read_item(item=item) for item in property) - def _order_by_read_str(self, property: str) -> str: - return f"{property}" - def _order_by_read_tuple(self, tuple: Tuple[str, Order]) -> str: if not isinstance(tuple[1], Order): raise GQLAlchemyMissingOrder @@ -425,7 +525,7 @@ def _order_by_read_tuple(self, tuple: Tuple[str, Order]) -> str: class LimitPartialQuery(PartialQuery): - def __init__(self, integer_expression: str): + def __init__(self, integer_expression: Union[str, int]): super().__init__(DeclarativeBaseTypes.LIMIT) self.integer_expression = integer_expression @@ -436,7 +536,7 @@ def construct_query(self) -> str: class SkipPartialQuery(PartialQuery): - def __init__(self, integer_expression: str): + def __init__(self, integer_expression: Union[str, int]): super().__init__(DeclarativeBaseTypes.SKIP) self.integer_expression = integer_expression @@ -456,8 +556,72 @@ def construct_query(self) -> str: return f"{self.custom_cypher}" +class ForeachPartialQuery(PartialQuery): + def __init__(self, variable: str, expression: str, update_clauses: str): + super().__init__(DeclarativeBaseTypes.FOREACH) + self._variable = variable + self._expression = expression + self._update_clauses = update_clauses + + @property + def variable(self) -> str: + return self._variable + + @property + def expression(self) -> str: + return self._expression + + @property + def update_clauses(self) -> str: + return self._update_clauses + + def construct_query(self) -> str: + """Creates a FOREACH statement Cypher partial query.""" + return f" FOREACH ( {self.variable} IN {self.expression} | {self.update_clauses} ) " + + +class SetPartialQuery(PartialQuery): + _LITERAL = "literal" + _EXPRESSION = "expression" + + def __init__(self, item: str, operator: Operator, **kwargs): + super().__init__(DeclarativeBaseTypes.SET) + + self.query = self._build_set_query(item=item, operator=operator, **kwargs) + + def construct_query(self) -> str: + """Constructs a set partial query.""" + return f" {self.type} {self.query}" + + def _build_set_query(self, item: str, operator: Operator, **kwargs) -> 
"DeclarativeBase": + """Builds parts of a SET Cypher query divided by the boolean operators.""" + literal = kwargs.get(SetPartialQuery._LITERAL) + value = kwargs.get(SetPartialQuery._EXPRESSION) + + operator_str = operator.value if isinstance(operator, Operator) else operator + + if operator_str not in Operator._value2member_map_: + raise GQLAlchemyOperatorTypeError(clause=self.type) + + if value is None: + if literal is None: + raise GQLAlchemyLiteralAndExpressionMissing(clause=self.type) + + value = to_cypher_value(literal) + elif literal is not None: + raise GQLAlchemyExtraKeywordArguments(clause=self.type) + + return ("" if operator_str == Operator.LABEL_FILTER.value else " ").join( + [ + item, + operator_str, + value, + ] + ) + + class DeclarativeBase(ABC): - def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): + def __init__(self, connection: Optional[DatabaseClient] = None): self._query: List[PartialQuery] = [] self._connection = connection if connection is not None else Memgraph() self._fetch_results: bool = False @@ -471,6 +635,17 @@ def match(self, optional: bool = False) -> "DeclarativeBase": Returns: A `DeclarativeBase` instance for constructing queries. + + Examples: + Get all nodes with a certain label: + + Python: `match().node(labels='Country', variable='c').return_(results='c').execute()` + Cypher: `MATCH (c:Country) RETURN c;` + + Get a relationship of a certain type that connects two nodes with certain label: + + Python: `match().node(labels='Town', variable='t').to(relationship_type='BELONGS_TO', variable='b').node(labels='Country', variable='c').return_(results='b').execute()` + Cypher: `MATCH (t:Town)-[b:BELONGS_TO]->(c:Country) RETURN b;` """ self._query.append(MatchPartialQuery(optional)) @@ -483,6 +658,12 @@ def merge(self) -> "DeclarativeBase": Returns: A `DeclarativeBase` instance for constructing queries. + + Example: + Merge node with properties: + + Python: `merge().node(variable='city').where(item='city.name', operator=Operator.EQUAL, literal='London').return_(results='city').execute()` + Cypher: `MERGE (city) WHERE city.name = 'London' RETURN city;` """ self._query.append(MergePartialQuery()) @@ -493,12 +674,20 @@ def create(self) -> "DeclarativeBase": Returns: A `DeclarativeBase` instance for constructing queries. + + Example: + Create a single node: + + Python: `create().node(labels='Person', variable='p').return_(results='p').execute()` + Cypher: `CREATE (p:Person) RETURN p;` """ self._query.append(CreatePartialQuery()) return self - def call(self, procedure: str, arguments: Optional[str] = None) -> "DeclarativeBase": + def call( + self, procedure: str, arguments: Optional[Union[str, Tuple[Union[str, int, float]]]] = None + ) -> "DeclarativeBase": """Call a query module procedure. Args: @@ -509,6 +698,17 @@ def call(self, procedure: str, arguments: Optional[str] = None) -> "DeclarativeB Returns: A `DeclarativeBase` instance for constructing queries. + + Examples: + Call procedure with no arguments: + + Python: `call('pagerank.get').yield_().return_().execute()` + Cypher: `CALL pagerank.get() YIELD * RETURN *;` + + Call procedure with arguments: + + Python: `call('json_util.load_from_url', 'https://some-url.com').yield_('objects').return_(results='objects').execute() + Cypher: `CALL json_util.load_from_url(https://some-url.com) YIELD objects RETURN objects;` """ self._query.append(CallPartialQuery(procedure, arguments)) @@ -533,6 +733,12 @@ def node( Returns: A `DeclarativeBase` instance for constructing queries. 
+ + Example: + Create a node and return it: + + Python: `create().node(labels='Person', variable='n', first_name='Kate').return_(results='n').execute()` + Cypher: `CREATE (n:Person {first_name: 'Kate'}) RETURN n;` """ if not self._is_linking_valid_with_query(DeclarativeBaseTypes.NODE): raise InvalidMatchChainException() @@ -550,51 +756,69 @@ def node( def to( self, - edge_label: Optional[str] = "", + relationship_type: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, + algorithm: Optional[IntegratedAlgorithm] = None, **kwargs, ) -> "DeclarativeBase": """Add a relationship pattern to the query. Args: - edge_label: A string representing the type of the relationship. + relationship_type: A string representing the type of the relationship. directed: A bool indicating if the relationship is directed. variable: A string representing the name of the variable for storing results of the relationship pattern. relationship: A `Relationship` object to construct the pattern from. + algorithm: algorithm object to use over graph data. **kwargs: Arguments representing the properties of the relationship. Returns: A `DeclarativeBase` instance for constructing queries. + + Example: + Match and return a relationship: + + Python: `match().node(labels='Town', variable='t').to(relationship_type='BELONGS_TO', variable='b').node(labels='Country', variable='c').return_(results='b').execute()` + Cypher: `MATCH (t:Town)-[b:BELONGS_TO]->(c:Country) RETURN b;` """ - if not self._is_linking_valid_with_query(DeclarativeBaseTypes.EDGE): + if not self._is_linking_valid_with_query(DeclarativeBaseTypes.RELATIONSHIP): raise InvalidMatchChainException() if relationship is None: - type_str = to_cypher_labels(edge_label) + type_str = to_cypher_labels(relationship_type) properties_str = to_cypher_properties(kwargs) else: type_str = to_cypher_labels(relationship._type) properties_str = to_cypher_properties(relationship._properties) - self._query.append(EdgePartialQuery(variable, type_str, properties_str, bool(directed), False)) + self._query.append( + RelationshipPartialQuery( + variable=variable, + relationship_type=type_str, + algorithm="" if algorithm is None else str(algorithm), + properties=properties_str, + directed=bool(directed), + from_=False, + ) + ) return self def from_( self, - edge_label: Optional[str] = "", + relationship_type: Optional[str] = "", directed: Optional[bool] = True, variable: Optional[str] = None, relationship: Optional["Relationship"] = None, + algorithm: Optional[IntegratedAlgorithm] = None, **kwargs, ) -> "Match": """Add a relationship pattern to the query. Args: - edge_label: A string representing the type of the relationship. + relationship_type: A string representing the type of the relationship. directed: A bool indicating if the relationship is directed. variable: A string representing the name of the variable for storing results of the relationship pattern. @@ -603,22 +827,37 @@ def from_( Returns: A `DeclarativeBase` instance for constructing queries. 
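The new algorithm parameter inlines an IntegratedAlgorithm into the relationship pattern. A sketch, assuming a concrete subclass such as BreadthFirstSearch (only the base class is imported in this diff):

from gqlalchemy.graph_algorithms.integrated_algorithms import BreadthFirstSearch  # subclass assumed
from gqlalchemy.query_builders.memgraph_query_builder import QueryBuilder

query = (
    QueryBuilder()
    .match()
    .node(labels="City", variable="c1")
    .to(relationship_type="ROAD", algorithm=BreadthFirstSearch(), variable="r")
    .node(labels="City", variable="c2")
    .return_()
    .construct_query()
)
# Roughly: MATCH (c1:City)-[r:ROAD *BFS]->(c2:City) RETURN *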
+ + Example: + Match and return a relationship: + + Python: `match().node(labels='Country', variable='c').from_(relationship_type='BELONGS_TO', variable='b').node(labels='Town', variable='t').return_(results='b').execute()` + Cypher: `MATCH (c:Country)<-[b:BELONGS_TO]-(t:Town) RETURN b;` """ - if not self._is_linking_valid_with_query(DeclarativeBaseTypes.EDGE): + if not self._is_linking_valid_with_query(DeclarativeBaseTypes.RELATIONSHIP): raise InvalidMatchChainException() if relationship is None: - labels_str = to_cypher_labels(edge_label) + type_str = to_cypher_labels(relationship_type) properties_str = to_cypher_properties(kwargs) else: - labels_str = to_cypher_labels(relationship._type) + type_str = to_cypher_labels(relationship._type) properties_str = to_cypher_properties(relationship._properties) - self._query.append(EdgePartialQuery(variable, labels_str, properties_str, bool(directed), True)) + self._query.append( + RelationshipPartialQuery( + variable=variable, + relationship_type=type_str, + algorithm="" if algorithm is None else str(algorithm), + properties=properties_str, + directed=bool(directed), + from_=True, + ) + ) return self - def where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + def where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase": """Creates a WHERE statement Cypher partial query. Args: @@ -639,17 +878,17 @@ def where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": Examples: Filtering query results by the equality of `name` properties of two connected nodes. - Python: `match().node(variable="n").to().node(variable="m").where(item="n.name", operator="=", expression="m.name").return_()` + Python: `match().node(variable='n').to().node(variable='m').where(item='n.name', operator=Operator.EQUAL, expression='m.name').return_()` Cypher: `MATCH (n)-[]->(m) WHERE n.name = m.name RETURN *;` Filtering query results by the node label. - Python: `match().node(variable="n").where(item="n", operator=":", expression="User").return_()` + Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').return_()` Cypher: `MATCH (n) WHERE n:User RETURN *;` Filtering query results by the comparison of node property and literal. - Python: `match().node(variable="n").where(item="n.age", operator=">", literal=18).return_()` + Python: `match().node(variable='n').where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()` Cypher: `MATCH (n) WHERE n.age > 18 RETURN *;` """ # WHERE item operator (literal | expression) @@ -659,7 +898,7 @@ def where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": return self - def where_not(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + def where_not(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase": """Creates a WHERE NOT statement Cypher partial query. Args: @@ -680,14 +919,14 @@ def where_not(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": Examples: Filtering query results by the equality of `name` properties of two connected nodes. 
-            Python: `match().node(variable="n").to().node(variable="m").where_not(item="n.name", operator="=", expression="m.name").return_()`
+            Python: `match().node(variable='n').to().node(variable='m').where_not(item='n.name', operator=Operator.EQUAL, expression='m.name').return_()`
             Cypher: `MATCH (n)-[]->(m) WHERE NOT n.name = m.name RETURN *;`
         """
         self._query.append(WhereNotConditionPartialQuery(item=item, operator=operator, **kwargs))

         return self

-    def and_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+    def and_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase":
         """Creates an AND statement as a part of WHERE Cypher partial query.

         Args:
@@ -704,14 +943,14 @@ def and_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
         Examples:
             Filtering query results by node label or the comparison of node property and literal.

-            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").and_where(item="n.age", operator=">", literal=18).return_()`
+            Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').and_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
             Cypher: `MATCH (n) WHERE n:User AND n.age > 18 RETURN *;`
         """
         self._query.append(AndWhereConditionPartialQuery(item=item, operator=operator, **kwargs))

         return self

-    def and_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+    def and_not_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase":
         """Creates an AND NOT statement as a part of WHERE Cypher partial query.

         Args:
@@ -728,14 +967,14 @@ def and_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase"
         Examples:
             Filtering query results by node label or the comparison of node property and literal.

-            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").and_not_where(item="n.age", operator=">", literal=18).return_()`
+            Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').and_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
             Cypher: `MATCH (n) WHERE n:User AND NOT n.age > 18 RETURN *;`
         """
         self._query.append(AndNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs))

         return self

-    def or_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+    def or_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase":
         """Creates an OR statement as a part of WHERE Cypher partial query.

         Args:
@@ -752,14 +991,14 @@ def or_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
         Examples:
             Filtering query results by node label or the comparison of node property and literal.

-            Python: `match().node(variable="n").where(item="n", operator=":", expression="User").or_where(item="n.age", operator=">", literal=18).return_()`
+            Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').or_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()`
             Cypher: `MATCH (n) WHERE n:User OR n.age > 18 RETURN *;`
         """
         self._query.append(OrWhereConditionPartialQuery(item=item, operator=operator, **kwargs))

         return self

-    def or_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase":
+    def or_not_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase":
         """Creates an OR NOT statement as a part of WHERE Cypher partial query.
Args: @@ -776,14 +1015,14 @@ def or_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": Examples: Filtering query results by node label or the comparison of node property and literal. - Python: `match().node(variable="n").where(item="n", operator=":", expression="User").or_not_where(item="n.age", operator=">", literal=18).return_()` + Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').or_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()` Cypher: `MATCH (n) WHERE n:User OR NOT n.age > 18 RETURN *;` """ self._query.append(OrNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs)) return self - def xor_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + def xor_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase": """Creates an XOR statement as a part of WHERE Cypher partial query. Args: @@ -800,14 +1039,14 @@ def xor_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": Examples: Filtering query results by node label or the comparison of node property and literal. - Python: `match().node(variable="n").where(item="n", operator=":", expression="User").xor_where(item="n.age", operator=">", literal=18).return_()` + Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').xor_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()` Cypher: `MATCH (n) WHERE n:User XOR n.age > 18 RETURN *;` """ self._query.append(XorWhereConditionPartialQuery(item=item, operator=operator, **kwargs)) return self - def xor_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase": + def xor_not_where(self, item: str, operator: Operator, **kwargs) -> "DeclarativeBase": """Creates an XOR NOT statement as a part of WHERE Cypher partial query. Args: @@ -824,7 +1063,7 @@ def xor_not_where(self, item: str, operator: str, **kwargs) -> "DeclarativeBase" Examples: Filtering query results by node label or the comparison of node property and literal. - Python: `match().node(variable="n").where(item="n", operator=":", expression="User").xor_not_where(item="n.age", operator=">", literal=18).return_()` + Python: `match().node(variable='n').where(item='n', operator=Operator.LABEL_FILTER, expression='User').xor_not_where(item='n.age', operator=Operator.GREATER_THAN, literal=18).return_()` Cypher: `MATCH (n) WHERE n:User XOR NOT n.age > 18 RETURN *;` """ self._query.append(XorNotWhereConditionPartialQuery(item=item, operator=operator, **kwargs)) @@ -840,12 +1079,27 @@ def unwind(self, list_expression: str, variable: str) -> "DeclarativeBase": Returns: A `DeclarativeBase` instance for constructing queries. + + Example: + Python: `unwind(list_expression="[1, 2, 3, null]", variable="x").return_(results=["x", ("'val'", "y")]).execute()` + Cypher: `UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y;` """ self._query.append(UnwindPartialQuery(list_expression, variable)) return self - def with_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase": + def with_( + self, + results: Optional[ + Union[ + str, + Tuple[str, str], + Dict[str, str], + List[Union[str, Tuple[str, str]]], + Set[Union[str, Tuple[str, str]]], + ] + ] = None, + ) -> "DeclarativeBase": """Chain together parts of a query, piping the results from one to be used as starting points or criteria in the next. 
@@ -853,8 +1107,18 @@ def with_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
             results: A dictionary mapping variables in the first query with
                 aliases in the second query.

+        Raises:
+            GQLAlchemyResultQueryTypeError: Raises an error when the provided argument is of wrong type.
+            GQLAlchemyTooLargeTupleInResultQuery: Raises an error when the given tuple has length larger than 2.
+
         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Example:
+            Pipe the result from the first part of the query for further use:
+
+            Python: `match().node(variable='n').with_('n').execute()`
+            Cypher: `MATCH (n) WITH n;`
         """
         self._query.append(WithPartialQuery(results))

@@ -869,66 +1133,139 @@ def union(self, include_duplicates: Optional[bool] = True) -> "DeclarativeBase":
         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Examples:
+            Combine queries and retain duplicates:
+
+            Python: `match().node(variable="c", labels="Country").return_(results=("c.name", "columnName")).union().match().node(variable="p", labels="Person").return_(results=("p.name", "columnName")).execute()`
+            Cypher: `MATCH (c:Country) RETURN c.name AS columnName UNION ALL MATCH (p:Person) RETURN p.name AS columnName;`
+
+            Combine queries and remove duplicates:
+
+            Python: `match().node(variable="c", labels="Country").return_(results=("c.name", "columnName")).union(include_duplicates=False).match().node(variable="p", labels="Person").return_(results=("p.name", "columnName")).execute()`
+            Cypher: `MATCH (c:Country) RETURN c.name AS columnName UNION MATCH (p:Person) RETURN p.name AS columnName;`
         """
         self._query.append(UnionPartialQuery(include_duplicates))

         return self

-    def delete(self, variable_expressions: List[str], detach: Optional[bool] = False) -> "DeclarativeBase":
+    def delete(self, variable_expressions: Union[str, List[str]], detach: Optional[bool] = False) -> "DeclarativeBase":
         """Delete nodes and relationships from the database.

         Args:
-            variable_expressions: A list of strings indicating which nodes
-                and/or relationships should be removed.
+            variable_expressions: A string or list of strings indicating which node(s)
+                and/or relationship(s) should be removed.
             detach: A bool indicating if relationships should be deleted along
                 with a node.

         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Example:
+            Delete a node:
+
+            Python: `match().node(labels='Node1', variable='n1').delete(variable_expressions='n1').execute()`
+            Cypher: `MATCH (n1:Node1) DELETE n1;`
         """
         self._query.append(DeletePartialQuery(variable_expressions, detach))

         return self

-    def remove(self, items: List[str]) -> "DeclarativeBase":
+    def remove(self, items: Union[str, List[str]]) -> "DeclarativeBase":
         """Remove labels and properties from nodes and relationships.

         Args:
-            items: A list of strings indicating which labels and/or properties
+            items: A string or list of strings indicating which label(s) and/or properties
                 should be removed.

         Returns:
             A `DeclarativeBase` instance for constructing queries.
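Since variable_expressions and items now accept a plain string as well as a list, the single-element forms below build identical Cypher (DeletePartialQuery joins a list and passes a string through unchanged):

from gqlalchemy.query_builders.memgraph_query_builder import QueryBuilder

single = QueryBuilder().match().node(variable="n").delete("n", detach=True).construct_query()
listed = QueryBuilder().match().node(variable="n").delete(["n"], detach=True).construct_query()
assert single == listed  # MATCH (n) DETACH DELETE n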
+
+        Example:
+            Remove a property from a node:
+
+            Python: `match().node(labels='Country', variable='n', name='United Kingdom').remove(items='n.name').return_(results='n').execute()`
+            Cypher: `MATCH (n:Country {name: 'United Kingdom'}) REMOVE n.name RETURN n;`
         """
         self._query.append(RemovePartialQuery(items))

         return self

-    def yield_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
+    def yield_(
+        self,
+        results: Optional[
+            Union[
+                str,
+                Tuple[str, str],
+                Dict[str, str],
+                List[Union[str, Tuple[str, str]]],
+                Set[Union[str, Tuple[str, str]]],
+            ]
+        ] = None,
+    ) -> "DeclarativeBase":
         """Yield data from the query.

         Args:
-            results: A dictionary mapping items that are returned with alias
-                names.
+            results: An optional string, tuple or iterable of strings and tuples for alias names.
+
+        Raises:
+            GQLAlchemyResultQueryTypeError: Raises an error when the provided argument is of wrong type.
+            GQLAlchemyTooLargeTupleInResultQuery: Raises an error when the given tuple has length larger than 2.

         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Examples:
+            Yield all data from a query:
+
+            Python: `call(procedure='pagerank.get').yield_().return_().execute()`
+            Cypher: `CALL pagerank.get() YIELD * RETURN *;`
+
+            Yield some data from a query:
+
+            Python: `call(procedure='pagerank.get').yield_(results=['node', 'rank']).return_(results=['node', 'rank']).execute()`
+            Cypher: `CALL pagerank.get() YIELD node, rank RETURN node, rank;`
         """
         self._query.append(YieldPartialQuery(results))

         return self

-    def return_(self, results: Optional[Dict[str, str]] = {}) -> "DeclarativeBase":
+    def return_(
+        self,
+        results: Optional[
+            Union[
+                str,
+                Tuple[str, str],
+                Dict[str, str],
+                List[Union[str, Tuple[str, str]]],
+                Set[Union[str, Tuple[str, str]]],
+            ]
+        ] = None,
+    ) -> "DeclarativeBase":
         """Return data from the query.

         Args:
-            results: A dictionary mapping items that are returned with alias
-                names.
+            results: An optional string, tuple or iterable of strings and tuples for alias names.
+
+        Raises:
+            GQLAlchemyResultQueryTypeError: Raises an error when the provided argument is of wrong type.
+            GQLAlchemyTooLargeTupleInResultQuery: Raises an error when the given tuple has length larger than 2.

         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Examples:
+            Return all variables from a query:
+
+            Python: `match().node(labels='Person', variable='p').return_().execute()`
+            Cypher: `MATCH (p:Person) RETURN *;`
+
+            Return specific variables from a query:
+
+            Python: `match().node(labels='Person', variable='p1').to().node(labels='Person', variable='p2').return_(results=[('p1','first'), 'p2']).execute()`
+            Cypher: `MATCH (p1:Person)-[]->(p2:Person) RETURN p1 AS first, p2;`
         """
-        self._query.append(ReturnPartialQuery(results))
+        self._query.append(ReturnPartialQuery(results=results))
         self._fetch_results = True

         return self
@@ -939,27 +1276,27 @@ def order_by(
         """Creates an ORDER BY statement Cypher partial query.

         Args:
-            properties: Properties and order by which the query results will be ordered.
+            properties: Properties and order (DESC/DESCENDING/ASC/ASCENDING) by which the query results will be ordered.

         Raises:
             GQLAlchemyOrderByTypeError: Raises an error when the given ordering is of the wrong type.
-            GQLAlchemyMissingOrdering: Raises an error when the given property is neither string nor tuple.
+            GQLAlchemyMissingOrder: Raises an error when the given property is neither string nor tuple.

         Returns:
-            self: A partial Cypher query built from the given parameters.
+            A `DeclarativeBase` instance for constructing queries.

         Examples:
             Ordering query results by the property `n.name` in ascending
             order and by the property `n.last_name` in descending order:

-            Python: `match().node(variable="n").return_().order_by(properties=["n.name", ("n.last_name", Order.DESC)])`
+            Python: `match().node(variable='n').return_().order_by(properties=['n.name', ('n.last_name', Order.DESC)]).execute()`
             Cypher: `MATCH (n) RETURN * ORDER BY n.name, n.last_name DESC;`
         """
         self._query.append(OrderByPartialQuery(properties=properties))

         return self

-    def limit(self, integer_expression: str) -> "DeclarativeBase":
+    def limit(self, integer_expression: Union[str, int]) -> "DeclarativeBase":
         """Limit the number of records when returning results.

         Args:
@@ -968,12 +1305,18 @@
         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Example:
+            Limit the number of returned results:
+
+            Python: `match().node(labels='Person', variable='p').return_().limit(integer_expression='10').execute()`
+            Cypher: `MATCH (p:Person) RETURN * LIMIT 10;`
         """
         self._query.append(LimitPartialQuery(integer_expression))

         return self

-    def skip(self, integer_expression: str) -> "DeclarativeBase":
+    def skip(self, integer_expression: Union[str, int]) -> "DeclarativeBase":
         """Skip a number of records when returning results.

         Args:
@@ -982,6 +1325,12 @@
         Returns:
             A `DeclarativeBase` instance for constructing queries.
+
+        Example:
+            Skip the first result:
+
+            Python: `match().node(variable='n').return_(results='n').skip(integer_expression='1').execute()`
+            Cypher: `MATCH (n) RETURN n SKIP 1;`
         """
         self._query.append(SkipPartialQuery(integer_expression))

@@ -1003,22 +1352,6 @@ def add_custom_cypher(self, custom_cypher: str) -> "DeclarativeBase":

         return self

-    def load_csv(self, path: str, header: bool, row: str) -> "DeclarativeBase":
-        """Load data from a CSV file by executing a Cypher query for each row.
-
-        Args:
-            path: A string representing the path to the CSV file.
-            header: A bool indicating if the CSV file starts with a header row.
-            row: A string representing the name of the variable for iterating
-                over each row.
-
-        Returns:
-            A `DeclarativeBase` instance for constructing queries.
-        """
-        self._query.append(LoadCsvPartialQuery(path, header, row))
-
-        return self
-
     def get_single(self, retrieve: str) -> Any:
         """Returns a single result with a `retrieve` variable name.

@@ -1036,6 +1369,83 @@
             return result[retrieve]
         return result

+    def foreach(
+        self, variable: str, expression: str, update_clause: Union[str, List[str], Set[str]]
+    ) -> "DeclarativeBase":
+        """Iterate over a list of elements and run every update clause in each iteration.
+
+        Args:
+            variable: The variable name that stores each element.
+            expression: Any expression that results in a list.
+            update_clause: One or more Cypher update clauses:
+                SET, REMOVE, CREATE, MERGE, DELETE, FOREACH.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
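limit() and skip() are widened to Union[str, int]; both spellings format identically inside the partial query's f-string, as the sketch below checks:

from gqlalchemy.query_builders.memgraph_query_builder import QueryBuilder

q1 = QueryBuilder().match().node(variable="n").return_().limit(10).construct_query()
q2 = QueryBuilder().match().node(variable="n").return_().limit("10").construct_query()
assert q1 == q2  # MATCH (n) RETURN * LIMIT 10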
+
+        Example:
+            For each number in a list, create a node:
+
+            Python: `update_clause = QueryBuilder().create().node(variable="n", id=PropertyVariable("i"))`
+                    `query_builder = QueryBuilder().foreach("i", "[1, 2, 3]", update_clause.construct_query())`
+            Cypher: `FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) )`
+        """
+        if isinstance(update_clause, (list, set)):
+            update_clause = " ".join(update_clause)
+
+        self._query.append(ForeachPartialQuery(variable, expression, update_clause))
+
+        return self
+
+    def set_(self, item: str, operator: Operator, **kwargs):
+        """Creates a SET statement Cypher partial query.
+
+        Args:
+            item: A string representing variable or property.
+            operator: An assignment, increment or label filter operator.
+
+        Kwargs:
+            literal: A value that will be converted to Cypher value, such as int, float, string, etc.
+            expression: A node label or property that won't be converted to Cypher value (no additional quotes will be added).
+
+        Raises:
+            GQLAlchemyLiteralAndExpressionMissing: Raises an error when neither literal nor expression keyword arguments were provided.
+            GQLAlchemyExtraKeywordArguments: Raises an error when both literal and expression keyword arguments were provided.
+
+        Returns:
+            A `DeclarativeBase` instance for constructing queries.
+
+        Examples:
+            Set or update a property.
+
+            Python: `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n.population', operator=Operator.ASSIGNMENT, literal=83000001).return_().execute()`
+            Cypher: `MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 RETURN *;`
+
+            Set or update multiple properties.
+
+            Python: `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n.population', operator=Operator.ASSIGNMENT, literal=83000001).set_(item='n.capital', operator=Operator.ASSIGNMENT, literal='Berlin').return_().execute()`
+            Cypher: `MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 SET n.capital = 'Berlin' RETURN *;`
+
+            Set node label.
+
+            Python: `match().node(variable='n').where(item='n.name', operator=Operator.EQUAL, literal='Germany').set_(item='n', operator=Operator.LABEL_FILTER, expression='Land').return_().execute()`
+            Cypher: `MATCH (n) WHERE n.name = 'Germany' SET n:Land RETURN *;`
+
+            Replace all properties using map.
+
+            Python: `match().node(variable='c', labels='Country').where(item='c.name', operator=Operator.EQUAL, literal='Germany').set_(item='c', operator=Operator.ASSIGNMENT, literal={'name': 'Germany', 'population': '85000000'}).return_().execute()`
+            Cypher: `MATCH (c:Country) WHERE c.name = 'Germany' SET c = {name: 'Germany', population: '85000000'} RETURN *;`
+
+            Update all properties using map.
+
+            Python: `match().node(variable='c', labels='Country').where(item='c.name', operator=Operator.EQUAL, literal='Germany').set_(item='c', operator=Operator.INCREMENT, literal={'name': 'Germany', 'population': '85000000'}).return_().execute()`
+            Cypher: `MATCH (c:Country) WHERE c.name = 'Germany' SET c += {name: 'Germany', population: '85000000'} RETURN *;`
+
+        """
+        self._query.append(SetPartialQuery(item=item, operator=operator, **kwargs))
+
+        return self
+
     def execute(self) -> Iterator[Dict[str, Any]]:
         """Executes the Cypher query and returns the results.
@@ -1068,7 +1478,7 @@ def construct_query(self) -> str: def _any_variables_matched(self) -> bool: """Checks if any variables are present in the result.""" return any( - q.type in [DeclarativeBaseTypes.EDGE, DeclarativeBaseTypes.NODE] and q.variable not in [None, ""] + q.type in [DeclarativeBaseTypes.RELATIONSHIP, DeclarativeBaseTypes.NODE] and q.variable not in [None, ""] for q in self._query ) @@ -1077,46 +1487,56 @@ def _is_linking_valid_with_query(self, match_type: str): return len(self._query) == 0 or self._query[-1].type != match_type -class QueryBuilder(DeclarativeBase): - def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): - super().__init__(connection) - - class Create(DeclarativeBase): - def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): + def __init__(self, connection: Optional[DatabaseClient] = None): super().__init__(connection) self._query.append(CreatePartialQuery()) class Match(DeclarativeBase): - def __init__(self, optional: bool = False, connection: Optional[Union[Connection, Memgraph]] = None): + def __init__(self, optional: bool = False, connection: Optional[DatabaseClient] = None): super().__init__(connection) self._query.append(MatchPartialQuery(optional)) class Merge(DeclarativeBase): - def __init__(self, connection: Optional[Union[Connection, Memgraph]] = None): + def __init__(self, connection: Optional[DatabaseClient] = None): super().__init__(connection) self._query.append(MergePartialQuery()) class Call(DeclarativeBase): - def __init__( - self, procedure: str, arguments: Optional[str] = None, connection: Optional[Union[Connection, Memgraph]] = None - ): + def __init__(self, procedure: str, arguments: Optional[str] = None, connection: Optional[DatabaseClient] = None): super().__init__(connection) self._query.append(CallPartialQuery(procedure, arguments)) class Unwind(DeclarativeBase): - def __init__(self, list_expression: str, variable: str, connection: Optional[Union[Connection, Memgraph]] = None): + def __init__(self, list_expression: str, variable: str, connection: Optional[DatabaseClient] = None): super().__init__(connection) self._query.append(UnwindPartialQuery(list_expression, variable)) class With(DeclarativeBase): + def __init__(self, results: Optional[Dict[str, str]] = {}, connection: Optional[DatabaseClient] = None): + super().__init__(connection) + self._query.append(WithPartialQuery(results)) + + +class Foreach(DeclarativeBase): def __init__( - self, results: Optional[Dict[str, str]] = {}, connection: Optional[Union[Connection, Memgraph]] = None + self, + variable: str, + expression: str, + update_clauses: Union[str, List[str], Set[str]], + connection: Optional[DatabaseClient] = None, ): super().__init__(connection) - self._query.append(WithPartialQuery(results)) + self._query.append(ForeachPartialQuery(variable, expression, update_clauses)) + + +class Return(DeclarativeBase): + def __init__(self, results: Optional[Dict[str, str]] = {}, connection: Optional[DatabaseClient] = None): + super().__init__(connection) + self._query.append(ReturnPartialQuery(results)) + self._fetch_results = True diff --git a/gqlalchemy/query_builders/memgraph_query_builder.py b/gqlalchemy/query_builders/memgraph_query_builder.py new file mode 100644 index 00000000..f967e5f0 --- /dev/null +++ b/gqlalchemy/query_builders/memgraph_query_builder.py @@ -0,0 +1,86 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. 
[https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from gqlalchemy.query_builders.declarative_base import ( # noqa F401 + Call, + Create, + DeclarativeBase, + DeclarativeBaseTypes, + Foreach, + Match, + Merge, + Operator, + Order, + PartialQuery, + Return, + Unwind, + With, +) +from gqlalchemy.vendors.database_client import DatabaseClient +from gqlalchemy.vendors.memgraph import Memgraph + + +class MemgraphQueryBuilderTypes(DeclarativeBaseTypes): + LOAD_CSV = "LOAD_CSV" + + +class LoadCsvPartialQuery(PartialQuery): + def __init__(self, path: str, header: bool, row: str): + super().__init__(DeclarativeBaseTypes.LOAD_CSV) + self.path = path + self.header = header + self.row = row + + def construct_query(self) -> str: + return f" LOAD CSV FROM '{self.path}' " + ("WITH" if self.header else "NO") + f" HEADER AS {self.row} " + + +class QueryBuilder(DeclarativeBase): + def __init__(self, connection: Optional[Memgraph] = None): + super().__init__(connection) + + def load_csv(self, path: str, header: bool, row: str) -> "DeclarativeBase": + """Load data from a CSV file by executing a Cypher query for each row. + + Args: + path: A string representing the path to the CSV file. + header: A bool indicating if the CSV file starts with a header row. + row: A string representing the name of the variable for iterating + over each row. + + Returns: + A `DeclarativeBase` instance for constructing queries. + + Examples: + Load CSV with header: + + Python: `load_csv(path="path/to/my/file.csv", header=True, row="row").return_().execute()` + Cypher: `LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN *;` + + Load CSV without header: + + Python: `load_csv(path='path/to/my/file.csv', header=False, row='row').return_().execute()` + Cypher: `LOAD CSV FROM 'path/to/my/file.csv' NO HEADER AS row RETURN *;` + """ + self._query.append(LoadCsvPartialQuery(path, header, row)) + + return self + + +class LoadCsv(DeclarativeBase): + def __init__(self, path: str, header: bool, row: str, connection: Optional[DatabaseClient] = None): + super().__init__(connection) + self._query.append(LoadCsvPartialQuery(path, header, row)) diff --git a/gqlalchemy/query_builders/neo4j_query_builder.py b/gqlalchemy/query_builders/neo4j_query_builder.py new file mode 100644 index 00000000..79fbe16e --- /dev/null +++ b/gqlalchemy/query_builders/neo4j_query_builder.py @@ -0,0 +1,31 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from gqlalchemy.query_builders.declarative_base import ( # noqa F401 + DeclarativeBase, + Call, + Create, + Foreach, + Match, + Merge, + Return, + Unwind, + With, +) +from gqlalchemy.vendors.neo4j import Neo4j + + +class Neo4jQueryBuilder(DeclarativeBase): + def __init__(self, connection: Neo4j): + super().__init__(connection) diff --git a/gqlalchemy/query_modules/__init__.py b/gqlalchemy/query_modules/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/gqlalchemy/query_modules/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/gqlalchemy/query_modules/push_streams/kafka.py b/gqlalchemy/query_modules/push_streams/kafka.py new file mode 100644 index 00000000..f9c6207f --- /dev/null +++ b/gqlalchemy/query_modules/push_streams/kafka.py @@ -0,0 +1,91 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
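A minimal usage sketch for the `Neo4jQueryBuilder` defined above, assuming the `Neo4j` client defaults introduced later in this diff (localhost:7687, user `neo4j`):

```python
from gqlalchemy.query_builders.neo4j_query_builder import Neo4jQueryBuilder
from gqlalchemy.vendors.neo4j import Neo4j

db = Neo4j(host="localhost", port=7687, username="neo4j", password="test")

# MATCH (n) RETURN *; executed over the Neo4j connection
results = list(Neo4jQueryBuilder(connection=db).match().node(variable="n").return_().execute())
```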
+
+import json
+from typing import Dict
+
+import mgp
+from kafka.producer import KafkaProducer
+
+
+producers_by_name: Dict[str, KafkaProducer] = {}
+topics_by_name: Dict[str, str] = {}
+
+
+@mgp.read_proc
+def create_push_stream(
+    context: mgp.ProcCtx,
+    stream_name: str,
+    topic: str,
+    config: mgp.Map,
+) -> mgp.Record(created=bool):
+
+    if not isinstance(stream_name, str):
+        raise TypeError("Invalid type on first argument!")
+
+    if not isinstance(topic, str):
+        raise TypeError("Invalid type on second argument!")
+
+    producer = KafkaProducer(value_serializer=lambda m: json.dumps(m).encode("utf-8"), **config)
+
+    producers_by_name[stream_name] = producer
+    topics_by_name[stream_name] = topic
+
+    return mgp.Record(created=True)
+
+
+@mgp.read_proc
+def show_streams(
+    context: mgp.ProcCtx,
+) -> mgp.Record(name=str, topic=str):
+    records = []
+
+    for k, v in topics_by_name.items():
+        records.append(mgp.Record(name=k, topic=v))
+
+    return records
+
+
+@mgp.read_proc
+def push(
+    context: mgp.ProcCtx,
+    stream_name: str,
+    payload: mgp.Map,
+) -> mgp.Record(message=mgp.Map):
+
+    if not isinstance(stream_name, str):
+        raise TypeError("Invalid type on first argument!")
+
+    # Accept every payload type that is serialized below; a stricter map-only
+    # check here would make the Vertex/Edge branches unreachable.
+    if not isinstance(payload, (dict, mgp.Vertex, mgp.Edge)):
+        raise TypeError("Invalid type on second argument!")
+
+    if stream_name not in producers_by_name or stream_name not in topics_by_name:
+        raise Exception(f"Stream {stream_name} is not present!")
+
+    message = ""
+    if isinstance(payload, dict):
+        message = payload
+    elif isinstance(payload, (mgp.Vertex, mgp.Edge)):
+        message = {x.name: x.value for x in payload.properties.items()}
+    else:
+        raise Exception("Can't have message type other than Map / Vertex / Edge")
+
+    producer, topic = producers_by_name[stream_name], topics_by_name[stream_name]
+
+    try:
+        producer.send(topic, message)
+    except Exception as e:
+        raise Exception(f"Exception when sending message: {e}")
+
+    return mgp.Record(message=message)
diff --git a/gqlalchemy/query_modules/push_streams/power_bi.py b/gqlalchemy/query_modules/push_streams/power_bi.py
new file mode 100644
index 00000000..2f171dab
--- /dev/null
+++ b/gqlalchemy/query_modules/push_streams/power_bi.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
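The Kafka module above is meant to be registered through `Memgraph().with_kafka_stream()` (defined in `memgraph.py` later in this diff) under the module name `kafka_stream.py`. A hedged usage sketch, assuming a broker at `localhost:9092`:

```python
from gqlalchemy import Memgraph

db = Memgraph().with_kafka_stream()  # loads this file as kafka_stream.py

# Create a named push stream backed by a Kafka topic, then push a map payload.
list(db.execute_and_fetch('CALL kafka_stream.create_push_stream("events", "events_topic", {bootstrap_servers: "localhost:9092"}) YIELD *;'))
list(db.execute_and_fetch('CALL kafka_stream.push("events", {id: 1, status: "created"}) YIELD *;'))
```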
+
+import requests
+import json
+from datetime import datetime
+
+import mgp
+
+API_URLS = {}
+ORDER_NUMBER = 0
+
+
+@mgp.read_proc
+def create_push_stream(
+    context: mgp.ProcCtx,
+    stream_name: str,
+    api_url: str,
+) -> mgp.Record(created=bool):
+    global API_URLS
+
+    if not isinstance(stream_name, str):
+        raise TypeError("Invalid type on first argument!")
+
+    if not isinstance(api_url, str):
+        raise TypeError("Invalid type on second argument!")
+
+    API_URLS[stream_name] = api_url
+
+    return mgp.Record(created=True)
+
+
+@mgp.read_proc
+def show_streams(
+    context: mgp.ProcCtx,
+) -> mgp.Record(name=str, api_url=str):
+    records = []
+
+    for k, v in API_URLS.items():
+        records.append(mgp.Record(name=k, api_url=v))
+
+    return records
+
+
+@mgp.read_proc
+def push(
+    context: mgp.ProcCtx,
+    stream_name: str,
+    payload: mgp.Map,
+) -> mgp.Record(status=str):
+
+    if not isinstance(stream_name, str):
+        raise TypeError("Invalid type on first argument!")
+
+    # Accept every payload type that is serialized below; a stricter dict-only
+    # check here would make the Vertex/Edge branches unreachable.
+    if not isinstance(payload, (dict, mgp.Vertex, mgp.Edge)):
+        raise TypeError("Invalid type on second argument!")
+
+    if stream_name not in API_URLS:
+        raise Exception("Power BI stream not defined!")
+
+    api_url = API_URLS[stream_name]
+
+    message = ""
+    if isinstance(payload, dict):
+        message = payload
+    elif isinstance(payload, (mgp.Vertex, mgp.Edge)):
+        message = {x.name: x.value for x in payload.properties.items()}
+    else:
+        raise Exception("Can't have message type other than Map / Vertex / Edge")
+
+    for k, v in message.items():
+        if isinstance(v, datetime):
+            message[k] = datetime.strftime(v, "%Y-%m-%dT%H:%M:%S")
+
+    headers = {"Content-Type": "application/json"}
+
+    try:
+        response = requests.request(method="POST", url=api_url, headers=headers, data=json.dumps(message))
+    except Exception as e:
+        raise Exception(f"Error happened while sending results! {e}")
+
+    # requests exposes the HTTP status as `status_code`, not `status`
+    return mgp.Record(status=str(response.status_code))
diff --git a/gqlalchemy/transformations.py b/gqlalchemy/transformations.py
index 565e5a09..3f664821 100644
--- a/gqlalchemy/transformations.py
+++ b/gqlalchemy/transformations.py
@@ -21,7 +21,7 @@
 from gqlalchemy import Memgraph
 from gqlalchemy.models import MemgraphIndex
-from gqlalchemy.utilities import to_cypher_labels, to_cypher_properties, to_cypher_value, NetworkXCypherConfig
+from gqlalchemy.utilities import NetworkXCypherConfig, to_cypher_labels, to_cypher_properties, to_cypher_value
 
 __all__ = ("nx_to_cypher", "nx_graph_to_memgraph_parallel")
diff --git a/gqlalchemy/utilities.py b/gqlalchemy/utilities.py
index 012a67e9..65bb84f4 100644
--- a/gqlalchemy/utilities.py
+++ b/gqlalchemy/utilities.py
@@ -13,9 +13,24 @@
 # limitations under the License.
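Analogously, the Power BI module above is registered through `Memgraph().with_power_bi()` under the module name `power_bi_stream.py`. A sketch with a placeholder push-dataset URL:

```python
from gqlalchemy import Memgraph

db = Memgraph().with_power_bi()  # loads this file as power_bi_stream.py

# Register a stream that POSTs each payload to a Power BI push URL (placeholder).
list(db.execute_and_fetch('CALL power_bi_stream.create_push_stream("sales", "https://api.powerbi.com/beta/<push-dataset-url>") YIELD *;'))
list(db.execute_and_fetch('CALL power_bi_stream.push("sales", {amount: 100, region: "EU"}) YIELD *;'))
```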
import math - +from datetime import datetime, date, time, timedelta from enum import Enum -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Tuple, Union + + +class DatetimeKeywords(Enum): + DURATION = "duration" + LOCALTIME = "localTime" + LOCALDATETIME = "localDateTime" + DATE = "date" + + +datetimeKwMapping = { + timedelta: DatetimeKeywords.DURATION.value, + time: DatetimeKeywords.LOCALTIME.value, + datetime: DatetimeKeywords.LOCALDATETIME.value, + date: DatetimeKeywords.DATE.value, +} class NanValuesHandle(Enum): @@ -37,6 +52,17 @@ def nan_handler(self) -> NanValuesHandle: return self._nan_handler +def _format_timedelta(duration: timedelta) -> str: + days = int(duration.total_seconds() // 86400) + remainder_sec = duration.total_seconds() - days * 86400 + hours = int(remainder_sec // 3600) + remainder_sec -= hours * 3600 + minutes = int(remainder_sec // 60) + remainder_sec -= minutes * 60 + + return f"P{days}DT{hours}H{minutes}M{remainder_sec}S" + + def to_cypher_value(value: Any, config: NetworkXCypherConfig = None) -> str: """Converts value to a valid Cypher type.""" if config is None: @@ -44,7 +70,13 @@ def to_cypher_value(value: Any, config: NetworkXCypherConfig = None) -> str: value_type = type(value) - if value_type == str and value.lower() == "null": + if value_type == PropertyVariable: + return str(value) + + if isinstance(value, (timedelta, time, datetime, date)): + return f"{datetimeKwMapping[value_type]}('{_format_timedelta(value) if isinstance(value, timedelta) else value.isoformat()}')" + + if value_type == str and value.lower() in ["true", "false", "null"]: return value if value_type == float and math.isnan(value): @@ -66,9 +98,6 @@ def to_cypher_value(value: Any, config: NetworkXCypherConfig = None) -> str: if value is None: return "null" - if value.lower() in ["true", "false"]: - return value - return f"'{value}'" @@ -97,5 +126,25 @@ def to_cypher_labels(labels: Union[str, List[str], None]) -> str: return "" +def to_cypher_qm_arguments(arguments: Optional[Union[str, Tuple[Union[str, int, float]]]]) -> str: + """Converts query module arguments to a valid Cypher string of query module arguments.""" + if isinstance(arguments, tuple): + return ", ".join([to_cypher_value(arg) for arg in arguments]) + + return arguments + + +class PropertyVariable: + """Class for support of using a variable as a node or edge property. Used + to avoid the quotes given to property values. + """ + + def __init__(self, name: str) -> None: + self._name = name + + def __str__(self) -> str: + return self._name + + class NanException(Exception): pass diff --git a/gqlalchemy/vendors/__init__.py b/gqlalchemy/vendors/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/gqlalchemy/vendors/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
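The temporal and `PropertyVariable` handling added to `to_cypher_value` above can be exercised directly; the expected outputs follow from `_format_timedelta` and `isoformat()`:

```python
from datetime import date, timedelta

from gqlalchemy.utilities import PropertyVariable, to_cypher_value

print(to_cypher_value(timedelta(days=1, hours=2, minutes=3)))  # duration('P1DT2H3M0.0S')
print(to_cypher_value(date(2022, 6, 1)))                       # date('2022-06-01')
print(to_cypher_value(PropertyVariable("i")))                  # i   (no quotes added)
```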
diff --git a/gqlalchemy/vendors/database_client.py b/gqlalchemy/vendors/database_client.py new file mode 100644 index 00000000..6de27037 --- /dev/null +++ b/gqlalchemy/vendors/database_client.py @@ -0,0 +1,311 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import Any, Dict, Iterator, List, Optional + +from gqlalchemy.connection import Connection +from gqlalchemy.exceptions import GQLAlchemyError +from gqlalchemy.models import ( + Constraint, + Index, + Node, + Relationship, +) + + +class DatabaseClient(ABC): + def __init__( + self, + host: str, + port: int, + username: str, + password: str, + encrypted: bool, + client_name: str, + ): + self._host = host + self._port = port + self._username = username + self._password = password + self._encrypted = encrypted + self._client_name = client_name + self._cached_connection: Optional[Connection] = None + + @property + def host(self): + return self._host + + @property + def port(self): + return self._port + + def execute_and_fetch(self, query: str, connection: Connection = None) -> Iterator[Dict[str, Any]]: + """Executes Cypher query and returns iterator of results.""" + connection = connection or self._get_cached_connection() + return connection.execute_and_fetch(query) + + def execute(self, query: str, connection: Connection = None) -> None: + """Executes Cypher query without returning any results.""" + connection = connection or self._get_cached_connection() + connection.execute(query) + + def create_index(self, index: Index) -> None: + """Creates an index (label or label-property type) in the database.""" + query = f"CREATE INDEX ON {index.to_cypher()};" + self.execute(query) + + def drop_index(self, index: Index) -> None: + """Drops an index (label or label-property type) in the database.""" + query = f"DROP INDEX ON {index.to_cypher()};" + self.execute(query) + + @abstractmethod + def get_indexes(self) -> List[Index]: + """Returns a list of all database indexes (label and label-property types).""" + pass + + @abstractmethod + def ensure_indexes(self, indexes: List[Index]) -> None: + """Ensures that database indexes match input indexes.""" + pass + + def drop_indexes(self) -> None: + """Drops all indexes in the database""" + self.ensure_indexes(indexes=[]) + + def create_constraint(self, index: Constraint) -> None: + """Creates a constraint (label or label-property type) in the database.""" + query = f"CREATE CONSTRAINT ON {index.to_cypher()};" + self.execute(query) + + def drop_constraint(self, index: Constraint) -> None: + """Drops a constraint (label or label-property type) in the database.""" + query = f"DROP CONSTRAINT ON {index.to_cypher()};" + self.execute(query) + + @abstractmethod + def get_constraints( + self, + ) -> List[Constraint]: + """Returns a list of all database constraints (label and label-property types).""" + pass + + @abstractmethod + def get_exists_constraints( + self, + ) -> List[Constraint]: + pass + + 
@abstractmethod
+    def get_unique_constraints(
+        self,
+    ) -> List[Constraint]:
+        pass
+
+    def ensure_constraints(
+        self,
+        constraints: List[Constraint],
+    ) -> None:
+        """Ensures that database constraints match input constraints."""
+        old_constraints = set(self.get_constraints())
+        new_constraints = set(constraints)
+        for obsolete_constraints in old_constraints.difference(new_constraints):
+            self.drop_constraint(obsolete_constraints)
+        for missing_constraint in new_constraints.difference(old_constraints):
+            self.create_constraint(missing_constraint)
+
+    def drop_database(self):
+        """Drops the database by removing all nodes and edges."""
+        self.execute("MATCH (n) DETACH DELETE n;")
+
+    def _get_cached_connection(self) -> Connection:
+        """Returns the cached connection if it exists, creates it otherwise."""
+        if self._cached_connection is None or not self._cached_connection.is_active():
+            self._cached_connection = self.new_connection()
+
+        return self._cached_connection
+
+    @abstractmethod
+    def new_connection(self) -> Connection:
+        """Creates a new database connection."""
+        pass
+
+    def _get_nodes_with_unique_fields(self, node: Node) -> Optional[Node]:
+        """Gets all nodes from the database that have any of the unique fields
+        set to the values in the `node` object.
+        """
+        return self.execute_and_fetch(
+            f"MATCH (node: {node._label})" f" WHERE {node._get_cypher_unique_fields_or_block('node')}" f" RETURN node;"
+        )
+
+    def get_variable_assume_one(self, query_result: Iterator[Dict[str, Any]], variable_name: str) -> Any:
+        """Returns a single result from the query_result (usually gotten from
+        the execute_and_fetch function).
+        If there is more than one result, raises a GQLAlchemyError.
+        """
+        result = next(query_result, None)
+        next_result = next(query_result, None)
+        if result is None:
+            raise GQLAlchemyError("No result found. Result list is empty.")
+        elif next_result is not None:
+            raise GQLAlchemyError(
+                f"One result expected, but more than one result found. First result: {result}, second result: {next_result}"
+            )
+        elif variable_name not in result:
+            raise GQLAlchemyError(f"Variable name {variable_name} not present in result.")
+
+        return result[variable_name]
+
+    def create_node(self, node: Node) -> Optional[Node]:
+        """Creates a node in the database from the `node` object."""
+        results = self.execute_and_fetch(
+            f"CREATE (node:{node._label}) {node._get_cypher_set_properties('node')} RETURN node;"
+        )
+        return self.get_variable_assume_one(results, "node")
+
+    @abstractmethod
+    def save_node(self, node: Node) -> Node:
+        """Saves a node to the database.
+        If the node._id is not None, it fetches the node with the same id from
+        the database and updates its fields.
+        If the node has unique fields, it fetches the nodes with the same unique
+        fields from the database and updates its fields.
+        Otherwise it creates a new node with the same properties.
+        Null properties are ignored.
+ """ + pass + + def save_nodes(self, nodes: List[Node]) -> None: + """Saves a list of nodes to the database.""" + for i in range(len(nodes)): + nodes[i]._id = self.save_node(nodes[i])._id + + def save_node_with_id(self, node: Node) -> Optional[Node]: + """Saves a node to the database using the internal id.""" + results = self.execute_and_fetch( + f"MATCH (node: {node._label})" + f" WHERE id(node) = {node._id}" + f" {node._get_cypher_set_properties('node')}" + f" RETURN node;" + ) + + return self.get_variable_assume_one(results, "node") + + @abstractmethod + def load_node(self, node: Node) -> Optional[Node]: + """Loads a node from the database. + If the node._id is not None, it fetches the node from the database with that + internal id. + If the node has unique fields it fetches the node from the database with + those unique fields set. + Otherwise it tries to find any node in the database that has all properties + set to exactly the same values. + If no node is found or no properties are set it raises a GQLAlchemyError. + """ + pass + + def load_node_with_all_properties(self, node: Node) -> Optional[Node]: + """Loads a node from the database with all equal property values.""" + results = self.execute_and_fetch( + f"MATCH (node: {node._label}) WHERE {node._get_cypher_fields_and_block('node')} RETURN node;" + ) + return self.get_variable_assume_one(results, "node") + + def load_node_with_id(self, node: Node) -> Optional[Node]: + """Loads a node with the same internal database id.""" + results = self.execute_and_fetch(f"MATCH (node: {node._label}) WHERE id(node) = {node._id} RETURN node;") + + return self.get_variable_assume_one(results, "node") + + @abstractmethod + def load_relationship(self, relationship: Relationship) -> Optional[Relationship]: + """Returns a relationship loaded from the database. + If the relationship._id is not None, it fetches the relationship from + the database that has the same internal id. + Otherwise it returns the relationship whose relationship._start_node_id + and relationship._end_node_id and all relationship properties that + are not None match the relationship in the database. + If there is no relationship like that in database, or if there are + multiple relationships like that in database, throws GQLAlchemyError. + """ + pass + + def load_relationship_with_id(self, relationship: Relationship) -> Optional[Relationship]: + """Loads a relationship from the database using the internal id.""" + results = self.execute_and_fetch( + f"MATCH (start_node)-[relationship: {relationship._type}]->(end_node)" + f" WHERE id(start_node) = {relationship._start_node_id}" + f" AND id(end_node) = {relationship._end_node_id}" + f" AND id(relationship) = {relationship._id}" + f" RETURN relationship;" + ) + return self.get_variable_assume_one(results, "relationship") + + def load_relationship_with_start_node_id_and_end_node_id( + self, relationship: Relationship + ) -> Optional[Relationship]: + """Loads a relationship from the database using start node and end node id + for which all properties of the relationship that are not None match. 
+        """
+        and_block = relationship._get_cypher_fields_and_block("relationship")
+        if and_block.strip():
+            and_block = f" AND {and_block}"
+        results = self.execute_and_fetch(
+            f"MATCH (start_node)-[relationship:{relationship._type}]->(end_node)"
+            f" WHERE id(start_node) = {relationship._start_node_id}"
+            f" AND id(end_node) = {relationship._end_node_id}"
+            f"{and_block} RETURN relationship;"
+        )
+        return self.get_variable_assume_one(results, "relationship")
+
+    @abstractmethod
+    def save_relationship(self, relationship: Relationship) -> Optional[Relationship]:
+        """Saves a relationship to the database.
+        If relationship._id is not None it finds the relationship in the database
+        and updates its properties with the values in `relationship`.
+        If relationship._id is None, it creates a new relationship.
+        If you want to set a relationship._id instead of creating a new
+        relationship, use `load_relationship` first.
+        """
+        pass
+
+    def save_relationships(self, relationships: List[Relationship]) -> None:
+        """Saves a list of relationships to the database."""
+        for i in range(len(relationships)):
+            relationships[i]._id = self.save_relationship(relationships[i])._id
+
+    def save_relationship_with_id(self, relationship: Relationship) -> Optional[Relationship]:
+        """Saves a relationship to the database using the relationship._id."""
+        results = self.execute_and_fetch(
+            f"MATCH (start_node)-[relationship: {relationship._type}]->(end_node)"
+            f" WHERE id(start_node) = {relationship._start_node_id}"
+            f" AND id(end_node) = {relationship._end_node_id}"
+            f" AND id(relationship) = {relationship._id}"
+            f"{relationship._get_cypher_set_properties('relationship')} RETURN relationship;"
+        )
+
+        return self.get_variable_assume_one(results, "relationship")
+
+    def create_relationship(self, relationship: Relationship) -> Optional[Relationship]:
+        """Creates a new relationship in the database."""
+        results = self.execute_and_fetch(
+            "MATCH (start_node), (end_node)"
+            f" WHERE id(start_node) = {relationship._start_node_id}"
+            f" AND id(end_node) = {relationship._end_node_id}"
+            f" CREATE (start_node)-[relationship:{relationship._type}]->(end_node)"
+            f"{relationship._get_cypher_set_properties('relationship')} RETURN relationship"
+        )
+
+        return self.get_variable_assume_one(results, "relationship")
diff --git a/gqlalchemy/memgraph.py b/gqlalchemy/vendors/memgraph.py
similarity index 55%
rename from gqlalchemy/memgraph.py
rename to gqlalchemy/vendors/memgraph.py
index 71156219..adaa035f 100644
--- a/gqlalchemy/memgraph.py
+++ b/gqlalchemy/vendors/memgraph.py
@@ -14,12 +14,17 @@
 import os
 import sqlite3
-from typing import Any, Dict, Iterator, List, Optional, Union
+from typing import List, Optional, Union
 
-from .connection import Connection
-from .disk_storage import OnDiskPropertyDatabase
-from .models import (
-    MemgraphConstraint,
+from gqlalchemy.connection import Connection, MemgraphConnection
+from gqlalchemy.disk_storage import OnDiskPropertyDatabase
+from gqlalchemy.exceptions import (
+    GQLAlchemyError,
+    GQLAlchemyFileNotFoundError,
+    GQLAlchemyOnDiskPropertyDatabaseNotDefinedError,
+    GQLAlchemyUniquenessConstraintError,
+)
+from gqlalchemy.models import (
     MemgraphConstraintExists,
     MemgraphConstraintUnique,
     MemgraphIndex,
@@ -28,12 +33,8 @@
     Node,
     Relationship,
 )
-
-from .exceptions import (
-    GQLAlchemyError,
-    GQLAlchemyUniquenessConstraintError,
-    GQLAlchemyOnDiskPropertyDatabaseNotDefinedError,
-)
+from gqlalchemy.vendors.database_client import DatabaseClient
+from 
gqlalchemy.graph_algorithms.query_modules import QueryModule __all__ = ("Memgraph",) @@ -43,6 +44,7 @@ MG_PASSWORD = os.getenv("MG_PASSWORD", "") MG_ENCRYPTED = os.getenv("MG_ENCRYPT", "false").lower() == "true" MG_CLIENT_NAME = os.getenv("MG_CLIENT_NAME", "GQLAlchemy") +MG_LAZY = os.getenv("MG_LAZY", "false").lower() == "true" class MemgraphConstants: @@ -54,7 +56,7 @@ class MemgraphConstants: UNIQUE = "unique" -class Memgraph: +class Memgraph(DatabaseClient): def __init__( self, host: str = MG_HOST, @@ -63,38 +65,16 @@ def __init__( password: str = MG_PASSWORD, encrypted: bool = MG_ENCRYPTED, client_name: str = MG_CLIENT_NAME, + lazy: bool = MG_LAZY, ): - self._host = host - self._port = port - self._username = username - self._password = password - self._encrypted = encrypted - self._client_name = client_name - self._cached_connection: Optional[Connection] = None + super().__init__( + host=host, port=port, username=username, password=password, encrypted=encrypted, client_name=client_name + ) + self._lazy = lazy self._on_disk_db = None - def execute_and_fetch(self, query: str, connection: Connection = None) -> Iterator[Dict[str, Any]]: - """Executes Cypher query and returns iterator of results.""" - connection = connection or self._get_cached_connection() - return connection.execute_and_fetch(query) - - def execute(self, query: str, connection: Connection = None) -> None: - """Executes Cypher query without returning any results.""" - connection = connection or self._get_cached_connection() - connection.execute(query) - - def create_index(self, index: MemgraphIndex) -> None: - """Creates an index (label or label-property type) in the database""" - query = f"CREATE INDEX ON {index.to_cypher()};" - self.execute(query) - - def drop_index(self, index: MemgraphIndex) -> None: - """Drops an index (label or label-property type) in the database""" - query = f"DROP INDEX ON {index.to_cypher()};" - self.execute(query) - def get_indexes(self) -> List[MemgraphIndex]: - """Returns a list of all database indexes (label and label-property types)""" + """Returns a list of all database indexes (label and label-property types).""" indexes = [] for result in self.execute_and_fetch("SHOW INDEX INFO;"): indexes.append( @@ -106,7 +86,7 @@ def get_indexes(self) -> List[MemgraphIndex]: return indexes def ensure_indexes(self, indexes: List[MemgraphIndex]) -> None: - """Ensures that database indexes match input indexes""" + """Ensures that database indexes match input indexes.""" old_indexes = set(self.get_indexes()) new_indexes = set(indexes) for obsolete_index in old_indexes.difference(new_indexes): @@ -114,24 +94,10 @@ def ensure_indexes(self, indexes: List[MemgraphIndex]) -> None: for missing_index in new_indexes.difference(old_indexes): self.create_index(missing_index) - def drop_indexes(self) -> None: - """Drops all indexes in the database""" - self.ensure_indexes(indexes=[]) - - def create_constraint(self, index: MemgraphConstraint) -> None: - """Creates a constraint (label or label-property type) in the database""" - query = f"CREATE CONSTRAINT ON {index.to_cypher()};" - self.execute(query) - - def drop_constraint(self, index: MemgraphConstraint) -> None: - """Drops a constraint (label or label-property type) in the database""" - query = f"DROP CONSTRAINT ON {index.to_cypher()};" - self.execute(query) - def get_constraints( self, ) -> List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]]: - """Returns a list of all database constraints (label and label-property types)""" + """Returns a list of 
all database constraints (label and label-property types).""" constraints: List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]] = [] for result in self.execute_and_fetch("SHOW CONSTRAINT INFO;"): if result[MemgraphConstants.CONSTRAINT_TYPE] == MemgraphConstants.UNIQUE: @@ -160,51 +126,47 @@ def get_unique_constraints( ) -> List[MemgraphConstraintUnique]: return [x for x in self.get_constraints() if isinstance(x, MemgraphConstraintUnique)] - def ensure_constraints( - self, - constraints: List[Union[MemgraphConstraintExists, MemgraphConstraintUnique]], - ) -> None: - """Ensures that database constraints match input constraints""" - old_constraints = set(self.get_constraints()) - new_constraints = set(constraints) - for obsolete_constraints in old_constraints.difference(new_constraints): - self.drop_constraint(obsolete_constraints) - for missing_constraint in new_constraints.difference(old_constraints): - self.create_constraint(missing_constraint) + def new_connection(self) -> Connection: + """Creates new Memgraph connection.""" + args = dict( + host=self._host, + port=self._port, + username=self._username, + password=self._password, + encrypted=self._encrypted, + client_name=self._client_name, + ) + return MemgraphConnection(**args) def create_stream(self, stream: MemgraphStream) -> None: - """Create a stream""" + """Create a stream.""" query = stream.to_cypher() self.execute(query) def start_stream(self, stream: MemgraphStream) -> None: - """Start a stream""" + """Start a stream.""" query = f"START STREAM {stream.name};" self.execute(query) def get_streams(self) -> List[str]: - """Returns a list of all streams""" + """Returns a list of all streams.""" streams = [] for result in self.execute_and_fetch("SHOW STREAMS;"): streams.append(result) return streams def drop_stream(self, stream: MemgraphStream) -> None: - """Drop a stream""" + """Drop a stream.""" query = f"DROP STREAM {stream.name};" self.execute(query) - def drop_database(self): - """Drops database by removing all nodes and edges""" - self.execute("MATCH (n) DETACH DELETE n;") - def create_trigger(self, trigger: MemgraphTrigger) -> None: - """Creates a trigger""" + """Creates a trigger.""" query = trigger.to_cypher() self.execute(query) def get_triggers(self) -> List[str]: - """Returns a list of all database triggers""" + """Returns a list of all database triggers.""" triggers_list = list(self.execute_and_fetch("SHOW TRIGGERS;")) memgraph_triggers_list = [] for trigger in triggers_list: @@ -228,24 +190,17 @@ def get_triggers(self) -> List[str]: return memgraph_triggers_list def drop_trigger(self, trigger: MemgraphTrigger) -> None: - """Drop a trigger""" + """Drop a trigger.""" query = f"DROP TRIGGER {trigger.name};" self.execute(query) def drop_triggers(self) -> None: - """Drops all triggers in the database""" + """Drops all triggers in the database.""" for trigger in self.get_triggers(): self.drop_trigger(trigger) - def _get_cached_connection(self) -> Connection: - """Returns cached connection if it exists, creates it otherwise""" - if self._cached_connection is None or not self._cached_connection.is_active(): - self._cached_connection = self.new_connection() - - return self._cached_connection - - def new_connection(self) -> Connection: - """Creates new Memgraph connection""" + def _new_connection(self) -> Connection: + """Creates new Memgraph connection.""" args = dict( host=self._host, port=self._port, @@ -254,60 +209,25 @@ def new_connection(self) -> Connection: encrypted=self._encrypted, 
client_name=self._client_name,
         )
-        return Connection.create(**args)
+        return MemgraphConnection(**args)
 
     def init_disk_storage(self, on_disk_db: OnDiskPropertyDatabase) -> None:
-        """Adds and OnDiskPropertyDatabase to Memgraph so that any property
+        """Adds an OnDiskPropertyDatabase to the database so that any property
         that has a Field(on_disk=True) can be stored to and loaded from
         an OnDiskPropertyDatabase.
         """
         self.on_disk_db = on_disk_db
 
     def remove_on_disk_storage(self) -> None:
-        """Removes the OnDiskPropertyDatabase from Memgraph"""
+        """Removes the OnDiskPropertyDatabase from the database."""
         self.on_disk_db = None
 
-    def _get_nodes_with_unique_fields(self, node: Node) -> Optional[Node]:
-        """Get's all nodes from Memgraph that have any of the unique fields
-        set to the values in the `node` object.
-        """
-        return self.execute_and_fetch(
-            f"MATCH (node: {node._label})"
-            + f" WHERE {node._get_cypher_unique_fields_or_block('node')}"
-            + " RETURN node;"
-        )
-
-    def get_variable_assume_one(self, query_result: Iterator[Dict[str, Any]], variable_name: str) -> Any:
-        """Returns a single result from the query_result (usually gotten from
-        the execute_and_fetch function).
-        If there is more than one result, raises a GQLAlchemyError.
-        """
-        result = next(query_result, None)
-        next_result = next(query_result, None)
-        if result is None:
-            raise GQLAlchemyError("No result found. Result list is empty.")
-        elif next_result is not None:
-            raise GQLAlchemyError(
-                f"One result expected, but more than one result found. First result: {result}, second result: {next_result}"
-            )
-        elif variable_name not in result:
-            raise GQLAlchemyError(f"Variable name {variable_name} not present in result.")
-
-        return result[variable_name]
-
-    def create_node(self, node: Node) -> Optional[Node]:
-        """Creates a node in Memgraph from the `node` object."""
-        results = self.execute_and_fetch(
-            f"CREATE (node:{node._label}) {node._get_cypher_set_properties('node')} RETURN node;"
-        )
-        return self.get_variable_assume_one(results, "node")
-
     def save_node(self, node: Node) -> Node:
-        """Saves node to Memgraph.
+        """Saves node to the database.
         If the node._id is not None it fetches the node with the same id from
-        Memgraph and updates it's fields.
+        the database and updates its fields.
         If the node has unique fields it fetches the nodes with the same unique
-        fields from Memgraph and updates it's fields.
+        fields from the database and updates its fields.
         Otherwise it creates a new node with the same properties.
         Null properties are ignored.
         """
@@ -331,14 +251,9 @@
         result = self._save_node_properties_on_disk(node, result)
         return result
 
-    def save_nodes(self, nodes: List[Node]) -> None:
-        """Saves a list of nodes to Memgraph."""
-        for i in range(len(nodes)):
-            nodes[i]._id = self.save_node(nodes[i])._id
-
     def _save_node_properties_on_disk(self, node: Node, result: Node) -> Node:
         """Saves all on_disk properties to the on disk database attached to
-        Memgraph.
+        the database.
""" for field in node.__fields__: value = getattr(node, field, None) @@ -350,24 +265,13 @@ def _save_node_properties_on_disk(self, node: Node, result: Node) -> Node: return result - def save_node_with_id(self, node: Node) -> Optional[Node]: - """Saves a node in Memgraph using the internal Memgraph id.""" - results = self.execute_and_fetch( - f"MATCH (node: {node._label})" - + f" WHERE id(node) = {node._id}" - + f" {node._get_cypher_set_properties('node')}" - + " RETURN node;" - ) - - return self.get_variable_assume_one(results, "node") - def load_node(self, node: Node) -> Optional[Node]: - """Loads a node from Memgraph. - If the node._id is not None it fetches the node from Memgraph with that + """Loads a node from the database. + If the node._id is not None it fetches the node from the database with that internal id. - If the node has unique fields it fetches the node from Memgraph with + If the node has unique fields it fetches the node from the database with those unique fields set. - Otherwise it tries to find any node in Memgraph that has all properties + Otherwise it tries to find any node in the database that has all properties set to exactly the same values. If no node is found or no properties are set it raises a GQLAlchemyError. """ @@ -399,28 +303,15 @@ def _load_node_properties_on_disk(self, result: Node) -> Node: return result - def load_node_with_all_properties(self, node: Node) -> Optional[Node]: - """Loads a node from Memgraph with all equal property values.""" - results = self.execute_and_fetch( - f"MATCH (node: {node._label}) WHERE {node._get_cypher_fields_and_block('node')} RETURN node;" - ) - return self.get_variable_assume_one(results, "node") - - def load_node_with_id(self, node: Node) -> Optional[Node]: - """Loads a node with the same internal Memgraph id.""" - results = self.execute_and_fetch(f"MATCH (node: {node._label}) WHERE id(node) = {node._id} RETURN node;") - - return self.get_variable_assume_one(results, "node") - def load_relationship(self, relationship: Relationship) -> Optional[Relationship]: - """Returns a relationship loaded from Memgraph. + """Returns a relationship loaded from the database. If the relationship._id is not None it fetches the relationship from - Memgraph that has the same internal id. + the database that has the same internal id. Otherwise it returns the relationship whose relationship._start_node_id and relationship._end_node_id and all relationship properties that - are not None match the relationship in Memgraph. - If there is no relationship like that in Memgraph, or if there are - multiple relationships like that in Memgraph, throws GQLAlchemyError. + are not None match the relationship in the database. + If there is no relationship like that in the database, or if there are + multiple relationships like that in the database, throws GQLAlchemyError. 
""" if relationship._id is not None: result = self.load_relationship_with_id(relationship) @@ -451,38 +342,9 @@ def _load_relationship_properties_on_disk(self, result: Relationship) -> Relatio return result - def load_relationship_with_id(self, relationship: Relationship) -> Optional[Relationship]: - """Loads a relationship from Memgraph using the internal id.""" - results = self.execute_and_fetch( - f"MATCH (start_node)-[relationship: {relationship._type}]->(end_node)" - + f" WHERE id(start_node) = {relationship._start_node_id}" - + f" AND id(end_node) = {relationship._end_node_id}" - + f" AND id(relationship) = {relationship._id}" - + " RETURN relationship;" - ) - return self.get_variable_assume_one(results, "relationship") - - def load_relationship_with_start_node_id_and_end_node_id( - self, relationship: Relationship - ) -> Optional[Relationship]: - """Loads a relationship from Memgraph using start node and end node id - for which all properties of the relationship that are not None match. - """ - and_block = relationship._get_cypher_fields_and_block("relationship") - if and_block.strip(): - and_block = " AND " + and_block - results = self.execute_and_fetch( - f"MATCH (start_node)-[relationship:{relationship._type}]->(end_node)" - + f" WHERE id(start_node) = {relationship._start_node_id}" - + f" AND id(end_node) = {relationship._end_node_id}" - + and_block - + " RETURN relationship;" - ) - return self.get_variable_assume_one(results, "relationship") - def save_relationship(self, relationship: Relationship) -> Optional[Relationship]: - """Saves a relationship to Memgraph. - If relationship._id is not None it finds the relationship in Memgraph + """Saves a relationship to the database. + If relationship._id is not None it finds the relationship in the database and updates it's properties with the values in `relationship`. If relationship._id is None, it creates a new relationship. If you want to set a relationship._id instead of creating a new @@ -498,11 +360,6 @@ def save_relationship(self, relationship: Relationship) -> Optional[Relationship result = self._save_relationship_properties_on_disk(relationship, result) return result - def save_relationships(self, relationships: List[Relationship]) -> None: - """Saves a list of relationships to Memgraph.""" - for i in range(len(relationships)): - relationships[i]._id = self.save_relationship(relationships[i])._id - def _save_relationship_properties_on_disk(self, relationship: Relationship, result: Relationship) -> Relationship: """Saves on_disk relationship propeties on the OnDiskPropertyDatabase added with Memgraph().init_disk_storage(db). If OnDiskPropertyDatabase @@ -518,28 +375,67 @@ def _save_relationship_properties_on_disk(self, relationship: Relationship, resu return result - def save_relationship_with_id(self, relationship: Relationship) -> Optional[Relationship]: - """Saves a relationship in Memgraph using the relationship._id.""" - results = self.execute_and_fetch( - f"MATCH (start_node)-[relationship: {relationship._type}]->(end_node)" - + f" WHERE id(start_node) = {relationship._start_node_id}" - + f" AND id(end_node) = {relationship._end_node_id}" - + f" AND id(relationship) = {relationship._id}" - + relationship._get_cypher_set_properties("relationship") - + " RETURN node;" - ) + def get_procedures(self, starts_with: Optional[str] = None, update: bool = False) -> List["QueryModule"]: + """Return query procedures. + + Maintains a list of query modules in the Memgraph object. 
If starts_with
+        is defined then return only those modules that start with the starts_with string.
-        return self.get_variable_assume_one(results, "relationship")
-
-    def create_relationship(self, relationship: Relationship) -> Optional[Relationship]:
-        """Creates a new relationship in Memgraph."""
-        results = self.execute_and_fetch(
-            "MATCH (start_node), (end_node)"
-            + f" WHERE id(start_node) = {relationship._start_node_id}"
-            + f" AND id(end_node) = {relationship._end_node_id}"
-            + f" CREATE (start_node)-[relationship:{relationship._type}]->(end_node)"
-            + relationship._get_cypher_set_properties("relationship")
-            + "RETURN relationship"
+        Args:
+            starts_with: Return those modules that start with this string.
+                (Optional)
+            update: Whether to update the list of modules in
+                self.query_modules. (Optional)
+        """
+        if not hasattr(self, "query_modules") or update:
+            results = self.execute_and_fetch("CALL mg.procedures() YIELD *;")
+            self.query_modules = [QueryModule(**module_dict) for module_dict in results]
+
+        return (
+            self.query_modules
+            if starts_with is None
+            else [q for q in self.query_modules if q.name.startswith(starts_with)]
         )
-        return self.get_variable_assume_one(results, "relationship")
+
+    def add_query_module(self, file_path: str, module_name: str) -> "Memgraph":
+        """Function for adding a query module written in Python to Memgraph.
+        Examples can be found in the functions below (with_kafka_stream, with_power_bi).
+
+        The module is then dynamically synced with the database so that its
+        procedures can be called from openCypher.
+
+        Args:
+            file_path (str): path to the file containing the module.
+            module_name (str): name of the module.
+
+        Returns:
+            Memgraph: Memgraph object.
+        """
+        if not os.path.isfile(file_path):
+            raise GQLAlchemyFileNotFoundError(path=file_path)
+
+        file_text = open(file_path, "r").read().replace("'", '"')
+        query = f"CALL mg.create_module_file('{module_name}','{file_text}') YIELD *;"
+        list(self.execute_and_fetch(query))
+
+        return self
+
+    def with_kafka_stream(self) -> "Memgraph":
+        """Load the Kafka stream query module.
+        Returns:
+            Memgraph: Memgraph instance
+        """
+        file_path = "gqlalchemy/query_modules/push_streams/kafka.py"
+        module_name = "kafka_stream.py"
+
+        return self.add_query_module(file_path=file_path, module_name=module_name)
+
+    def with_power_bi(self) -> "Memgraph":
+        """Load the Power BI stream query module.
+        Returns:
+            Memgraph: Memgraph instance
+        """
+        file_path = "gqlalchemy/query_modules/push_streams/power_bi.py"
+        module_name = "power_bi_stream.py"
+
+        return self.add_query_module(file_path=file_path, module_name=module_name)
diff --git a/gqlalchemy/vendors/neo4j.py b/gqlalchemy/vendors/neo4j.py
new file mode 100644
index 00000000..24428948
--- /dev/null
+++ b/gqlalchemy/vendors/neo4j.py
@@ -0,0 +1,218 @@
+# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
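A short usage sketch for `get_procedures` from `memgraph.py` above (the `pagerank` prefix is illustrative):

```python
from gqlalchemy import Memgraph

db = Memgraph()

# The first call runs CALL mg.procedures() YIELD *; and caches the result;
# update=True refreshes the cached list.
pagerank_procs = db.get_procedures(starts_with="pagerank")
print([procedure.name for procedure in pagerank_procs])
```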
+ +import os +from typing import List, Optional, Union + +from gqlalchemy.connection import Connection, Neo4jConnection +from gqlalchemy.exceptions import ( + GQLAlchemyError, + GQLAlchemyUniquenessConstraintError, +) +from gqlalchemy.models import ( + Neo4jConstraintExists, + Neo4jConstraintUnique, + Neo4jIndex, + Node, + Relationship, +) +from gqlalchemy.vendors.database_client import DatabaseClient + +__all__ = ("Neo4j",) + +NEO4J_HOST = os.getenv("NEO4J_HOST", "localhost") +NEO4J_PORT = int(os.getenv("NEO4J_PORT", "7687")) +NEO4J_USERNAME = os.getenv("NEO4J_USERNAME", "neo4j") +NEO4J_PASSWORD = os.getenv("NEO4J_PASSWORD", "test") +NEO4J_ENCRYPTED = os.getenv("NEO4J_ENCRYPT", "false").lower() == "true" +NEO4J_CLIENT_NAME = os.getenv("NEO4J_CLIENT_NAME", "neo4j") + + +class Neo4jConstants: + CONSTRAINT_TYPE = "constraint type" + EXISTS = "exists" + LABEL = "labelsOrTypes" + PROPERTY = "property" + PROPERTIES = "properties" + UNIQUE = "unique" + LOOKUP = "LOOKUP" + TYPE = "type" + UNIQUE = "UNIQUE" + UNIQUENESS = "uniqueness" + + +class Neo4j(DatabaseClient): + def __init__( + self, + host: str = NEO4J_HOST, + port: int = NEO4J_PORT, + username: str = NEO4J_USERNAME, + password: str = NEO4J_PASSWORD, + encrypted: bool = NEO4J_ENCRYPTED, + client_name: str = NEO4J_CLIENT_NAME, + ): + super().__init__( + host=host, port=port, username=username, password=password, encrypted=encrypted, client_name=client_name + ) + self._cached_connection: Optional[Connection] = None + + def get_indexes(self) -> List[Neo4jIndex]: + """Returns a list of all database indexes (label and label-property types).""" + indexes = [] + for result in self.execute_and_fetch("SHOW INDEX;"): + indexes.append( + Neo4jIndex( + result[Neo4jConstants.LABEL][0] + if result[Neo4jConstants.TYPE] != Neo4jConstants.LOOKUP + else result[Neo4jConstants.LABEL], + result[Neo4jConstants.PROPERTIES][0] + if result[Neo4jConstants.TYPE] != Neo4jConstants.LOOKUP + else result[Neo4jConstants.PROPERTIES], + result[Neo4jConstants.TYPE], + result[Neo4jConstants.UNIQUENESS], + ) + ) + return indexes + + def ensure_indexes(self, indexes: List[Neo4jIndex]) -> None: + """Ensures that database indexes match input indexes.""" + old_indexes = set(self.get_indexes()) + new_indexes = set(indexes) + for obsolete_index in old_indexes.difference(new_indexes): + if obsolete_index.type != Neo4jConstants.LOOKUP and obsolete_index.uniqueness != Neo4jConstants.UNIQUE: + self.drop_index(obsolete_index) + for missing_index in new_indexes.difference(old_indexes): + self.create_index(missing_index) + + def get_constraints( + self, + ) -> List[Union[Neo4jConstraintExists, Neo4jConstraintUnique]]: + """Returns a list of all database constraints (label and label-property types).""" + constraints: List[Union[Neo4jConstraintExists, Neo4jConstraintUnique]] = [] + for result in self.execute_and_fetch("SHOW CONSTRAINTS;"): + if result[Neo4jConstants.TYPE] == "UNIQUENESS": + constraints.append( + Neo4jConstraintUnique( + result[Neo4jConstants.LABEL][0], + tuple(result[Neo4jConstants.PROPERTIES]), + ) + ) + return constraints + + def get_exists_constraints( + self, + ) -> List[Neo4jConstraintExists]: + return [x for x in self.get_constraints() if isinstance(x, Neo4jConstraintExists)] + + def get_unique_constraints( + self, + ) -> List[Neo4jConstraintUnique]: + return [x for x in self.get_constraints() if isinstance(x, Neo4jConstraintUnique)] + + def new_connection(self) -> Connection: + """Creates new Neo4j connection.""" + args = dict( + host=self._host, + port=self._port, 
+            username=self._username,
+            password=self._password,
+            encrypted=self._encrypted,
+            client_name=self._client_name,
+        )
+        return Neo4jConnection(**args)
+
+    def save_node(self, node: Node) -> Node:
+        """Saves a node to the database.
+        If the node._id is not None it fetches the node with the same id from
+        the database and updates its fields.
+        If the node has unique fields it fetches the nodes with the same unique
+        fields from the database and updates its fields.
+        Otherwise it creates a new node with the same properties.
+        Null properties are ignored.
+        """
+        result = None
+        if node._id is not None:
+            result = self.save_node_with_id(node)
+        elif node.has_unique_fields():
+            matching_nodes = list(self._get_nodes_with_unique_fields(node))
+            if len(matching_nodes) > 1:
+                raise GQLAlchemyUniquenessConstraintError(
+                    f"Uniqueness constraints match multiple nodes: {matching_nodes}"
+                )
+            elif len(matching_nodes) == 1:
+                node._id = matching_nodes[0]["node"]._id
+                result = self.save_node_with_id(node)
+            else:
+                result = self.create_node(node)
+        else:
+            result = self.create_node(node)
+
+        return result
+
+    def load_node(self, node: Node) -> Optional[Node]:
+        """Loads a node from the database.
+        If the node._id is not None it fetches the node from the database with that
+        internal id.
+        If the node has unique fields it fetches the node from the database with
+        those unique fields set.
+        Otherwise it tries to find any node in the database that has all properties
+        set to exactly the same values.
+        If no node is found or no properties are set it raises a GQLAlchemyError.
+        """
+        if node._id is not None:
+            result = self.load_node_with_id(node)
+        elif node.has_unique_fields():
+            matching_node = self.get_variable_assume_one(
+                query_result=self._get_nodes_with_unique_fields(node), variable_name="node"
+            )
+            result = matching_node
+        else:
+            result = self.load_node_with_all_properties(node)
+
+        return result
+
+    def load_relationship(self, relationship: Relationship) -> Optional[Relationship]:
+        """Returns a relationship loaded from the database.
+        If the relationship._id is not None it fetches the relationship from
+        the database that has the same internal id.
+        Otherwise it returns the relationship whose relationship._start_node_id
+        and relationship._end_node_id and all relationship properties that
+        are not None match the relationship in the database.
+        If there is no relationship like that in the database, or if there are
+        multiple relationships like that in the database, throws GQLAlchemyError.
+        """
+        if relationship._id is not None:
+            result = self.load_relationship_with_id(relationship)
+        elif relationship._start_node_id is not None and relationship._end_node_id is not None:
+            result = self.load_relationship_with_start_node_id_and_end_node_id(relationship)
+        else:
+            raise GQLAlchemyError("Can't load a relationship without a start_node_id and end_node_id.")
+        return result
+
+    def save_relationship(self, relationship: Relationship) -> Optional[Relationship]:
+        """Saves a relationship to the database.
+        If relationship._id is not None it finds the relationship in the database
+        and updates its properties with the values in `relationship`.
+        If relationship._id is None, it creates a new relationship.
+        If you want to set a relationship._id instead of creating a new
+        relationship, use `load_relationship` first.
+ """ + if relationship._id is not None: + result = self.save_relationship_with_id(relationship) + elif relationship._start_node_id is not None and relationship._end_node_id is not None: + result = self.create_relationship(relationship) + else: + raise GQLAlchemyError("Can't create a relationship without start_node_id and end_node_id.") + + return result diff --git a/poetry.lock b/poetry.lock index 33e44709..26f7a1bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -92,7 +92,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -104,7 +104,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "azure-core" -version = "1.23.0" +version = "1.24.1" description = "Microsoft Azure Core Library for Python" category = "main" optional = false @@ -130,7 +130,7 @@ requests = ">=2.20.0" [[package]] name = "azure-identity" -version = "1.8.0" +version = "1.10.0" description = "Microsoft Azure Identity Library for Python" category = "main" optional = false @@ -140,56 +140,52 @@ python-versions = ">=3.6" azure-core = ">=1.11.0,<2.0.0" cryptography = ">=2.5" msal = ">=1.12.0,<2.0.0" -msal-extensions = ">=0.3.0,<0.4.0" +msal-extensions = ">=0.3.0,<2.0.0" six = ">=1.12.0" [[package]] name = "azure-storage-blob" -version = "12.10.0" +version = "12.12.0" description = "Microsoft Azure Blob Storage Client Library for Python" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -azure-core = ">=1.15.0,<2.0.0" +azure-core = ">=1.23.1,<2.0.0" cryptography = ">=2.1.4" msrest = ">=0.6.21" [[package]] name = "black" -version = "21.12b0" +version = "22.3.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] -click = ">=7.1.2" +click = ">=8.0.0" mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0,<1" +pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = ">=0.2.6,<2.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, - {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, -] +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.5.18.1" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cffi" @@ -223,11 +219,11 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.3" +version = "8.1.3" description = "Composable command line interface toolkit" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -243,18 +239,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.2" +version = "6.4.1" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] toml = ["tomli"] [[package]] name = "cryptography" -version = "36.0.2" +version = "37.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -269,10 +265,9 @@ docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] - name = "dacite" version = "1.6.0" description = "Simple creation of data classes from dictionaries." @@ -310,7 +305,7 @@ tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"] [[package]] name = "filelock" -version = "3.4.2" +version = "3.7.1" description = "A platform independent file lock." category = "dev" optional = false @@ -344,7 +339,7 @@ python-versions = ">=3.7" [[package]] name = "fsspec" -version = "2022.2.0" +version = "2022.5.0" description = "File-system specification" category = "main" optional = false @@ -371,10 +366,11 @@ s3 = ["s3fs"] sftp = ["paramiko"] smb = ["smbprotocol"] ssh = ["paramiko"] +tqdm = ["tqdm"] [[package]] name = "identify" -version = "2.4.5" +version = "2.5.1" description = "File identification library for Python" category = "dev" optional = false @@ -436,21 +432,21 @@ python-versions = "*" [[package]] name = "msal" -version = "1.17.0" +version = "1.18.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." category = "main" optional = false python-versions = "*" [package.dependencies] -cryptography = ">=0.6,<39" +cryptography = ">=0.6,<40" PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} requests = ">=2.0.0,<3" [[package]] name = "msal-extensions" -version = "0.3.1" -description = "" +version = "1.0.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
category = "main" optional = false python-versions = "*" @@ -464,20 +460,21 @@ portalocker = [ [[package]] name = "msrest" -version = "0.6.21" +version = "0.7.0" description = "AutoRest swagger generator Python client runtime." category = "main" optional = false python-versions = "*" [package.dependencies] +azure-core = ">=1.24.0" certifi = ">=2017.4.17" isodate = ">=0.6.0" requests = ">=2.16,<3.0" requests-oauthlib = ">=0.5.0" [package.extras] -async = ["aiohttp (>=3.0)", "aiodns"] +async = ["aiodns", "aiohttp (>=3.0)"] [[package]] name = "multidict" @@ -495,6 +492,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "neo4j" +version = "4.4.3" +description = "Neo4j Bolt driver for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = "*" + [[package]] name = "networkx" version = "2.6.3" @@ -540,7 +548,6 @@ signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] - name = "packaging" version = "21.3" description = "Core utilities for Python packages" @@ -561,15 +568,15 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "platformdirs" -version = "2.4.1" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pluggy" @@ -604,11 +611,11 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "sphinx (>=3.0.3)", "pytest [[package]] name = "pre-commit" -version = "2.17.0" +version = "2.19.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] cfgv = ">=2.0.0" @@ -621,14 +628,14 @@ virtualenv = ">=20.0.8" [[package]] name = "psutil" -version = "5.9.0" +version = "5.9.1" description = "Cross-platform lib for process and system monitoring in Python." 
category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "py" @@ -667,8 +674,8 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.0" -description = "Data validation and settings management using python 3.6 type hinting" +version = "1.9.1" +description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.6.1" @@ -690,7 +697,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyjwt" -version = "2.3.0" +version = "2.4.0" description = "JSON Web Token implementation in Python" category = "main" optional = false @@ -715,22 +722,22 @@ python-versions = ">=3.6" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pytest" -version = "6.2.5" +version = "7.1.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -741,10 +748,10 @@ iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" py = ">=1.8.2" -toml = "*" +tomli = ">=1.0.0" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-black" @@ -777,15 +784,15 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "pytest-flake8" -version = "1.0.7" +version = "1.1.1" description = "pytest plugin to check FLAKE8 requirements" category = "dev" optional = false python-versions = "*" [package.dependencies] -flake8 = ">=3.5" -pytest = ">=3.5" +flake8 = ">=4.0" +pytest = ">=7.0" [[package]] name = "pytest-timeout" @@ -809,6 +816,14 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" [package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2022.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "pywin32" version = "227" @@ -827,20 +842,20 @@ python-versions = ">=3.6" [[package]] name = "requests" -version = "2.27.1" +version = "2.28.0" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2.0.0,<2.1.0" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] @@ -862,7 +877,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" @@ -876,15 +891,15 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "typed-ast" -version = "1.5.2" +version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -892,11 +907,11 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" @@ -913,7 +928,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.13.0" +version = "20.14.1" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -931,17 +946,22 @@ docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sp testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] -name = "yarl" -version = "1.7.2" -description = "Yet another URL library" +name = "websocket-client" +version = "1.3.2" +description = "WebSocket client for Python with low level API options" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] [[package]] -name = "websocket-client" -version = "1.2.3" -description = "WebSocket client for Python with low level API options" +name = "yarl" +version = "1.7.2" +description = "Yet another URL library" category = "main" optional = false python-versions = ">=3.6" @@ -951,27 +971,22 @@ idna = ">=2.0" multidict = ">=4.0" typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", 
"pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "a0f72e4b4eb5755212b6ee6cfdf36f58c69020d10d7b1d1ac0a7d172f152f6d2" +content-hash = "cbfee02b9da6ea3277361bcbec821ef70f42ddab45cad62f6e2bdb3e84ebfdc0" [metadata.files] adal = [ @@ -1076,28 +1091,49 @@ attrs = [ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] azure-core = [ - {file = "azure-core-1.23.0.zip", hash = "sha256:a56a6f720d0948d3f3e4a25a5fe46df2f1b7f865c358d74e2ce47dbb49262608"}, - {file = "azure_core-1.23.0-py3-none-any.whl", hash = "sha256:23c1389a115c328878c4eface3ca3899c2468313ea6f883f2347d6924cd887b2"}, + {file = "azure-core-1.24.1.zip", hash = "sha256:39c5d59d04209bb70a1a7ee879cef05d07bc76472cd3fb5eaa2e607a90d312bb"}, + {file = "azure_core-1.24.1-py3-none-any.whl", hash = "sha256:f48a640affa59fa45ac770565b3bead4c4f834242d16983c1ae2bb173a4b8a6d"}, ] azure-datalake-store = [ {file = "azure-datalake-store-0.0.52.tar.gz", hash = "sha256:4198ddb32614d16d4502b43d5c9739f81432b7e0e4d75d30e05149fe6007fea2"}, {file = "azure_datalake_store-0.0.52-py2.py3-none-any.whl", hash = "sha256:aaed72b9c856824aeab554f4dbe0ef2c6d0ff36700bdd8b93d8298793117c48e"}, ] azure-identity = [ - {file = "azure-identity-1.8.0.zip", hash = "sha256:020ff0e47157852e4aac8a3adb06841827147f27a94cbe74a904425d8e62d93c"}, - {file = "azure_identity-1.8.0-py3-none-any.whl", hash = "sha256:8d87aff09b8dabe3c99bb934798dcdeb2f2d49614ecc4f0425cc888faafd64ae"}, + {file = "azure-identity-1.10.0.zip", hash = "sha256:656e5034d9cef297cf9b35376ed620085273c18cfa52cea4a625bf0d5d2d6409"}, + {file = "azure_identity-1.10.0-py3-none-any.whl", hash = "sha256:b386f1ccbea6a48b9ab7e7f162adc456793c345193a7c1a713959562b08dcbbd"}, ] azure-storage-blob = [ - {file = "azure-storage-blob-12.10.0.zip", hash = "sha256:3c7dc2c93e7ff2a731acd66a36a1f0a6266072b4154deba4894dab891285ea3a"}, - {file = "azure_storage_blob-12.10.0-py3-none-any.whl", hash = "sha256:a70995c4f9310eb704594f30505d1499286b4caac5543a2ebfe84431c4a38b0b"}, + {file = "azure-storage-blob-12.12.0.zip", hash = "sha256:f6daf07d1ca86d189ae15c9b1859dff5b7127bf24a07a4bbe41e0b81e01d62f7"}, + {file = "azure_storage_blob-12.12.0-py3-none-any.whl", hash = "sha256:1eac4c364309ccc193c80ee26c78d25dfbf10926b1309095a448a7a0388526eb"}, ] black = [ - {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, - {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, @@ -1160,83 +1196,79 @@ charset-normalizer = [ {file = 
"charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, + {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, + {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, + {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, + {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, + {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, + {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, + {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, + {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash 
= "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, + {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, + {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, + {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, ] cryptography = [ - {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, - {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, - {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"}, - {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"}, - {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"}, - {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"}, - {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"}, - {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"}, - {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"}, - {file = "cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"}, - {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"}, - {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"}, - {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"}, - {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"}, - {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"}, - {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"}, - {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"}, - {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"}, - {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"}, - {file = "cryptography-36.0.2.tar.gz", hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"}, + {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:ef15c2df7656763b4ff20a9bc4381d8352e6640cfeb95c2972c38ef508e75181"}, + {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3c81599befb4d4f3d7648ed3217e00d21a9341a9a688ecdd615ff72ffbed7336"}, + {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2bd1096476aaac820426239ab534b636c77d71af66c547b9ddcd76eb9c79e004"}, + {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31fe38d14d2e5f787e0aecef831457da6cec68e0bb09a35835b0b44ae8b988fe"}, + {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093cb351031656d3ee2f4fa1be579a8c69c754cf874206be1d4cf3b542042804"}, + {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59b281eab51e1b6b6afa525af2bd93c16d49358404f814fe2c2410058623928c"}, + {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:0cc20f655157d4cfc7bada909dc5cc228211b075ba8407c46467f63597c78178"}, + {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f8ec91983e638a9bcd75b39f1396e5c0dc2330cbd9ce4accefe68717e6779e0a"}, + {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:46f4c544f6557a2fefa7ac8ac7d1b17bf9b647bd20b16decc8fbcab7117fbc15"}, + {file = "cryptography-37.0.2-cp36-abi3-win32.whl", hash = "sha256:731c8abd27693323b348518ed0e0705713a36d79fdbd969ad968fbef0979a7e0"}, + {file = "cryptography-37.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:471e0d70201c069f74c837983189949aa0d24bb2d751b57e26e3761f2f782b8d"}, + {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a68254dd88021f24a68b613d8c51d5c5e74d735878b9e32cc0adf19d1f10aaf9"}, + {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:a7d5137e556cc0ea418dca6186deabe9129cee318618eb1ffecbd35bee55ddc1"}, + {file = "cryptography-37.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aeaba7b5e756ea52c8861c133c596afe93dd716cbcacae23b80bc238202dc023"}, + {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e590dd70642eb2079d280420a888190aa040ad20f19ec8c6e097e38aa29e06"}, + {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1b9362d34363f2c71b7853f6251219298124aa4cc2075ae2932e64c91a3e2717"}, + {file = "cryptography-37.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e53258e69874a306fcecb88b7534d61820db8a98655662a3dd2ec7f1afd9132f"}, + {file = "cryptography-37.0.2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:1f3bfbd611db5cb58ca82f3deb35e83af34bb8cf06043fa61500157d50a70982"}, + {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:419c57d7b63f5ec38b1199a9521d77d7d1754eb97827bbb773162073ccd8c8d4"}, + {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:dc26bb134452081859aa21d4990474ddb7e863aa39e60d1592800a8865a702de"}, + {file = "cryptography-37.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:3b8398b3d0efc420e777c40c16764d6870bcef2eb383df9c6dbb9ffe12c64452"}, + {file = "cryptography-37.0.2.tar.gz", hash = "sha256:f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e"}, ] dacite = [ {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, @@ -1251,8 +1283,8 @@ docker = [ {file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, ] filelock = [ - {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, - {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, + {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, + {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -1320,12 +1352,12 @@ frozenlist = [ {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, ] fsspec = [ - {file = "fsspec-2022.2.0-py3-none-any.whl", hash = "sha256:eb9c9d9aee49d23028deefffe53e87c55d3515512c63f57e893710301001449a"}, - {file = "fsspec-2022.2.0.tar.gz", hash = "sha256:20322c659538501f52f6caa73b08b2ff570b7e8ea30a86559721d090e473ad5c"}, + {file = "fsspec-2022.5.0-py3-none-any.whl", hash = "sha256:2c198c50eb541a80bbd03540b07602c4a957366f3fb416a1f270d34bd4ff0926"}, + {file = "fsspec-2022.5.0.tar.gz", hash = "sha256:7a5459c75c44e760fbe6a3ccb1f37e81e023cde7da8ba20401258d877ec483b4"}, ] identify = [ - {file = "identify-2.4.5-py2.py3-none-any.whl", hash = "sha256:d27d10099844741c277b45d809bd452db0d70a9b41ea3cd93799ebbbcc6dcb29"}, - {file = "identify-2.4.5.tar.gz", hash = "sha256:d11469ff952a4d7fd7f9be520d335dc450f585d474b39b5dfb86a500831ab6c7"}, + {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, + {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -1348,16 +1380,16 @@ mccabe = [ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] msal = [ - {file = "msal-1.17.0-py2.py3-none-any.whl", hash = "sha256:5a52d78e70d2c451e267c1e8c2342e4c06f495c75c859aeafd9260d3974f09fe"}, - {file = "msal-1.17.0.tar.gz", hash = "sha256:04e3cb7bb75c51f56d290381f23056207df1f3eb594ed03d38551f3b16d2a36e"}, + {file = "msal-1.18.0-py2.py3-none-any.whl", hash = "sha256:9c10e6cb32e0b6b8eaafc1c9a68bc3b2ff71505e0c5b8200799582d8b9f22947"}, + {file = "msal-1.18.0.tar.gz", hash = "sha256:576af55866038b60edbcb31d831325a1bd8241ed272186e2832968fd4717d202"}, ] msal-extensions = [ - {file = "msal-extensions-0.3.1.tar.gz", hash = "sha256:d9029af70f2cbdc5ad7ecfed61cb432ebe900484843ccf72825445dbfe62d311"}, - {file = "msal_extensions-0.3.1-py2.py3-none-any.whl", hash = "sha256:89df9c0237e1adf16938fa58575db59c2bb9de04a83ffb0452c8dfc79031f717"}, + {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, + {file = 
"msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, ] msrest = [ - {file = "msrest-0.6.21-py2.py3-none-any.whl", hash = "sha256:c840511c845330e96886011a236440fafc2c9aff7b2df9c0a92041ee2dee3782"}, - {file = "msrest-0.6.21.tar.gz", hash = "sha256:72661bc7bedc2dc2040e8f170b6e9ef226ee6d3892e01affd4d26b06474d68d8"}, + {file = "msrest-0.7.0-py2.py3-none-any.whl", hash = "sha256:59b06d168ea11448b79921be0760f488347e140d69fb2c2b9f37000615ca4a39"}, + {file = "msrest-0.7.0.zip", hash = "sha256:2210a4dff54c6d3af30bf2fc10db206a1bc5b097c654ce54b0ecd633d762afdf"}, ] multidict = [ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, @@ -1424,6 +1456,9 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] +neo4j = [ + {file = "neo4j-4.4.3.tar.gz", hash = "sha256:f2c9838607f7c95fa6e0c0153be48dfbbcef6f29930df3c596509490ca9eea8c"}, +] networkx = [ {file = "networkx-2.6.3-py3-none-any.whl", hash = "sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef"}, {file = "networkx-2.6.3.tar.gz", hash = "sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51"}, @@ -1475,8 +1510,8 @@ pathspec = [ {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] platformdirs = [ - {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, - {file = "platformdirs-2.4.1.tar.gz", hash = "sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1487,42 +1522,42 @@ portalocker = [ {file = "portalocker-2.4.0.tar.gz", hash = "sha256:a648ad761b8ea27370cb5915350122cd807b820d2193ed5c9cc28f163df637f4"}, ] pre-commit = [ - {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, - {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, + {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, + {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, ] psutil = [ - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, - {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, - {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, - {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, - {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, - {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, - {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, - {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, - {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, - {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, - {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, - {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, - {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, - {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, - {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, - {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, - {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, - {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, - {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, + {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, + {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, + {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, + {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, + {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, + {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, + {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, + {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, + {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", 
hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, + {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, + {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, + {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, + {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, + {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, + {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, + {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, + {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, + {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -1569,49 +1604,49 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = 
"pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pyjwt = [ - {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, - {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, + {file = 
"PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, ] pymgclient = [ {file = "pymgclient-1.2.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:908b039188c31b0fc6117aacc040c0ed33067a9e09cb14c934eecf73e429f961"}, @@ -1628,12 +1663,12 @@ pymgclient = [ {file = "pymgclient-1.2.0.tar.gz", hash = "sha256:50143bd138880dd32218d17dada37ade12f7a7b35d7903605e76a1149fd8bb3e"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] pytest-black = [ {file = "pytest-black-0.3.12.tar.gz", hash = "sha256:1d339b004f764d6cd0f06e690f6dd748df3d62e6fe1a692d6a5500ac2c5b75a5"}, @@ -1643,8 +1678,8 @@ pytest-cov = [ {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-flake8 = [ - {file = "pytest-flake8-1.0.7.tar.gz", hash = "sha256:f0259761a903563f33d6f099914afef339c085085e643bee8343eb323b32dd6b"}, - {file = "pytest_flake8-1.0.7-py2.py3-none-any.whl", hash = "sha256:c28cf23e7d359753c896745fd4ba859495d02e16c84bac36caa8b1eec58f5bc1"}, + {file = "pytest-flake8-1.1.1.tar.gz", hash = "sha256:ba4f243de3cb4c2486ed9e70752c80dd4b636f7ccb27d4eba763c35ed0cd316e"}, + {file = "pytest_flake8-1.1.1-py2.py3-none-any.whl", hash = "sha256:e0661a786f8cbf976c185f706fdaf5d6df0b1667c3bcff8e823ba263618627e7"}, ] pytest-timeout = [ {file = "pytest-timeout-1.4.2.tar.gz", hash = "sha256:20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76"}, @@ -1654,6 +1689,10 @@ python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] +pytz = [ + {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, + {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, +] pywin32 = [ {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"}, {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"}, @@ -1704,8 +1743,8 @@ pyyaml = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ - {file = 
"requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, + {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, + {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, ] requests-oauthlib = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, @@ -1720,46 +1759,50 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typed-ast = [ - {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, - {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, - {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, - {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, - {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, - {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, - {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, - {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, - {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, - {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = 
"typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] virtualenv = [ - {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, - {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, + {file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"}, + {file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"}, +] +websocket-client = [ + {file = "websocket-client-1.3.2.tar.gz", hash = 
"sha256:50b21db0058f7a953d67cc0445be4b948d7fc196ecbeb8083d68d94628e4abf6"}, + {file = "websocket_client-1.3.2-py3-none-any.whl", hash = "sha256:722b171be00f2b90e1d4fb2f2b53146a536ca38db1da8ff49c972a4e1365d0ef"}, ] yarl = [ {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, @@ -1835,11 +1878,7 @@ yarl = [ {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, ] -websocket-client = [ - {file = "websocket-client-1.2.3.tar.gz", hash = "sha256:1315816c0acc508997eb3ae03b9d3ff619c9d12d544c9a9b553704b1cc4f6af5"}, - {file = "websocket_client-1.2.3-py3-none-any.whl", hash = "sha256:2eed4cc58e4d65613ed6114af2f380f7910ff416fc8c46947f6e76b6815f56c0"}, -] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/pydoc-markdown.yml b/pydoc-markdown.yml index aa7ed5c9..f7833fc7 100644 --- a/pydoc-markdown.yml +++ b/pydoc-markdown.yml @@ -3,11 +3,22 @@ loaders: search_path: [.] modules: - gqlalchemy.disk_storage - - gqlalchemy.memgraph + - gqlalchemy.instance_runner + - gqlalchemy.loaders - gqlalchemy.models - - gqlalchemy.query_builder - gqlalchemy.transformations - gqlalchemy.utilities + - gqlalchemy.graph_algorithms.integrated_algorithms + - gqlalchemy.graph_algorithms.query_builder + - gqlalchemy.graph_algorithms.query_modules + - gqlalchemy.query_builders.declarative_base + - gqlalchemy.query_builders.memgraph_query_builder + - gqlalchemy.query_builders.neo4j_query_builder + - gqlalchemy.query_modules.push_streams.kafka + - gqlalchemy.query_modules.push_streams.power_bi + - gqlalchemy.vendors.database_client + - gqlalchemy.vendors.memgraph + - gqlalchemy.vendors.neo4j processors: - type: filter skip_empty_modules: true diff --git a/pyproject.toml b/pyproject.toml index 6d97f1d0..dd6326e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,15 @@ [tool.poetry] name = "GQLAlchemy" -version = "1.2.0" +version = "1.3.0" description = "GQLAlchemy is library developed with purpose of assisting writing and running queries on Memgraph." 
repository = "https://github.com/memgraph/gqlalchemy" authors = [ - "Jure Bajic ", + "Boris Tasevski ", + "Bruno Sacaric ", + "Ivan Despot ", "Josip Mrden ", - "Josip Matak ", - "Marko Budiselic ", - "Mislav Vuletić ", + "Katarina Supe ", + "Niko Krvavica ", ] license = "Apache-2.0" readme = "README.md" @@ -36,15 +37,16 @@ python = "^3.7" pymgclient = "1.2.0" networkx = "^2.5.1" pydantic = "^1.8.2" -docker = "^5.0.3" psutil = "^5.9.0" pyarrow = "^7.0.0" dacite = "^1.6.0" adlfs = "^2022.2.0" +neo4j = "^4.4.3" +docker = "^5.0.3" [tool.poetry.dev-dependencies] -black = "^21.5b1" -pytest = "^6.2.4" +black = "^22.3.0" +pytest = "^7.1.1" pytest-black = "^0.3.12" pytest-cov = "^2.12.0" pytest-flake8 = "^1.0.7" @@ -53,4 +55,4 @@ pre-commit = "^2.15.0" [build-system] requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/scripts/query_module_signature_generator.py b/scripts/query_module_signature_generator.py new file mode 100644 index 00000000..36822d48 --- /dev/null +++ b/scripts/query_module_signature_generator.py @@ -0,0 +1,26 @@ +from gqlalchemy import Memgraph +from gqlalchemy.graph_algorithms.query_modules import QM_KEY_NAME, QM_KEY_TYPE + +QUERIES_DEST = "queries.txt" + + +if __name__ == "__main__": + mg = Memgraph() + + modules = mg.get_procedures() + + with open(QUERIES_DEST, "w") as f: + for query_module in modules: + arguments_upper = [f"{x[QM_KEY_NAME]}: {x[QM_KEY_TYPE]}" for x in query_module.arguments] + arguments_upper_str = "" if len(arguments_upper) == 0 else f", {', '.join(arguments_upper)}" + + arguments_lower = [f"{x[QM_KEY_NAME]}" for x in query_module.arguments] + arguments_lower_str = "" if len(arguments_lower) == 0 else f", ({', '.join(arguments_lower)})" + + f.write(f"def {query_module.name.replace('.', '_')}(self{arguments_upper_str}) -> DeclarativeBase:\n") + + f.write(f'\treturn self.call("{query_module.name}"{arguments_lower_str})\n\n') + + +# Each entry in query_module.arguments is a mapping with "type", "name" and an +# optional "default" key; the stubs are emitted from "name" and "type". diff --git a/tests/conftest.py b/tests/conftest.py index a6809b38..fa88b41b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,16 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License.
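A note on the scripts/query_module_signature_generator.py file added above: it turns the procedure signatures that Memgraph reports via get_procedures() into Python method stubs for the extended query builder. As an illustration (not part of the PR), assuming the database reports a procedure pagerank.get with a single NUMBER argument max_iterations, the script would write roughly:

def pagerank_get(self, max_iterations: NUMBER) -> DeclarativeBase:
    return self.call("pagerank.get", (max_iterations))

The output is a scaffold rather than valid Python as written: NUMBER is a Cypher type, and (max_iterations) is a parenthesized name rather than a tuple, so the generated stubs are presumably hand-edited before landing in the query builder.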
+import pytest from pathlib import Path +from typing import Tuple -import pytest -from gqlalchemy import Memgraph +from gqlalchemy import Memgraph, models, Neo4j, QueryBuilder, Neo4jQueryBuilder +from gqlalchemy.instance_runner import MemgraphInstanceDocker def get_data_dir() -> Path: return Path(__file__).parents[0].joinpath("data") +@pytest.fixture +def database(request): + return request.getfixturevalue(request.param) + + +@pytest.fixture +def vendor(request): + return request.getfixturevalue(request.param) + + @pytest.fixture def memgraph() -> Memgraph: memgraph = Memgraph() @@ -35,6 +47,45 @@ def memgraph() -> Memgraph: memgraph.ensure_constraints([]) +@pytest.fixture +def neo4j() -> Neo4j: + neo4j = Neo4j(port="7688") + neo4j.ensure_constraints([]) + neo4j.ensure_indexes([]) + neo4j.drop_database() + + yield neo4j + + neo4j.ensure_constraints([]) + neo4j.ensure_indexes([]) + + +@pytest.fixture +def memgraph_query_builder() -> Tuple[Memgraph, QueryBuilder]: + memgraph = Memgraph() + memgraph.ensure_indexes([]) + memgraph.ensure_constraints([]) + memgraph.drop_database() + + yield (memgraph, QueryBuilder(memgraph)) + + memgraph.ensure_indexes([]) + memgraph.ensure_constraints([]) + + +@pytest.fixture +def neo4j_query_builder() -> Tuple[Neo4j, Neo4jQueryBuilder]: + neo4j = Neo4j(port="7688") + neo4j.ensure_constraints([]) + neo4j.ensure_indexes([]) + neo4j.drop_database() + + yield (neo4j, Neo4jQueryBuilder(neo4j)) + + neo4j.ensure_constraints([]) + neo4j.ensure_indexes([]) + + @pytest.fixture def memgraph_without_dropping_constraints() -> Memgraph: memgraph = Memgraph() @@ -58,3 +109,52 @@ def populated_memgraph(dataset_file: str) -> Memgraph: yield memgraph memgraph.drop_database() + + +@pytest.fixture +def remove_module_memgraph(module_remove_name: str) -> Memgraph: + memgraph = Memgraph() + memgraph.ensure_indexes([]) + memgraph.ensure_constraints([]) + memgraph.drop_database() + + yield memgraph + + module_paths = list(memgraph.execute_and_fetch("CALL mg.get_module_files() YIELD path")) + module_path = [path["path"] for path in module_paths if module_remove_name in path["path"]][0] + list(memgraph.execute_and_fetch(f"CALL mg.delete_module_file('{module_path}') YIELD *")) + memgraph.drop_database() + + +@pytest.fixture(scope="session", autouse=True) +def init(): + models.IGNORE_SUBCLASSNOTFOUNDWARNING = True + + +@pytest.fixture +def configuration(): + return {"--log-level": "TRACE"} + + +@pytest.fixture +def memgraph_instance_docker(): + def _memgraph_instance_docker(config): + return MemgraphInstanceDocker(port=7690, config=config) + + return _memgraph_instance_docker + + +@pytest.fixture +def memgraph_instance_docker_with_config(memgraph_instance_docker, configuration): + instance = memgraph_instance_docker(config=configuration) + yield instance + + instance.stop() + + +@pytest.fixture +def memgraph_instance_docker_without_config(memgraph_instance_docker): + instance = memgraph_instance_docker(config={}) + yield instance + + instance.stop() diff --git a/tests/docs/test_ogm.py b/tests/docs/test_ogm.py index c28e457c..c800ccb0 100644 --- a/tests/docs/test_ogm.py +++ b/tests/docs/test_ogm.py @@ -1,6 +1,22 @@ -from gqlalchemy import Memgraph, Node, Relationship, Field, match +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
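The database and vendor fixtures added to conftest.py above rely on pytest's indirect parametrization: request.param carries the name of another fixture, which getfixturevalue resolves at run time. A minimal sketch of how a test can run against both backends (the test itself is hypothetical):

import pytest

@pytest.mark.parametrize("database", ["memgraph", "neo4j"], indirect=True)
def test_runs_on_both_backends(database):
    # `database` is whichever client fixture the parameter named
    assert next(database.execute_and_fetch("RETURN 1 AS x"))["x"] == 1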
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from typing import Optional +from gqlalchemy import Memgraph, Node, Relationship, Field, match +from gqlalchemy.query_builders.memgraph_query_builder import Operator + db = Memgraph() @@ -65,7 +81,11 @@ def test_node_mapping(self): ).save(db) result = next( - match().node("Streamer", variable="s").where(item="s.id", operator="=", literal="7").return_().execute() + match() + .node("Streamer", variable="s") + .where(item="s.id", operator=Operator.EQUAL, literal="7") + .return_() + .execute() )["s"] assert result.id == streamer.id @@ -113,7 +133,11 @@ def test_node_saving_1(self): language = Language(name="en").save(db) result = next( - match().node("UserSave", variable="u").where(item="u.id", operator="=", literal="3").return_().execute() + match() + .node("UserSave", variable="u") + .where(item="u.id", operator=Operator.EQUAL, literal="3") + .return_() + .execute() )["u"] assert result.id == user.id @@ -131,7 +155,11 @@ def test_node_saving_2(self): db.save_node(language) result = next( - match().node("UserSave", variable="u").where(item="u.id", operator="=", literal="4").return_().execute() + match() + .node("UserSave", variable="u") + .where(item="u.id", operator=Operator.EQUAL, literal="4") + .return_() + .execute() )["u"] assert result.id == user.id diff --git a/tests/docs/test_query_builder.py b/tests/docs/test_query_builder.py index d22bbaaa..a503943f 100644 --- a/tests/docs/test_query_builder.py +++ b/tests/docs/test_query_builder.py @@ -14,8 +14,24 @@ from unittest.mock import patch -from gqlalchemy import match, call, create, merge -from gqlalchemy.memgraph import Memgraph +from gqlalchemy import call, create, match, merge +from gqlalchemy.vendors.memgraph import Memgraph +from gqlalchemy.query_builders.declarative_base import CallPartialQuery, Operator + + +def test_call_procedure_arguments_string(): + call_procedure = CallPartialQuery("dummy.procedure", "'a', 'b'").construct_query() + assert call_procedure == " CALL dummy.procedure('a', 'b') " + + +def test_call_procedure_arguments_tuple(): + call_procedure = CallPartialQuery("dummy.procedure", ("a", "b")).construct_query() + assert call_procedure == " CALL dummy.procedure('a', 'b') " + + +def test_call_procedure_arguments_tuple_string_int(): + call_procedure = CallPartialQuery("dummy.procedure", ("a", 1)).construct_query() + assert call_procedure == " CALL dummy.procedure('a', 1) " def test_call_procedures_1(memgraph): @@ -67,7 +83,10 @@ def test_create_nodes_relationships_2(memgraph): def test_create_nodes_relationships_3(memgraph): query_builder = ( - create().node(labels="Person", name="Leslie").to(edge_label="FRIENDS_WITH").node(labels="Person", name="Ron") + create() + .node(labels="Person", name="Leslie") + .to(relationship_type="FRIENDS_WITH") + .node(labels="Person", name="Ron") ) expected_query = " CREATE (:Person {name: 'Leslie'})-[:FRIENDS_WITH]->(:Person {name: 'Ron'})" @@ -117,8 +136,8 @@ def test_filter_data_1(memgraph): .node("Person", variable="p1") .to("FRIENDS_WITH") .node("Person", variable="p2") - .where(item="n.name", operator="=", literal="Ron") - .or_where(item="m.id", operator="=", literal=0) + 
.where(item="n.name", operator=Operator.EQUAL, literal="Ron") + .or_where(item="m.id", operator=Operator.EQUAL, literal=0) .return_() ) @@ -154,6 +173,23 @@ def test_return_results_2(memgraph): mock.assert_called_with(expected_query) +def test_return_results_2_new(memgraph): + query_builder = ( + match() + .node(labels="Person", variable="p1") + .to() + .node(labels="Person", variable="p2") + .return_([("p1", "first"), "p2"]) + ) + + expected_query = " MATCH (p1:Person)-[]->(p2:Person) RETURN p1 AS first, p2 " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_return_results_3(memgraph): query_builder = match().node(labels="Person", variable="p").return_().limit(10) diff --git a/tests/graph_algorithms/__init__.py b/tests/graph_algorithms/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/tests/graph_algorithms/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/graph_algorithms/test_query_builder.py b/tests/graph_algorithms/test_query_builder.py new file mode 100644 index 00000000..19b17703 --- /dev/null +++ b/tests/graph_algorithms/test_query_builder.py @@ -0,0 +1,45 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gqlalchemy import Memgraph +from gqlalchemy.graph_algorithms.query_builder import MemgraphQueryBuilder +from gqlalchemy.query_builders.memgraph_query_builder import QueryBuilder + + +def test_memgraph_query_builder_methods_exist(memgraph: Memgraph): + """ + Tests functionality if all the procedures that are defined + in the Memgraph extended query builder are present in the code. 
+ """ + + mg_qb = MemgraphQueryBuilder() + simple_qb = QueryBuilder() + + mg_qb_methods = set([method_name for method_name in dir(mg_qb) if callable(getattr(mg_qb, method_name))]) + + simple_qb_methods = set( + [method_name for method_name in dir(simple_qb) if callable(getattr(simple_qb, method_name))] + ) + + query_module_names = mg_qb_methods - simple_qb_methods + actual_query_module_names = [procedure.name.replace(".", "_", 1) for procedure in memgraph.get_procedures()] + + print(f"Query module names: {query_module_names}\n\n") + print(f"Actual: {actual_query_module_names}") + + for qm_name in query_module_names: + assert qm_name in actual_query_module_names + + for qm_name in actual_query_module_names: + assert qm_name in query_module_names diff --git a/tests/graph_algorithms/test_query_modules.py b/tests/graph_algorithms/test_query_modules.py new file mode 100644 index 00000000..46b5e418 --- /dev/null +++ b/tests/graph_algorithms/test_query_modules.py @@ -0,0 +1,54 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from gqlalchemy.graph_algorithms.query_modules import QueryModule + + +def test_set_inputs_exception(): + """setting an argument that doesn't exist shouldn't be possible""" + dummy_dict = { + "is_editable": True, + "is_write": False, + "name": "dummy", + "path": "", + "signature": "dummy() :: ()", + "arguments": [{}, {}], + "returns": [{}, {}], + } + + qm = QueryModule(**dummy_dict) + with pytest.raises(KeyError): + qm.set_argument_values(dummy=0) + + +def test_set_and_get_arguments(): + """use QueryModule class to set inputs and return in form for call()""" + color_graph_yield = { + "is_editable": True, + "is_write": False, + "name": "graph_coloring.color_graph", + "path": "/home/user/mage/python/graph_coloring.py", + "signature": 'graph_coloring.color_graph(parameters = {} :: MAP, edge_property = "weight" :: STRING) :: (color :: STRING, node :: STRING)', + "arguments": [ + {"type": "MAP", "name": "parameters", "default": "{}"}, + {"type": "STRING", "name": "edge_property", "default": "weight"}, + ], + "returns": [{"type": "STRING", "name": "color"}, {"type": "STRING", "name": "node"}], + } + + qm = QueryModule(**color_graph_yield) + qm.set_argument_values(edge_property="none") + assert qm.get_arguments_for_call() == '{}, "none"' diff --git a/tests/integration/test_constraints.py b/tests/integration/test_constraints.py index 249c6908..7936018a 100644 --- a/tests/integration/test_constraints.py +++ b/tests/integration/test_constraints.py @@ -14,7 +14,6 @@ from gqlalchemy import Field, MemgraphConstraintExists, MemgraphConstraintUnique, Memgraph, Node - db = Memgraph() diff --git a/tests/integration/test_graph_algorithms.py b/tests/integration/test_graph_algorithms.py new file mode 100644 index 00000000..8d48a6fd --- /dev/null +++ b/tests/integration/test_graph_algorithms.py @@ -0,0 +1,95 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. 
[https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import unittest.mock as mock +from typing import List + +from gqlalchemy.graph_algorithms.query_modules import QueryModule, parse_query_module_signature +from gqlalchemy import Memgraph, QueryBuilder + + +@pytest.mark.parametrize( + "signature, arguments, returns", + [ + ("dummy_module.1(num :: NUMBER) :: ()", [{"name": "num", "type": "NUMBER"}], []), + ( + "dummy_module.2(lst :: LIST OF STRING, num = 3 :: NUMBER) :: (ret :: STRING)", + [{"name": "lst", "type": "LIST OF STRING"}, {"name": "num", "type": "NUMBER", "default": 3}], + [{"name": "ret", "type": "STRING"}], + ), + ], +) +def test_parse_signature(signature: str, arguments: List, returns: List): + """test functionality of parsing a module signature""" + assert (arguments, returns) == parse_query_module_signature(signature=signature) + + +def test_get_procedures(memgraph: Memgraph): + """test get procedures with mock execute_and_fetch method, so MAGE query + modules are not needed for testing""" + + mock_modules = [ + { + "is_editable": True, + "is_write": False, + "name": "max_flow.get_flow", + "path": "/home/user/mage/python/max_flow.py", + "signature": 'max_flow.get_flow(start_v :: NODE, end_v :: NODE, edge_property = "weight" :: STRING) :: (max_flow :: NUMBER)', + }, + { + "is_editable": True, + "is_write": False, + "name": "max_flow.get_paths", + "path": "/home/user/mage/python/max_flow.py", + "signature": 'max_flow.get_paths(start_v :: NODE, end_v :: NODE, edge_property = "weight" :: STRING) :: (flow :: NUMBER, path :: PATH)', + }, + ] + + mock_execute_and_fetch = mock.Mock() + mock_execute_and_fetch.return_value = mock_modules + + real_execute_and_fetch = memgraph.execute_and_fetch + + def mock_execute_and_fetch_wrapper(query): + if query == "CALL mg.procedures() YIELD *;": + return mock_execute_and_fetch(query) + else: + return real_execute_and_fetch(query) + + memgraph.execute_and_fetch = mock_execute_and_fetch_wrapper + + assert str(memgraph.get_procedures()[0]) == "max_flow.get_flow" + + +def test_query_module_with_query_builder(): + mock_module = { + "is_editable": True, + "is_write": False, + "name": "max_flow.get_flow", + "path": "/home/user/mage/python/max_flow.py", + "signature": 'max_flow.get_flow(start_v :: NODE, end_v :: NODE, edge_property = "weight" :: STRING) :: (max_flow :: NUMBER)', + } + + query_module = QueryModule(**mock_module) + + query_module.set_argument_values(start_v=None, end_v=None) + + query_builder = QueryBuilder().call(procedure=query_module, arguments=query_module.get_arguments_for_call()) + expected_query = ' CALL max_flow.get_flow(None, None, "weight") ' + + with mock.patch.object(Memgraph, "execute", return_value=None) as m: + query_builder.execute() + + m.assert_called_with(expected_query) diff --git a/tests/integration/test_index.py b/tests/integration/test_index.py index d8aff558..0c6c85e9 100644 --- a/tests/integration/test_index.py +++ b/tests/integration/test_index.py @@ -12,9 +12,10 @@ # See the License
for the specific language governing permissions and # limitations under the License. -from gqlalchemy.models import MemgraphIndex -from gqlalchemy import Field, Node import pytest + +from gqlalchemy import Field, Node +from gqlalchemy.models import MemgraphIndex from gqlalchemy.exceptions import GQLAlchemyDatabaseMissingInNodeClassError diff --git a/tests/integration/test_memgraph.py b/tests/integration/test_memgraph.py index 75b4ed83..34aad45e 100644 --- a/tests/integration/test_memgraph.py +++ b/tests/integration/test_memgraph.py @@ -11,9 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import List import pytest +from typing import List + from gqlalchemy import Memgraph, Node, Relationship diff --git a/tests/integration/test_networkx.py b/tests/integration/test_networkx.py index 8e8cc57a..93978c9a 100644 --- a/tests/integration/test_networkx.py +++ b/tests/integration/test_networkx.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest from random import randint import networkx as nx -import pytest + from gqlalchemy import Memgraph from gqlalchemy.models import MemgraphIndex from gqlalchemy.transformations import nx_graph_to_memgraph_parallel, nx_to_cypher diff --git a/tests/integration/test_stream.py b/tests/integration/test_stream.py index 30fd75a4..98f476fb 100644 --- a/tests/integration/test_stream.py +++ b/tests/integration/test_stream.py @@ -11,8 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import pytest + from gqlalchemy import MemgraphKafkaStream, MemgraphPulsarStream, Memgraph +from gqlalchemy.exceptions import GQLAlchemyError def stream_exists(stream: str, memgraph: Memgraph) -> bool: @@ -22,25 +25,25 @@ def stream_exists(stream: str, memgraph: Memgraph) -> bool: def test_create_kafka_stream(memgraph): kafka_stream = MemgraphKafkaStream(name="test_stream", topics=["topic"], transform="kafka_stream.transform") - with pytest.raises(Exception) as e_info: + with pytest.raises(GQLAlchemyError) as e_info: memgraph.create_stream(kafka_stream) - assert "Local: Broker transport failure" in str(e_info.value) + assert "Local: Broker transport failure" in str(e_info.value.message) def test_create_pulsar_stream(memgraph): pulsar_stream = MemgraphPulsarStream(name="test_stream", topics=["topic"], transform="pulsar_stream.transform") - with pytest.raises(Exception) as e_info: + with pytest.raises(GQLAlchemyError) as e_info: memgraph.create_stream(pulsar_stream) - assert "Pulsar consumer test_stream : ConnectError" in str(e_info.value) + assert "Pulsar consumer test_stream : ConnectError" in str(e_info.value.message) def test_drop_pulsar_stream(memgraph): pulsar_stream = MemgraphPulsarStream(name="test_stream", topics=["topic"], transform="pulsar_stream.transform") - with pytest.raises(Exception) as e_info: + with pytest.raises(GQLAlchemyError) as e_info: memgraph.create_stream(pulsar_stream) - assert "Pulsar consumer test_stream : ConnectError" in str(e_info.value) + assert "Pulsar consumer test_stream : ConnectError" in str(e_info.value.message) def test_create_kafka_stream_cypher(): @@ -58,17 +61,17 @@ def test_create_pulsar_stream_cypher(): def test_start_kafka_stream(memgraph): kafka_stream = MemgraphKafkaStream(name="test_stream", topics=["topic"], transform="kafka_stream.transform") - with pytest.raises(Exception) as e_info: + with pytest.raises(GQLAlchemyError) as e_info: memgraph.start_stream(kafka_stream) - assert "Couldn't find stream 'test_stream'" in str(e_info.value) + assert "Couldn't find stream 'test_stream'" in str(e_info.value.message) def test_start_pulsar_stream(memgraph): pulsar_stream = MemgraphPulsarStream(name="test_stream", topics=["topic"], transform="pulsar_stream.transform") - with pytest.raises(Exception) as e_info: + with pytest.raises(GQLAlchemyError) as e_info: memgraph.start_stream(pulsar_stream) - assert "Couldn't find stream 'test_stream'" in str(e_info.value) + assert "Couldn't find stream 'test_stream'" in str(e_info.value.message) def test_kafka_stream_extended_cypher(): diff --git a/tests/integration/test_trigger.py b/tests/integration/test_trigger.py index d82664bd..4b10b714 100644 --- a/tests/integration/test_trigger.py +++ b/tests/integration/test_trigger.py @@ -11,7 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + import pytest + from gqlalchemy import Memgraph, MemgraphTrigger from gqlalchemy.models import TriggerEventObject, TriggerEventType, TriggerExecutionPhase diff --git a/tests/memgraph/test_add_query_modules.py b/tests/memgraph/test_add_query_modules.py new file mode 100644 index 00000000..91cd46af --- /dev/null +++ b/tests/memgraph/test_add_query_modules.py @@ -0,0 +1,43 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from gqlalchemy import Memgraph +from gqlalchemy.exceptions import GQLAlchemyFileNotFoundError + + +@pytest.mark.parametrize( + "file_path, module_name, module_remove_name", + [ + ("gqlalchemy/query_modules/push_streams/kafka.py", "kafka.py", "kafka.py"), + ], +) +def test_add_query_module_valid(file_path, module_name, remove_module_memgraph): + memgraph = remove_module_memgraph.add_query_module(file_path=file_path, module_name=module_name) + + module_paths = list(memgraph.execute_and_fetch("CALL mg.get_module_files() YIELD path")) + + assert any("kafka" in path["path"] for path in module_paths) + + +@pytest.mark.parametrize( + "file_path, module_name", + [ + ("path_doesnt_exists", "fake"), + ], +) +def test_add_query_module_invalid(file_path, module_name): + with pytest.raises(GQLAlchemyFileNotFoundError): + Memgraph().add_query_module(file_path=file_path, module_name=module_name) diff --git a/tests/memgraph/test_memgraph.py b/tests/memgraph/test_memgraph.py new file mode 100644 index 00000000..ea842239 --- /dev/null +++ b/tests/memgraph/test_memgraph.py @@ -0,0 +1,25 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gqlalchemy.vendors.memgraph import Memgraph + + +def test_argument_lazy_default(): + memgraph = Memgraph() + assert memgraph._lazy is False + + +def test_argument_lazy_true(): + memgraph = Memgraph(lazy=True) + assert memgraph._lazy is True diff --git a/tests/memgraph/test_query_builder.py b/tests/memgraph/test_query_builder.py deleted file mode 100644 index e21d0ebf..00000000 --- a/tests/memgraph/test_query_builder.py +++ /dev/null @@ -1,1342 +0,0 @@ -# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
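The add_query_module API covered by the new tests above can also be called directly; a minimal sketch, with a hypothetical module path:

from gqlalchemy import Memgraph

db = Memgraph()
# "./my_module.py" is an illustrative file, not one shipped with the library
db.add_query_module(file_path="./my_module.py", module_name="my_module.py")
print([row["path"] for row in db.execute_and_fetch("CALL mg.get_module_files() YIELD path")])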
- -from gqlalchemy.exceptions import ( - GQLAlchemyLiteralAndExpressionMissingInWhere, - GQLAlchemyExtraKeywordArgumentsInWhere, -) -import pytest -from gqlalchemy import ( - InvalidMatchChainException, - QueryBuilder, - match, - call, - unwind, - with_, - merge, - Node, - Relationship, - Field, -) -from gqlalchemy.memgraph import Memgraph -from typing import Optional -from unittest.mock import patch -from gqlalchemy.exceptions import GQLAlchemyMissingOrder, GQLAlchemyOrderByTypeError -from gqlalchemy.query_builder import Order - - -def test_invalid_match_chain_throws_exception(): - with pytest.raises(InvalidMatchChainException): - QueryBuilder().node(":Label", "n").node(":Label", "m").return_() - - -def test_simple_create(memgraph): - query_builder = QueryBuilder().create().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " CREATE (n:L1)-[:TO]->(:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_simple_match(memgraph): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " MATCH (n:L1)-[:TO]->(:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_simple_with_multiple_labels(memgraph): - query_builder = ( - QueryBuilder().match().node(["L1", "L2", "L3"], variable="n").to("TO").node("L2", variable="m").return_() - ) - expected_query = " MATCH (n:L1:L2:L3)-[:TO]->(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_multiple_matches(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node("L1", variable="n") - .to("TO") - .node("L2", variable="m") - .match(True) - .node(variable="n") - .to("TO") - .node("L3") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) OPTIONAL MATCH (n)-[:TO]->(:L3) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_with_empty(memgraph): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").with_() - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WITH * " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_with(memgraph): - query_builder = QueryBuilder().match().node(variable="n").with_({"n": ""}) - expected_query = " MATCH (n) WITH n " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_union(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .return_({"n1": ""}) - .union(include_duplicates=False) - .match() - .node(variable="n2", labels="Node2") - .return_({"n2": ""}) - ) - expected_query = " MATCH (n1:Node1) RETURN n1 UNION MATCH (n2:Node2) RETURN n2 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_union_all(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .return_({"n1": ""}) - .union() - 
.match() - .node(variable="n2", labels="Node2") - .return_({"n2": ""}) - ) - expected_query = " MATCH (n1:Node1) RETURN n1 UNION ALL MATCH (n2:Node2) RETURN n2 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_delete(memgraph): - query_builder = QueryBuilder().match().node(variable="n1", labels="Node1").delete({"n1"}) - expected_query = " MATCH (n1:Node1) DELETE n1 " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - - -def test_simple_merge(memgraph): - query_builder = merge().node("L1", variable="n").to("TO").node("L2") - expected_query = " MERGE (n:L1)-[:TO]->(:L2)" - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_merge(memgraph): - query_builder = QueryBuilder().merge().node("L1", variable="n").to("TO").node("L2").return_() - expected_query = " MERGE (n:L1)-[:TO]->(:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_simple_create_with_variables(memgraph): - query_builder = ( - QueryBuilder().create().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " CREATE (n:L1)-[e:TO]->(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_simple_match_with_variables(memgraph): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " MATCH (n:L1)-[e:TO]->(m:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_simple_merge_with_variables(memgraph): - query_builder = merge().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - expected_query = " MERGE (n:L1)-[e:TO]->(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_merge_with_variables(memgraph): - query_builder = ( - QueryBuilder().merge().node("L1", variable="n").to("TO", variable="e").node("L2", variable="m").return_() - ) - expected_query = " MERGE (n:L1)-[e:TO]->(m:L2) RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_delete_detach(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n1", labels="Node1") - .to(edge_label="EDGE") - .node(variable="n2", labels="Node2") - .delete(["n1", "n2"], True) - ) - expected_query = " MATCH (n1:Node1)-[:EDGE]->(n2:Node2) DETACH DELETE n1, n2 " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_remove_property(memgraph): - query_builder = QueryBuilder().match().node(variable="n", labels="Node").remove({"n.name"}) - expected_query = " MATCH (n:Node) REMOVE n.name " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - 
- mock.assert_called_with(expected_query) - - -def test_multiple_merges(memgraph): - query_builder = ( - QueryBuilder() - .merge() - .node("L1", variable="n") - .to("TO") - .node("L2", variable="m") - .merge() - .node(variable="n") - .to("TO") - .node("L3") - .return_() - ) - expected_query = " MERGE (n:L1)-[:TO]->(m:L2) MERGE (n)-[:TO]->(:L3) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_load_csv_with_header(memgraph): - query_builder = QueryBuilder().load_csv("path/to/my/file.csv", True, "row").return_() - expected_query = " LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - - -def test_load_csv_no_header(memgraph): - query_builder = QueryBuilder().load_csv("path/to/my/file.csv", False, "row").return_() - expected_query = " LOAD CSV FROM 'path/to/my/file.csv' NO HEADER AS row RETURN * " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - mock.assert_called_with(expected_query) - - -def test_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node("L1", variable="n") - .to("TO") - .node("L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_where_not_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where_not(item="n.name", operator="=", expression="m.name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n.name = m.name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_where_not_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where_not(item="n", operator=":", expression="Node") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n:Node RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", 
return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=") - .return_() - ) - - -def test_where_not_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where_not(item="n.name", operator="=") - .return_() - ) - - -def test_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_where_not_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where_not(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_or_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .or_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_not_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .or_not_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR NOT m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .or_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR m.name = n.last_name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_not_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .or_not_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR NOT m.name = n.last_name RETURN * " - - with 
patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .or_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_not_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .or_not_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR NOT m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_or_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .or_where(item="m.name", operator="=") - .return_() - ) - - -def test_or_not_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .or_not_where(item="m.name", operator="=") - .return_() - ) - - -def test_or_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .or_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_or_not_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .or_not_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_and_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .and_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_not_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", 
operator="=", literal="best_name") - .and_not_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND NOT m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .and_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND m.name = n.last_name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_not_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .and_not_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND NOT m.name = n.last_name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .and_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_not_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node("L2", variable="m") - .where(item="n", operator=":", expression="Node") - .and_not_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND NOT m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_and_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .and_where(item="m.name", operator="=") - .return_() - ) - - -def test_and_not_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .and_not_where(item="m.name", operator="=") - .return_() - ) - - -def test_and_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", 
variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .and_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_and_not_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .and_not_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_xor_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .xor_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_not_where_literal(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="best_name") - .xor_not_where(item="m.id", operator="<", literal=4) - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR NOT m.id < 4 RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .xor_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR m.name = n.last_name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_not_where_property(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", expression="m.name") - .xor_not_where(item="m.name", operator="=", expression="n.last_name") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR NOT m.name = n.last_name RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_where_label(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .xor_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_not_where_label(memgraph): - query_builder = ( 
- QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .xor_not_where(item="m", operator=":", expression="User") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR NOT m:User RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_xor_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .xor_where(item="m.name", operator="=") - .return_() - ) - - -def test_xor_not_where_literal_and_expression_missing(memgraph): - with pytest.raises(GQLAlchemyLiteralAndExpressionMissingInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n.name", operator="=", literal="my_name") - .xor_not_where(item="m.name", operator="=") - .return_() - ) - - -def test_xor_and_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .xor_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_xor_not_and_where_extra_values(memgraph): - with pytest.raises(GQLAlchemyExtraKeywordArgumentsInWhere): - ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="m.name", operator="=", literal="best_name") - .xor_not_where(item="n.name", operator="=", literal="best_name", expression="Node") - .return_() - ) - - -def test_and_or_xor_not_where(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(labels="L1", variable="n") - .to(edge_label="TO") - .node(labels="L2", variable="m") - .where(item="n", operator=":", expression="Node") - .and_where(item="n.age", operator=">", literal=5) - .or_where(item="n", operator=":", expression="Node2") - .xor_where(item="n.name", operator="=", expression="m.name") - .xor_not_where(item="m", operator=":", expression="User") - .or_not_where(item="m", operator=":", expression="Node") - .and_not_where(item="m.name", operator="=", literal="John") - .return_() - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND n.age > 5 OR n:Node2 XOR n.name = m.name XOR NOT m:User OR NOT m:Node AND NOT m.name = 'John' RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_get_single(memgraph): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"n": ""}) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN n " - - with patch.object(Memgraph, "execute_and_fetch", return_value=iter([{"n": None}])) as mock: - query_builder.get_single(retrieve="n") - - mock.assert_called_with(expected_query) - - -def test_return_empty(memgraph): - query_builder = QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_() - expected_query = " MATCH 
(n:L1)-[:TO]->(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_return_alias(memgraph): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "first"}) - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_return_alias_same_as_variable(memgraph): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": "L1"}) - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_return_alias_empty(memgraph): - query_builder = ( - QueryBuilder().match().node("L1", variable="n").to("TO").node("L2", variable="m").return_({"L1": ""}) - ) - expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_call_procedure_pagerank(memgraph): - query_builder = ( - QueryBuilder() - .call(procedure="pagerank.get") - .yield_({"node": "", "rank": ""}) - .return_({"node": "node", "rank": "rank"}) - ) - expected_query = " CALL pagerank.get() YIELD node, rank RETURN node, rank " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_call_procedure_node2vec(memgraph): - query_builder = QueryBuilder().call(procedure="node2vec_online.get_embeddings", arguments="False, 2.0, 0.5") - expected_query = " CALL node2vec_online.get_embeddings(False, 2.0, 0.5) " - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_call_procedure_nxalg_betweenness_centrality(memgraph): - query_builder = ( - QueryBuilder() - .call(procedure="nxalg.betweenness_centrality", arguments="20, True") - .yield_() - .return_({"node": "", "betweenness": ""}) - ) - expected_query = " CALL nxalg.betweenness_centrality(20, True) YIELD * RETURN node, betweenness " - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_unwind(memgraph): - query_builder = ( - QueryBuilder().unwind(list_expression="[1, 2, 3, null]", variable="x").return_({"x": "", "'val'": "y"}) - ) - expected_query = " UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_remove_label(memgraph): - query_builder = QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove({"n:Node2"}) - expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2 " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_remove_property_and_label(memgraph): - query_builder = QueryBuilder().match().node(variable="n", labels=["Node1", "Node2"]).remove(["n:Node2", "n.name"]) - 
expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2, n.name " - - with patch.object(Memgraph, "execute", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_order_by(memgraph): - query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties="n.id") - expected_query = " MATCH (n) RETURN * ORDER BY n.id " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_order_by_desc(memgraph): - query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", Order.DESC)) - expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_order_by_asc(memgraph): - query_builder = QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", Order.ASC)) - expected_query = " MATCH (n) RETURN * ORDER BY n.id ASC " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_order_by_wrong_ordering(memgraph): - with pytest.raises(GQLAlchemyMissingOrder): - QueryBuilder().match().node(variable="n").return_().order_by(properties=("n.id", "DESCE")) - - -def test_order_by_wrong_type(memgraph): - with pytest.raises(GQLAlchemyOrderByTypeError): - QueryBuilder().match().node(variable="n").return_().order_by(properties=1) - - -def test_order_by_properties(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n") - .return_() - .order_by(properties=[("n.id", Order.DESC), "n.name", ("n.last_name", Order.DESC)]) - ) - expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC, n.name, n.last_name DESC " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_order_by_asc_desc(memgraph): - query_builder = ( - QueryBuilder() - .match() - .node(variable="n") - .return_() - .order_by( - properties=[ - ("n.id", Order.ASC), - "n.name", - ("n.last_name", Order.DESC), - ("n.age", Order.ASCENDING), - ("n.middle_name", Order.DESCENDING), - ] - ) - ) - expected_query = ( - " MATCH (n) RETURN * ORDER BY n.id ASC, n.name, n.last_name DESC, n.age ASCENDING, n.middle_name DESCENDING " - ) - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_limit(memgraph): - query_builder = QueryBuilder().match().node(variable="n").return_().limit("3") - expected_query = " MATCH (n) RETURN * LIMIT 3 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_skip(memgraph): - query_builder = QueryBuilder().match().node(variable="n").return_({"n": ""}).skip("1") - expected_query = " MATCH (n) RETURN n SKIP 1 " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_class_match(memgraph): - query_builder = match().node(variable="n").return_({"n": ""}) - expected_query = " MATCH (n) RETURN n " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - 
query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_class_call(memgraph): - query_builder = call("pagerank.get").yield_().return_() - expected_query = " CALL pagerank.get() YIELD * RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_class_unwind(memgraph): - query_builder = unwind("[1, 2, 3]", "x").return_({"x": "x"}) - expected_query = " UNWIND [1, 2, 3] AS x RETURN x " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_base_class_with(memgraph): - query_builder = with_({"10": "n"}).return_({"n": ""}) - expected_query = " WITH 10 AS n RETURN n " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_from(memgraph): - query_builder = match().node("L1", variable="n").from_("TO", variable="e").node("L2", variable="m").return_() - expected_query = " MATCH (n:L1)<-[e:TO]-(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_add_string_partial(memgraph): - query_builder = match().node("Node1", variable="n").to("TO", variable="e").add_custom_cypher("(m:L2) ").return_() - expected_query = " MATCH (n:Node1)-[e:TO]->(m:L2) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_add_string_complete(memgraph): - query_builder = QueryBuilder().add_custom_cypher("MATCH (n) RETURN n") - expected_query = "MATCH (n) RETURN n" - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_node_instance(memgraph): - class User(Node): - name: Optional[str] = Field(index=True, unique=True, db=memgraph) - - user = User(name="Ron").save(memgraph) - query_builder = QueryBuilder().match().node(node=user, variable="u").return_() - expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_unsaved_node_instance(memgraph): - class User(Node): - name: Optional[str] = Field(index=True, unique=True, db=memgraph) - - user = User(name="Ron") - query_builder = QueryBuilder().match().node(node=user, variable="u").return_() - expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_node_relationship_instances(memgraph): - class User(Node): - name: Optional[str] = Field(index=True, unique=True, db=memgraph) - - class Follows_test(Relationship, type="FOLLOWS"): - pass - - user_1 = User(name="Ron").save(memgraph) - user_2 = User(name="Leslie").save(memgraph) - follows = Follows_test(_start_node_id=user_1._id, _end_node_id=user_2._id).save(memgraph) - query_builder = ( - QueryBuilder() - .match() - .node(node=user_1, variable="user_1") - .to(relationship=follows) - .node(node=user_2, variable="user_2") - .return_() - ) - expected_query = " MATCH (user_1:User {name: 
'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) - - -def test_unsaved_node_relationship_instances(memgraph): - class User(Node): - name: Optional[str] = Field(index=True, unique=True, db=memgraph) - - class Follows_test(Relationship, type="FOLLOWS"): - pass - - user_1 = User(name="Ron") - user_2 = User(name="Leslie") - follows = Follows_test() - query_builder = ( - QueryBuilder() - .match() - .node(node=user_1, variable="user_1") - .to(relationship=follows) - .node(node=user_2, variable="user_2") - .return_() - ) - expected_query = " MATCH (user_1:User {name: 'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * " - - with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: - query_builder.execute() - - mock.assert_called_with(expected_query) diff --git a/tests/ogm/__init__.py b/tests/ogm/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/tests/ogm/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/ogm/test_automatic_deserialisation.py b/tests/ogm/test_automatic_deserialisation.py index 7c386422..7ad34791 100644 --- a/tests/ogm/test_automatic_deserialisation.py +++ b/tests/ogm/test_automatic_deserialisation.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest from typing import Optional -from gqlalchemy import Node, Relationship, Path + +from gqlalchemy import Node, Path, Relationship from gqlalchemy.models import GraphObject @@ -45,7 +47,8 @@ def test_dictionary_deserialisation(): pass -def test_automatic_deserialisation_from_database(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_automatic_deserialisation_from_database(database): class Person(Node): id: Optional[int] name: Optional[str] @@ -57,11 +60,11 @@ class Alice(Node): class Friends(Relationship, type="FRIENDS"): pass - memgraph.execute("create (:Person {id: 1, name: 'person'});") - memgraph.execute("create (:Alice {id: 8, name: 'alice'});") - memgraph.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);") + database.execute("create (:Person {id: 1, name: 'person'});") + database.execute("create (:Alice {id: 8, name: 'alice'});") + database.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);") - result = list(memgraph.execute_and_fetch("match (a)-[r]->(b) return a, r, b")) + result = list(database.execute_and_fetch("match (a)-[r]->(b) return a, r, b")) for node in result: a = node["a"] assert isinstance(a, Alice) @@ -73,6 +76,7 @@ class Friends(Relationship, type="FRIENDS"): assert isinstance(a._id, int) r = node["r"] + print(f"r: {r}") assert isinstance(r, Friends) assert r._type == "FRIENDS" assert isinstance(r._id, int) @@ -91,11 +95,12 @@ class Friends(Relationship, type="FRIENDS"): assert isinstance(b._id, int) -def test_path_deserialisation(memgraph): - memgraph.execute("create (:Person {id: 1, name: 'person'});") - memgraph.execute("create (:Alice {id: 8, name: 'alice'});") - memgraph.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);") - result = list(memgraph.execute_and_fetch("MATCH p = ()-[*1]-() RETURN p")) +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_path_deserialisation(database): + database.execute("create (:Person {id: 1, name: 'person'});") + database.execute("create (:Alice {id: 8, name: 'alice'});") + database.execute("match (a:Alice) match(b:Person) create (a)-[:FRIENDS]->(b);") + result = list(database.execute_and_fetch("MATCH p = ()-[*1]-() RETURN p")) path = result[0]["p"] assert isinstance(path, Path) assert len(path._nodes) == 2 diff --git a/tests/ogm/test_class_definition.py b/tests/ogm/test_class_definition.py index b19b189e..f781bad5 100644 --- a/tests/ogm/test_class_definition.py +++ b/tests/ogm/test_class_definition.py @@ -1,13 +1,31 @@ -from gqlalchemy import Node, Field +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import pytest from typing import Optional +from gqlalchemy import Node, Field + -def test_node(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node(database): class User(Node): - id: int = Field(index=True, exists=True, unique=True, db=memgraph) - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: int = Field(index=True, db=database) + name: str = Field(unique=True, db=database) - user = User(id=0, name="Kate").save(memgraph) + user = User(id=0, name="Kate").save(database) assert User.label == "User" assert User.labels == {"User"} @@ -19,16 +37,17 @@ class User(Node): assert user.name == "Kate" -def test_node_inheritance(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_inheritance(database): class User(Node): - id: int = Field(index=True, exists=True, unique=True, db=memgraph) - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: int = Field(index=True, db=database) + name: str = Field(unique=True, db=database) class Admin(User): - admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph) + admin_id: int = Field(index=True, db=database) - user = User(id=0, name="Kate").save(memgraph) - admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph) + user = User(id=0, name="Kate").save(database) + admin = Admin(id=1, admin_id=0, name="Admin").save(database) assert User.label == "User" assert User.labels == {"User"} @@ -55,16 +74,17 @@ class Admin(User): assert admin._labels == {"Admin", "User"} -def test_node_custom_label(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_custom_label(database): class User(Node, label="UserX"): - id: int = Field(index=True, exists=True, unique=True, db=memgraph) - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: int = Field(index=True, db=database) + name: str = Field(unique=True, db=database) class Admin(User, label="AdminX"): - admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph) + admin_id: int = Field(index=True, db=database) - user = User(id=0, name="Kate").save(memgraph) - admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph) + user = User(id=0, name="Kate").save(database) + admin = Admin(id=1, admin_id=0, name="Admin").save(database) assert User.label == "UserX" assert User.labels == {"UserX"} @@ -83,15 +103,16 @@ class Admin(User, label="AdminX"): assert admin._labels == {"AdminX", "UserX"} -def test_node_custom_labels(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_custom_labels(database): class User(Node, labels={"UserX", "UserY"}): - id: int = Field(index=True, exists=True, unique=True, db=memgraph) - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: int = Field(index=True, db=database) + name: str = Field(unique=True, db=database) class Admin(User, label="AdminX", labels={"AdminX", "AdminY"}): - admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph) + admin_id: int = Field(index=True, db=database) - admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph) + admin = Admin(id=1, admin_id=0, name="Admin").save(database) assert User.label == "User" assert User.labels == {"User", "UserX", "UserY"} @@ -105,34 +126,35 @@ class Admin(User, label="AdminX", labels={"AdminX", "AdminY"}): assert admin._labels == {"AdminX", "AdminY", "User", "UserX", "UserY"} -def 
test_node_various_inheritance(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_various_inheritance(database): class User(Node): - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(index=True, db=database) class UserOne(Node, label="User1"): - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(index=True, db=database) class UserTwo(User, label="User2", labels={"User3"}): - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(index=True, db=database) class Streamer(User): - id: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: str = Field(index=True, db=database) followers: Optional[int] = Field() class StreamerOne(User, label="Streamer1"): - id: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: str = Field(index=True, db=database) followers: Optional[int] = Field() class StreamerTwo(Streamer, label="Streamer2", labels={"Streamer3", "Streamer4"}): - id: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: str = Field(index=True, db=database) followers: Optional[int] = Field() - user = User(name="Kate").save(memgraph) - userOne = UserOne(name="Mrma").save(memgraph) - userTwo = UserTwo(name="Boris").save(memgraph) - streamer = Streamer(id=7, name="Ivan", followers=172).save(memgraph) - streamerOne = StreamerOne(id=8, name="Bruno", followers=173).save(memgraph) - streamerTwo = StreamerTwo(id=9, name="Marko", followers=174).save(memgraph) + user = User(name="Kate").save(database) + userOne = UserOne(name="Mrma").save(database) + userTwo = UserTwo(name="Boris").save(database) + streamer = Streamer(id=7, name="Ivan", followers=172).save(database) + streamerOne = StreamerOne(id=8, name="Bruno", followers=173).save(database) + streamerTwo = StreamerTwo(id=9, name="Marko", followers=174).save(database) assert "name" in Streamer.__fields__ assert user.name == "Kate" @@ -194,10 +216,11 @@ class StreamerTwo(Streamer, label="Streamer2", labels={"Streamer3", "Streamer4"} } -def test_node_multiple_inheritence(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_multiple_inheritence(database): class User(Node, labels={"UserX"}): - id: int = Field(index=True, exists=True, unique=True, db=memgraph) - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + id: int = Field(index=True, db=database) + name: str = Field(index=True, db=database) class UserOne(Node, labels={"UserOneX"}): pass @@ -206,9 +229,9 @@ class UserTwo(Node, label="UserTwoX"): pass class Admin(UserOne, UserTwo, User, label="AdminX", labels={"AdminX", "AdminY"}): - admin_id: int = Field(index=True, exists=True, unique=True, db=memgraph) + admin_id: int = Field(index=True, db=database) - admin = Admin(id=1, admin_id=0, name="Admin").save(memgraph) + admin = Admin(id=1, admin_id=0, name="Admin").save(database) assert UserOne.label == "UserOne" assert UserTwo.label == "UserTwoX" diff --git a/tests/ogm/test_custom_fields.py b/tests/ogm/test_custom_fields.py index c415ae15..498ab4d6 100644 --- a/tests/ogm/test_custom_fields.py +++ b/tests/ogm/test_custom_fields.py @@ -11,13 +11,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from pydantic import Field + from gqlalchemy import ( MemgraphConstraintExists, MemgraphConstraintUnique, MemgraphIndex, + Neo4jConstraintUnique, + Neo4jIndex, Node, ) -from pydantic import Field def test_create_constraint_exist(memgraph): @@ -51,3 +54,24 @@ def test_create_index(memgraph): actual_constraints = memgraph.get_indexes() assert actual_constraints == [memgraph_index] + + +def test_create_constraint_unique_neo4j(neo4j): + class Node2(Node): + id: int = Field(db=neo4j) + + neo4j_constraint = Neo4jConstraintUnique("Node2", ("id",)) + + neo4j.create_constraint(neo4j_constraint) + actual_constraints = neo4j.get_constraints() + + assert actual_constraints == [neo4j_constraint] + + +def test_create_index_neo4j(neo4j): + neo4j_index = Neo4jIndex(label="Node2", property="id", type="BTREE", uniqueness="NONUNIQUE") + + neo4j.create_index(neo4j_index) + actual_constraints = neo4j.get_indexes() + + assert neo4j_index in actual_constraints diff --git a/tests/ogm/test_load_node.py b/tests/ogm/test_load_node.py index b5ac005c..31c5a80b 100644 --- a/tests/ogm/test_load_node.py +++ b/tests/ogm/test_load_node.py @@ -1,18 +1,34 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + from gqlalchemy import Node, Field -def test_load_node(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_load_node(database): class User(Node): - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(unique=True, db=database) class Streamer(User): - name: str = Field(index=True, unique=True, db=memgraph) - id: str = Field(index=True, unique=True, db=memgraph) + name: str = Field(unique=True, db=database) + id: str = Field(index=True, db=database) followers: int = Field() totalViewCount: int = Field() - streamer = Streamer(name="Mislav", id="7", followers=777, totalViewCount=7777).save(memgraph) - loaded_streamer = memgraph.load_node(streamer) + streamer = Streamer(name="Mislav", id="7", followers=777, totalViewCount=7777).save(database) + loaded_streamer = database.load_node(streamer) assert loaded_streamer.name == "Mislav" assert loaded_streamer.id == "7" assert loaded_streamer.followers == 777 diff --git a/tests/ogm/test_loading.py b/tests/ogm/test_loading.py index 4e2ae107..cbb7b20e 100644 --- a/tests/ogm/test_loading.py +++ b/tests/ogm/test_loading.py @@ -12,34 +12,38 @@ # limitations under the License. 
import pytest -from gqlalchemy import Node + from pydantic import ValidationError +from gqlalchemy import Node + -def test_partial_loading(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_partial_loading(database): class User(Node): id: int name: str = None - User(id=1, name="Jane").save(memgraph) + User(id=1, name="Jane").save(database) with pytest.raises(ValidationError): - memgraph.load_node(User(name="Jane")) + database.load_node(User(name="Jane")) - user_by_id = memgraph.load_node(User(id=1)) + user_by_id = database.load_node(User(id=1)) assert user_by_id.id == 1 assert user_by_id.name == "Jane" assert user_by_id._label == "User" -def test_node_loading(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_node_loading(database): class User(Node): id: int name: str - User(id=1, name="Jane").save(memgraph) - user_by_name = memgraph.load_node(User(id=1, name="Jane")) + User(id=1, name="Jane").save(database) + user_by_name = database.load_node(User(id=1, name="Jane")) assert user_by_name.id == 1 assert user_by_name.name == "Jane" diff --git a/tests/ogm/test_multiple_inheritance.py b/tests/ogm/test_multiple_inheritance.py index 8b48e5e7..f8a1ec33 100644 --- a/tests/ogm/test_multiple_inheritance.py +++ b/tests/ogm/test_multiple_inheritance.py @@ -1,17 +1,33 @@ -from gqlalchemy import Node, Field +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest from typing import Optional +from gqlalchemy import Node, Field + -def test_multiple_inheritance(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_multiple_inheritance(database): class User(Node): - name: Optional[str] = Field(index=True, unique=True, db=memgraph) + name: Optional[str] = Field(unique=True, db=database) class Streamer(User): - id: Optional[str] = Field(index=True, unique=True, db=memgraph) - name: Optional[str] = Field(index=True, unique=True, db=memgraph, label="User") + id: Optional[str] = Field(unique=True, db=database) + name: Optional[str] = Field(unique=True, db=database) - user = User(name="Ivan").save(memgraph) - streamer = Streamer(id=7, name="Pero").save(memgraph) + user = User(name="Ivan").save(database) + streamer = Streamer(id=7, name="Pero").save(database) assert User.labels == {"User"} assert Streamer.labels == {"Streamer", "User"} assert user._labels == {"User"} diff --git a/tests/ogm/test_properties.py b/tests/ogm/test_properties.py index fd9b5e01..f979d419 100644 --- a/tests/ogm/test_properties.py +++ b/tests/ogm/test_properties.py @@ -11,21 +11,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + from gqlalchemy import Node -def test_properties(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_properties(database): class User(Node): id: int last_name: str _name: str _age: int - user = User(id=1, last_name="Smith", _name="Jane").save(memgraph) - User(id=2, last_name="Scott").save(memgraph) - loaded_user = memgraph.load_node(user) + user = User(id=1, last_name="Smith", _name="Jane").save(database) + User(id=2, last_name="Scott").save(database) + loaded_user = database.load_node(user) loaded_user._age = 24 - loaded_user2 = memgraph.load_node(User(id=2, last_name="Scott")) + loaded_user2 = database.load_node(User(id=2, last_name="Scott")) assert type(loaded_user) is User assert type(loaded_user2) is User diff --git a/tests/ogm/test_serialisation.py b/tests/ogm/test_serialisation.py index dd46f6ea..d53845d1 100644 --- a/tests/ogm/test_serialisation.py +++ b/tests/ogm/test_serialisation.py @@ -11,40 +11,46 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + from typing import Optional -from gqlalchemy import Node, Relationship, Field + +from gqlalchemy import Field, Node, Relationship -def test_save_node(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_node(database): class SimpleNode(Node): id: Optional[int] = Field() name: Optional[str] = Field() node1 = SimpleNode(id=1, name="First Simple Node") assert node1._id is None - node1.save(memgraph) + node1.save(database) assert node1._id is not None node2 = SimpleNode(id=1) - node2.save(memgraph) + node2.save(database) assert node1._id != node2._id -def test_save_node2(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_node2(database): class NodeWithKey(Node): - id: int = Field(exists=True, unique=True, index=True, db=memgraph) + id: int = Field(unique=True, db=database) name: Optional[str] = Field() node1 = NodeWithKey(id=1, name="First NodeWithKey") assert node1._id is None - node1.save(memgraph) + node1.save(database) assert node1._id is not None node2 = NodeWithKey(id=1) - node2.save(memgraph) + node2.save(database) assert node1._id == node2._id assert node1.name == node2.name -def test_save_nodes(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_nodes(database): class SimpleNode(Node): id: Optional[int] = Field() name: Optional[str] = Field() @@ -57,7 +63,7 @@ class SimpleNode(Node): assert node2._id is None assert node3._id is None - memgraph.save_nodes([node1, node2, node3]) + database.save_nodes([node1, node2, node3]) assert node1._id is not None assert node2._id is not None @@ -67,58 +73,61 @@ class SimpleNode(Node): node2.name = "2nd Simple Node" node3.name = "3rd Simple Node" - memgraph.save_nodes([node1, node2, node3]) + database.save_nodes([node1, node2, node3]) assert node1.name == "1st Simple Node" assert node2.name == "2nd Simple Node" assert node3.name == "3rd Simple Node" -def test_save_relationship(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_relationship(database): class NodeWithKey(Node): - id: int = Field(exists=True, unique=True, index=True, db=memgraph) + id: int = Field(unique=True, db=database) name: Optional[str] = Field() class SimpleRelationship(Relationship, type="SIMPLE_RELATIONSHIP"): pass - node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph) - node2 = 
NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph) + node1 = NodeWithKey(id=1, name="First NodeWithKey").save(database) + node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(database) relationship = SimpleRelationship( _start_node_id=node1._id, _end_node_id=node2._id, ) assert SimpleRelationship.type == relationship._type assert SimpleRelationship._type is not None - relationship.save(memgraph) + relationship.save(database) assert relationship._id is not None -def test_save_relationship2(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_relationship2(database): class NodeWithKey(Node): - id: int = Field(exists=True, unique=True, index=True, db=memgraph) + id: int = Field(unique=True, db=database) name: Optional[str] = Field() class SimpleRelationship(Relationship, type="SIMPLE_RELATIONSHIP"): pass - node1 = NodeWithKey(id=1, name="First NodeWithKey").save(memgraph) - node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(memgraph) + node1 = NodeWithKey(id=1, name="First NodeWithKey").save(database) + node2 = NodeWithKey(id=2, name="Second NodeWithKey").save(database) relationship = SimpleRelationship( _start_node_id=node1._id, _end_node_id=node2._id, ) assert SimpleRelationship.type == relationship._type assert SimpleRelationship.type is not None - relationship.save(memgraph) + relationship.save(database) assert relationship._id is not None - relationship2 = memgraph.load_relationship(relationship) + relationship2 = database.load_relationship(relationship) assert relationship2._id == relationship._id -def test_save_relationships(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_save_relationships(database): class User(Node): - id: int = Field(exists=True, unique=True, index=True, db=memgraph) + id: int = Field(unique=True, db=database) name: Optional[str] = Field() class Follows(Relationship, type="FOLLOWS"): @@ -127,7 +136,7 @@ class Follows(Relationship, type="FOLLOWS"): node1 = User(id=1, name="Marin") node2 = User(id=2, name="Marko") - memgraph.save_nodes([node1, node2]) + database.save_nodes([node1, node2]) assert node1._id is not None assert node2._id is not None @@ -143,6 +152,6 @@ class Follows(Relationship, type="FOLLOWS"): assert Follows.type == relationship1._type assert Follows._type is not None - memgraph.save_relationships([relationship1, relationship2]) + database.save_relationships([relationship1, relationship2]) assert relationship1._id is not None assert relationship2._id is not None diff --git a/tests/ogm/test_validators.py b/tests/ogm/test_validators.py index 7d52a270..55cefad5 100644 --- a/tests/ogm/test_validators.py +++ b/tests/ogm/test_validators.py @@ -13,31 +13,33 @@ # limitations under the License. 
import pytest -from gqlalchemy import Field, Node, validator from typing import List, Optional +from gqlalchemy import Field, Node, validator + -def test_raise_value_error(memgraph): +@pytest.mark.parametrize("database", ["neo4j", "memgraph"], indirect=True) +def test_raise_value_error(database): class User(Node): - name: str = Field(index=True, exists=True, unique=True, db=memgraph) + name: str = Field(unique=True, db=database) age: int = Field() friends: Optional[List[str]] = Field() - @validator("name") + @validator("name", allow_reuse=True) def name_can_not_be_empty(cls, v): if v == "": raise ValueError("name can't be empty") return v - @validator("age") + @validator("age", allow_reuse=True) def age_must_be_greater_than_zero(cls, v): if v <= 0: raise ValueError("age must be greater than zero") return v - @validator("friends", each_item=True) + @validator("friends", each_item=True, allow_reuse=True) def friends_must_be_(cls, v): if v == "": raise ValueError("name can't be empty") @@ -45,10 +47,10 @@ def friends_must_be_(cls, v): return v with pytest.raises(ValueError): - User(name="", age=26).save(memgraph) + User(name="", age=26).save(database) with pytest.raises(ValueError): - User(name="Kate", age=0).save(memgraph) + User(name="Kate", age=0).save(database) with pytest.raises(ValueError): - User(name="Kate", age=26, friends=["Ema", "Ana", ""]).save(memgraph) + User(name="Kate", age=26, friends=["Ema", "Ana", ""]).save(database) diff --git a/tests/on_disk_property_storage/test_multiprocess.py b/tests/on_disk_property_storage/test_multiprocess.py index 2de5b48c..20a4b10d 100644 --- a/tests/on_disk_property_storage/test_multiprocess.py +++ b/tests/on_disk_property_storage/test_multiprocess.py @@ -13,14 +13,14 @@ # limitations under the License. import multiprocessing as mp +import pytest +import random +import traceback from typing import Optional -import traceback import mgclient -import pytest -import random -from gqlalchemy import Memgraph, Node, Field, SQLitePropertyDatabase, GQLAlchemyError +from gqlalchemy import Field, GQLAlchemyError, Memgraph, Node, SQLitePropertyDatabase db = Memgraph() diff --git a/tests/on_disk_property_storage/test_query.py b/tests/on_disk_property_storage/test_query.py index a520d853..6d28c07e 100644 --- a/tests/on_disk_property_storage/test_query.py +++ b/tests/on_disk_property_storage/test_query.py @@ -13,10 +13,10 @@ # limitations under the License. import pytest - -from gqlalchemy import SQLitePropertyDatabase, Memgraph, Node, Field, Relationship from typing import Optional +from gqlalchemy import Field, Memgraph, Node, Relationship, SQLitePropertyDatabase + memgraph = Memgraph() db = SQLitePropertyDatabase("./tests/on_disk_storage.db", memgraph) diff --git a/tests/query_builders/__init__.py b/tests/query_builders/__init__.py new file mode 100644 index 00000000..34ce70e6 --- /dev/null +++ b/tests/query_builders/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
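The new tests/query_builders package introduced below exercises the builders without touching a live database: each test patches the vendor's execute (or execute_and_fetch) method and asserts on the exact Cypher string the builder constructs. A self-contained illustration of that pattern, using only the public API these files already import:

from unittest.mock import patch

from gqlalchemy import Memgraph, QueryBuilder

query_builder = QueryBuilder().match().node(labels="Person", variable="p").return_(results="p")
expected_query = " MATCH (p:Person) RETURN p "

with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock:
    # Builds the query and hands it to the patched method instead of a server.
    query_builder.execute()

mock.assert_called_with(expected_query)

Note the deliberate leading and trailing spaces in expected_query: each partial clause contributes its own padding, and the tests compare the raw string, spaces included.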
diff --git a/tests/query_builders/test_memgraph_query_builder.py b/tests/query_builders/test_memgraph_query_builder.py new file mode 100644 index 00000000..27eb2b46 --- /dev/null +++ b/tests/query_builders/test_memgraph_query_builder.py @@ -0,0 +1,406 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from unittest.mock import patch + +from gqlalchemy import InvalidMatchChainException, Memgraph, QueryBuilder +from gqlalchemy.query_builders.memgraph_query_builder import ( + Call, + Create, + Foreach, + LoadCsv, + Match, + Merge, + Return, + Unwind, + With, +) +from gqlalchemy.graph_algorithms.integrated_algorithms import BreadthFirstSearch, DepthFirstSearch, WeightedShortestPath +from gqlalchemy.utilities import PropertyVariable + + +def test_invalid_match_chain_throws_exception(): + with pytest.raises(InvalidMatchChainException): + QueryBuilder().node(labels=":Label", variable="n").node(labels=":Label", variable="m").return_() + + +def test_load_csv_with_header(memgraph): + query_builder = QueryBuilder().load_csv(path="path/to/my/file.csv", header=True, row="row").return_() + expected_query = " LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + mock.assert_called_with(expected_query) + + +def test_load_csv_no_header(memgraph): + query_builder = QueryBuilder().load_csv(path="path/to/my/file.csv", header=False, row="row").return_() + expected_query = " LOAD CSV FROM 'path/to/my/file.csv' NO HEADER AS row RETURN * " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + mock.assert_called_with(expected_query) + + +def test_call_procedure_pagerank(memgraph): + query_builder = ( + QueryBuilder() + .call(procedure="pagerank.get") + .yield_(results={"node": "", "rank": ""}) + .return_(results=[("node", "node"), ("rank", "rank")]) + ) + expected_query = " CALL pagerank.get() YIELD node, rank RETURN node, rank " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_call_procedure_pagerank_new_yield(memgraph): + query_builder = QueryBuilder().call(procedure="pagerank.get").yield_(["node", "rank"]).return_(["node", "rank"]) + expected_query = " CALL pagerank.get() YIELD node, rank RETURN node, rank " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_call_procedure_node2vec(memgraph): + query_builder = QueryBuilder().call(procedure="node2vec_online.get_embeddings", arguments="False, 2.0, 0.5") + expected_query = " CALL node2vec_online.get_embeddings(False, 2.0, 0.5) " + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def 
test_call_procedure_nxalg_betweenness_centrality(memgraph): + query_builder = ( + QueryBuilder() + .call(procedure="nxalg.betweenness_centrality", arguments="20, True") + .yield_() + .return_(results=["node", "betweenness"]) + ) + expected_query = " CALL nxalg.betweenness_centrality(20, True) YIELD * RETURN node, betweenness " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_yield_multiple_alias(memgraph): + query_builder = ( + QueryBuilder() + .call(procedure="nxalg.betweenness_centrality", arguments="20, True") + .yield_(results=[("node", "n"), "betweenness"]) + .return_(results=["n", "betweenness"]) + ) + expected_query = " CALL nxalg.betweenness_centrality(20, True) YIELD node AS n, betweenness RETURN n, betweenness " + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_base_class_load_csv(memgraph): + query_builder = LoadCsv("path/to/my/file.csv", True, "row").return_() + expected_query = " LOAD CSV FROM 'path/to/my/file.csv' WITH HEADER AS row RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_bfs(): + bfs_alg = BreadthFirstSearch() + + query_builder = ( + QueryBuilder() + .match() + .node(labels="City", name="Zagreb") + .to(relationship_type="Road", algorithm=bfs_alg) + .node(labels="City", name="Paris") + .return_() + ) + expected_query = " MATCH (:City {name: 'Zagreb'})-[:Road *BFS]->(:City {name: 'Paris'}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_bfs_filter_label(): + bfs_alg = BreadthFirstSearch(condition="r.length <= 200 AND n.name != 'Metz'") + + query_builder = ( + QueryBuilder() + .match() + .node(labels="City", name="Paris") + .to(relationship_type="Road", algorithm=bfs_alg) + .node(labels="City", name="Berlin") + .return_() + ) + + expected_query = " MATCH (:City {name: 'Paris'})-[:Road *BFS (r, n | r.length <= 200 AND n.name != 'Metz')]->(:City {name: 'Berlin'}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +@pytest.mark.parametrize( + "lower_bound, upper_bound, expected_query", + [ + (1, 15, " MATCH (a {id: 723})-[ *BFS 1..15 (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (3, None, " MATCH (a {id: 723})-[ *BFS 3.. 
(r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (None, 10, " MATCH (a {id: 723})-[ *BFS ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (None, None, " MATCH (a {id: 723})-[ *BFS (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + ], +) +def test_bfs_bounds(lower_bound, upper_bound, expected_query): + bfs_alg = BreadthFirstSearch(lower_bound=lower_bound, upper_bound=upper_bound, condition="r.x > 12 AND n.y < 3") + + query_builder = ( + QueryBuilder().match().node(variable="a", id=723).to(directed=False, algorithm=bfs_alg).node().return_() + ) + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_dfs(): + dfs_alg = DepthFirstSearch() + + query_builder = ( + QueryBuilder() + .match() + .node(labels="City", name="Zagreb") + .to(relationship_type="Road", algorithm=dfs_alg) + .node(labels="City", name="Paris") + .return_() + ) + expected_query = " MATCH (:City {name: 'Zagreb'})-[:Road *]->(:City {name: 'Paris'}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_dfs_filter_label(): + dfs_alg = DepthFirstSearch(condition="r.length <= 200 AND n.name != 'Metz'") + + query_builder = ( + QueryBuilder() + .match() + .node(labels="City", name="Paris") + .to(relationship_type="Road", algorithm=dfs_alg) + .node(labels="City", name="Berlin") + .return_() + ) + + expected_query = " MATCH (:City {name: 'Paris'})-[:Road * (r, n | r.length <= 200 AND n.name != 'Metz')]->(:City {name: 'Berlin'}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +@pytest.mark.parametrize( + "lower_bound, upper_bound, expected_query", + [ + (1, 15, " MATCH (a {id: 723})-[ * 1..15 (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (3, None, " MATCH (a {id: 723})-[ * 3.. 
(r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (None, 10, " MATCH (a {id: 723})-[ * ..10 (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + (None, None, " MATCH (a {id: 723})-[ * (r, n | r.x > 12 AND n.y < 3)]-() RETURN * "), + ], +) +def test_dfs_bounds(lower_bound, upper_bound, expected_query): + dfs_alg = DepthFirstSearch(lower_bound=lower_bound, upper_bound=upper_bound, condition="r.x > 12 AND n.y < 3") + + query_builder = ( + QueryBuilder().match().node(variable="a", id=723).to(directed=False, algorithm=dfs_alg).node().return_() + ) + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_wshortest(): + weighted_shortest = WeightedShortestPath(weight_property="r.weight") + + query_builder = ( + QueryBuilder() + .match() + .node(variable="a", id=723) + .to(variable="r", directed=False, algorithm=weighted_shortest) + .node(variable="b", id=882) + .return_() + ) + + expected_query = " MATCH (a {id: 723})-[r *WSHORTEST (r, n | r.weight) total_weight]-(b {id: 882}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_wShortest_bound(): + weighted_shortest = WeightedShortestPath(upper_bound=10, weight_property="weight") + + query_builder = ( + QueryBuilder() + .match() + .node(variable="a", id=723) + .to(variable="r", directed=False, algorithm=weighted_shortest) + .node(variable="b", id=882) + .return_() + ) + + expected_query = " MATCH (a {id: 723})-[r *WSHORTEST 10 (r, n | r.weight) total_weight]-(b {id: 882}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +def test_wShortest_filter_label(): + weighted_shortest = WeightedShortestPath( + upper_bound=10, weight_property="weight", condition="r.x > 12 AND n.y < 3", total_weight_var="weight_sum" + ) + + query_builder = ( + QueryBuilder() + .match() + .node(variable="a", id=723) + .to(variable="r", directed=False, algorithm=weighted_shortest) + .node(variable="b", id=882) + .return_() + ) + + expected_query = " MATCH (a {id: 723})-[r *WSHORTEST 10 (r, n | r.weight) weight_sum (r, n | r.x > 12 AND n.y < 3)]-(b {id: 882}) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + +class TestMemgraphBaseClasses: + def test_base_class_call(self): + query_builder = Call("pagerank.get").yield_().return_() + expected_query = " CALL pagerank.get() YIELD * RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_create(self): + query_builder = Create().node(variable="n", labels="TEST", prop="test").return_(results=("n", "n")) + expected_query = " CREATE (n:TEST {prop: 'test'}) RETURN n " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_foreach(self, memgraph): + update_clause = Create().node(variable="n", id=PropertyVariable(name="i")) + query_builder = Foreach(variable="i", expression="[1, 2, 3]", update_clauses=update_clause.construct_query()) + expected_query = " FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) ) " + + with patch.object(Memgraph, 
"execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_match(self): + query_builder = Match().node(variable="n").return_(results="n") + expected_query = " MATCH (n) RETURN n " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_merge(self): + query_builder = ( + QueryBuilder() + .merge() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2") + .return_() + ) + expected_query = " MERGE (n:L1)-[:TO]->(:L2) RETURN * " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_merge_with_variables(self): + query_builder = Merge().node(labels="L1", variable="n").to(relationship_type="TO").node(labels="L2") + expected_query = " MERGE (n:L1)-[:TO]->(:L2)" + + with patch.object(Memgraph, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_unwind(self): + query_builder = Unwind("[1, 2, 3]", "x").return_(results=("x", "x")) + expected_query = " UNWIND [1, 2, 3] AS x RETURN x " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_with_dict(self): + query_builder = With(results={"10": "n"}).return_(results={"n": ""}) + expected_query = " WITH 10 AS n RETURN n " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_with_tuple(self): + query_builder = With(results=("10", "n")).return_(results=("n", "")) + expected_query = " WITH 10 AS n RETURN n " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_return(self): + query_builder = Return(("n", "n")) + expected_query = " RETURN n " + + with patch.object(Memgraph, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) diff --git a/tests/query_builders/test_neo4j_query_builder.py b/tests/query_builders/test_neo4j_query_builder.py new file mode 100644 index 00000000..ecec3c62 --- /dev/null +++ b/tests/query_builders/test_neo4j_query_builder.py @@ -0,0 +1,133 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest.mock import patch + +from gqlalchemy import Neo4j, Neo4jQueryBuilder +from gqlalchemy.query_builders.neo4j_query_builder import ( + Call, + Create, + Foreach, + Match, + Merge, + Return, + Unwind, + With, +) +from gqlalchemy.utilities import PropertyVariable + + +class TestNeo4jBaseClasses: + def test_base_class_call(self, neo4j): + query_builder = Call("pagerank.get", connection=neo4j).yield_().return_() + expected_query = " CALL pagerank.get() YIELD * RETURN * " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_create(self, neo4j): + query_builder = ( + Create(connection=neo4j).node(variable="n", labels="TEST", prop="test").return_(results=("n", "n")) + ) + expected_query = " CREATE (n:TEST {prop: 'test'}) RETURN n " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_foreach(self, neo4j): + update_clause = Neo4jQueryBuilder(connection=neo4j).create().node(variable="n", id=PropertyVariable(name="i")) + query_builder = Foreach( + variable="i", expression="[1, 2, 3]", update_clauses=update_clause.construct_query(), connection=neo4j + ) + expected_query = " FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) ) " + + with patch.object(Neo4j, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_match(self, neo4j): + query_builder = Match(connection=neo4j).node(variable="n").return_(results="n") + expected_query = " MATCH (n) RETURN n " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_merge(self, neo4j): + query_builder = ( + Merge(connection=neo4j) + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2") + .return_() + ) + expected_query = " MERGE (n:L1)-[:TO]->(:L2) RETURN * " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_merge_with_variables(self, neo4j): + query_builder = ( + Merge(connection=neo4j).node(labels="L1", variable="n").to(relationship_type="TO").node(labels="L2") + ) + expected_query = " MERGE (n:L1)-[:TO]->(:L2)" + + with patch.object(Neo4j, "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_unwind(self, neo4j): + query_builder = Unwind("[1, 2, 3]", "x", connection=neo4j).return_(results=("x", "x")) + expected_query = " UNWIND [1, 2, 3] AS x RETURN x " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_with_dict(self, neo4j): + query_builder = With(results={"10": "n"}, connection=neo4j).return_(results={"n": ""}) + expected_query = " WITH 10 AS n RETURN n " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_base_class_with_tuple(self, neo4j): + query_builder = With(results=("10", "n"), connection=neo4j).return_(results=("n", "")) + expected_query = " WITH 10 AS n RETURN n " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + 
mock.assert_called_with(expected_query) + + def test_base_class_return(self, neo4j): + query_builder = Return(("n", "n"), connection=neo4j) + expected_query = " RETURN n " + + with patch.object(Neo4j, "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) diff --git a/tests/query_builders/test_query_builders.py b/tests/query_builders/test_query_builders.py new file mode 100644 index 00000000..06c3e390 --- /dev/null +++ b/tests/query_builders/test_query_builders.py @@ -0,0 +1,1716 @@ +# Copyright (c) 2016-2022 Memgraph Ltd. [https://memgraph.com] +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from typing import Optional +from unittest.mock import patch +from datetime import datetime + +from gqlalchemy.exceptions import ( + GQLAlchemyExtraKeywordArguments, + GQLAlchemyInstantiationError, + GQLAlchemyLiteralAndExpressionMissing, + GQLAlchemyResultQueryTypeError, + GQLAlchemyTooLargeTupleInResultQuery, + GQLAlchemyOperatorTypeError, +) +from gqlalchemy import Field, InvalidMatchChainException, Node, QueryBuilder, Relationship +from gqlalchemy.exceptions import GQLAlchemyMissingOrder, GQLAlchemyOrderByTypeError +from gqlalchemy.query_builders.declarative_base import Operator, Order, _ResultPartialQuery +from gqlalchemy.utilities import PropertyVariable + + +@pytest.mark.parametrize("vendor", ["neo4j_query_builder", "memgraph_query_builder"], indirect=True) +class TestMemgraphNeo4jQueryBuilder: + def test_invalid_match_chain_throws_exception(self, vendor): + with pytest.raises(InvalidMatchChainException): + vendor[1].node(labels=":Label", variable="n").node(labels=":Label", variable="m").return_() + + def test_simple_create(self, vendor): + query_builder = ( + vendor[1].create().node(labels="L1", variable="n").to(relationship_type="TO").node(labels="L2").return_() + ) + expected_query = " CREATE (n:L1)-[:TO]->(:L2) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_match(self, vendor): + query_builder = ( + vendor[1].match().node(labels="L1", variable="n").to(relationship_type="TO").node(labels="L2").return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(:L2) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_with_multiple_labels(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels=["L1", "L2", "L3"], variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .return_() + ) + expected_query = " MATCH (n:L1:L2:L3)-[:TO]->(m:L2) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_multiple_matches(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + 
.to(relationship_type="TO") + .node(labels="L2", variable="m") + .match(optional=True) + .node(variable="n") + .to(relationship_type="TO") + .node(labels="L3") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) OPTIONAL MATCH (n)-[:TO]->(:L3) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_with_empty(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .with_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WITH * " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_with(self, vendor): + query_builder = vendor[1].match().node(variable="n").with_(results={"n": ""}) + expected_query = " MATCH (n) WITH n " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_with_str_args(self, vendor): + query_builder = vendor[1].match().node(variable="n").with_(results="n") + expected_query = " MATCH (n) WITH n " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_union(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="n1", labels="Node1") + .return_(results="n1") + .union(include_duplicates=False) + .match() + .node(variable="n2", labels="Node2") + .return_(results="n2") + ) + expected_query = " MATCH (n1:Node1) RETURN n1 UNION MATCH (n2:Node2) RETURN n2 " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_union_2(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="c", labels="Country") + .return_(results=("c.name", "columnName")) + .union(include_duplicates=False) + .match() + .node(variable="p", labels="Person") + .return_(results=("p.name", "columnName")) + ) + expected_query = ( + " MATCH (c:Country) RETURN c.name AS columnName UNION MATCH (p:Person) RETURN p.name AS columnName " + ) + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_union_all(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="n1", labels="Node1") + .return_(results="n1") + .union() + .match() + .node(variable="n2", labels="Node2") + .return_(results="n2") + ) + expected_query = " MATCH (n1:Node1) RETURN n1 UNION ALL MATCH (n2:Node2) RETURN n2 " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_union_all_2(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="c", labels="Country") + .return_(results=("c.name", "columnName")) + .union() + .match() + .node(variable="p", labels="Person") + .return_(results=("p.name", "columnName")) + ) + expected_query = ( + " MATCH (c:Country) RETURN c.name AS columnName UNION ALL MATCH (p:Person) RETURN p.name AS columnName " + ) + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_delete(self, vendor): + 
query_builder = vendor[1].match().node(variable="n1", labels="Node1").delete("n1") + expected_query = " MATCH (n1:Node1) DELETE n1 " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_delete_list(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="n1", labels="Node1") + .to() + .node(variable="n2", labels="Node2") + .delete(variable_expressions=["n1", "n2"]) + ) + expected_query = " MATCH (n1:Node1)-[]->(n2:Node2) DELETE n1, n2 " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_create_with_variables(self, vendor): + query_builder = ( + vendor[1] + .create() + .node(labels="L1", variable="n") + .to(relationship_type="TO", variable="e") + .node(labels="L2", variable="m") + .return_() + ) + expected_query = " CREATE (n:L1)-[e:TO]->(m:L2) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_simple_match_with_variables(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO", variable="e") + .node(labels="L2", variable="m") + .return_() + ) + expected_query = " MATCH (n:L1)-[e:TO]->(m:L2) RETURN * " + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_delete_detach(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(variable="n1", labels="Node1") + .to(relationship_type="RELATIONSHIP") + .node(variable="n2", labels="Node2") + .delete(["n1", "n2"], True) + ) + expected_query = " MATCH (n1:Node1)-[:RELATIONSHIP]->(n2:Node2) DETACH DELETE n1, n2 " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_remove_property(self, vendor): + query_builder = vendor[1].match().node(variable="n", labels="Node").remove(items="n.name") + expected_query = " MATCH (n:Node) REMOVE n.name " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_multiple_merges(self, vendor): + query_builder = ( + vendor[1] + .merge() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .merge() + .node(variable="n") + .to(relationship_type="TO") + .node(labels="L3") + .return_() + ) + expected_query = " MERGE (n:L1)-[:TO]->(m:L2) MERGE (n)-[:TO]->(:L3) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + @pytest.mark.parametrize("operator", ["=", "<>", "<", "!=", ">", "<=", ">="]) + def test_where_without_operator_enum(self, vendor, operator): + query_builder = ( + vendor[1] + .match() + .node("L1", variable="n") + .to("TO") + .node("L2", variable="m") + .where(item="n.name", operator=operator, literal="best_name") + .return_() + ) + expected_query = f" MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name {operator} 'best_name' RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_literal(self, vendor): + query_builder 
= ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_not_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator=Operator.EQUAL, expression="m.name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n.name = m.name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_label_without_operator_enum(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=":", expression="Node") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_label_with_rand_string_operator(self, vendor): + with pytest.raises(GQLAlchemyOperatorTypeError): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator="heyhey", expression="Node") + .return_() + ) + + def test_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_not_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where_not(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE NOT n:Node RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + 
.where(item="n.name", operator=Operator.EQUAL) + .return_() + ) + + def test_where_not_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator=Operator.EQUAL) + .return_() + ) + + def test_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_where_not_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where_not(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_or_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .or_where(item="m.id", operator=Operator.LESS_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR m.id < 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_not_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .or_not_where(item="m.id", operator=Operator.LESS_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' OR NOT m.id < 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .or_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_not_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .or_not_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name OR NOT m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_where_label(self, vendor): + query_builder 
= ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .or_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_not_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .or_not_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node OR NOT m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_or_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .or_where(item="m.name", operator=Operator.EQUAL) + .return_() + ) + + def test_or_not_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .or_not_where(item="m.name", operator=Operator.EQUAL) + .return_() + ) + + def test_or_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .or_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_or_not_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .or_not_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_and_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .and_where(item="m.id", operator=Operator.LEQ_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND m.id <= 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_not_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", 
operator=Operator.EQUAL, literal="best_name") + .and_not_where(item="m.id", operator=Operator.LESS_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' AND NOT m.id < 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .and_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_not_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .and_not_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name AND NOT m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .and_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_not_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node("L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .and_not_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND NOT m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_and_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .and_where(item="m.name", operator=Operator.EQUAL) + .return_() + ) + + def test_and_not_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .and_not_where(item="m.name", 
operator=Operator.EQUAL) + .return_() + ) + + def test_and_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .and_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_and_not_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .and_not_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_xor_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .xor_where(item="m.id", operator=Operator.LESS_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR m.id < 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_not_where_literal(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="best_name") + .xor_not_where(item="m.id", operator=Operator.LESS_THAN, literal=4) + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = 'best_name' XOR NOT m.id < 4 RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .xor_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_not_where_property(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .xor_not_where(item="m.name", operator=Operator.EQUAL, expression="n.last_name") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n.name = m.name XOR NOT m.name = n.last_name RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, 
expression="Node") + .xor_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_not_where_label(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .xor_not_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node XOR NOT m:User RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_xor_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .xor_where(item="m.name", operator=Operator.EQUAL) + .return_() + ) + + def test_xor_not_where_literal_and_expression_missing(self, vendor): + with pytest.raises(GQLAlchemyLiteralAndExpressionMissing): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n.name", operator=Operator.EQUAL, literal="my_name") + .xor_not_where(item="m.name", operator=Operator.EQUAL) + .return_() + ) + + def test_xor_and_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .xor_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_xor_not_and_where_extra_values(self, vendor): + with pytest.raises(GQLAlchemyExtraKeywordArguments): + ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="m.name", operator=Operator.EQUAL, literal="best_name") + .xor_not_where(item="n.name", operator=Operator.EQUAL, literal="best_name", expression="Node") + .return_() + ) + + def test_and_or_xor_not_where(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .where(item="n", operator=Operator.LABEL_FILTER, expression="Node") + .and_where(item="n.age", operator=Operator.GREATER_THAN, literal=5) + .or_where(item="n", operator=Operator.LABEL_FILTER, expression="Node2") + .xor_where(item="n.name", operator=Operator.EQUAL, expression="m.name") + .xor_not_where(item="m", operator=Operator.LABEL_FILTER, expression="User") + .or_not_where(item="m", operator=Operator.LABEL_FILTER, expression="Node") + .and_not_where(item="m.name", operator=Operator.EQUAL, literal="John") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) WHERE n:Node AND n.age > 5 OR n:Node2 XOR n.name = m.name XOR NOT m:User OR NOT m:Node AND NOT m.name = 'John' RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as 
mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_get_single(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .return_(results="n") + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN n " + + with patch.object(vendor[0], "execute_and_fetch", return_value=iter([{"n": None}])) as mock: + query_builder.get_single(retrieve="n") + + mock.assert_called_with(expected_query) + + def test_return_empty(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .return_() + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_return_alias(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .return_(results=("L1", "first")) + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_return_alias_dict(self, vendor): + query_builder = ( + vendor[1] + .match() + .node(labels="L1", variable="n") + .to(relationship_type="TO") + .node(labels="L2", variable="m") + .return_(results={"L1": "first"}) + ) + expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_return_alias_set(self, vendor): + test_set = set() + test_set.add(("L1", "first")) + test_set.add("L2") + + query_builder = vendor[1].return_(results=test_set).construct_query() + expected_query = [" RETURN L1 AS first, L2 ", " RETURN L2, L1 AS first "] + + assert query_builder in expected_query + + def test_return_alias_set_int(self, vendor): + test_set = set() + test_set.add(("L1", 1)) + test_set.add("L2") + + with pytest.raises(GQLAlchemyResultQueryTypeError): + vendor[1].return_(results=test_set).construct_query() + + def test_return_alias_set_datetime(self, vendor): + test_set = set() + test_set.add(("L1", "first")) + test_set.add(datetime.date) + + with pytest.raises(GQLAlchemyResultQueryTypeError): + vendor[1].return_(results=test_set).construct_query() + + def test_return_alias_set_too_large_tuple(self, vendor): + test = ("L1", "first", "L2") + + with pytest.raises(GQLAlchemyTooLargeTupleInResultQuery): + vendor[1].return_(test).construct_query() + + def test_return_alias_set_multiple(self, vendor): + test_set = set() + test_set.add(("L1", "first")) + test_set.add(("L2", "second")) + + query_builder = vendor[1].return_(results=test_set).construct_query() + expected_query = [" RETURN L1 AS first, L2 AS second ", " RETURN L2 AS second, L1 AS first "] + + assert query_builder in expected_query + + def test_return_alias_set_multiple_2(self, vendor): + test_set = set() + test_set.add(("L1", "first")) + test_set.add(("L2", "second")) + test_set.add("L3") + + query_builder = vendor[1].return_(test_set).construct_query() + expected_query = [ + " RETURN L1 AS first, L2 AS second, L3 ", + " RETURN L2 AS second, L3, L1 AS first ", + " RETURN L3, L2 AS second, L1 AS first ", + " 
+
+    def test_return_empty(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_()
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN * "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results=("L1", "first"))
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_dict(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results={"L1": "first"})
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_set(self, vendor):
+        test_set = set()
+        test_set.add(("L1", "first"))
+        test_set.add("L2")
+
+        query_builder = vendor[1].return_(results=test_set).construct_query()
+        expected_query = [" RETURN L1 AS first, L2 ", " RETURN L2, L1 AS first "]
+
+        assert query_builder in expected_query
+
+    def test_return_alias_set_int(self, vendor):
+        test_set = set()
+        test_set.add(("L1", 1))
+        test_set.add("L2")
+
+        with pytest.raises(GQLAlchemyResultQueryTypeError):
+            vendor[1].return_(results=test_set).construct_query()
+
+    def test_return_alias_set_datetime(self, vendor):
+        test_set = set()
+        test_set.add(("L1", "first"))
+        test_set.add(datetime.date)
+
+        with pytest.raises(GQLAlchemyResultQueryTypeError):
+            vendor[1].return_(results=test_set).construct_query()
+
+    def test_return_alias_set_too_large_tuple(self, vendor):
+        test = ("L1", "first", "L2")
+
+        with pytest.raises(GQLAlchemyTooLargeTupleInResultQuery):
+            vendor[1].return_(test).construct_query()
+
+    def test_return_alias_set_multiple(self, vendor):
+        test_set = set()
+        test_set.add(("L1", "first"))
+        test_set.add(("L2", "second"))
+
+        query_builder = vendor[1].return_(results=test_set).construct_query()
+        expected_query = [" RETURN L1 AS first, L2 AS second ", " RETURN L2 AS second, L1 AS first "]
+
+        assert query_builder in expected_query
+
+    def test_return_alias_set_multiple_2(self, vendor):
+        test_set = set()
+        test_set.add(("L1", "first"))
+        test_set.add(("L2", "second"))
+        test_set.add("L3")
+
+        query_builder = vendor[1].return_(test_set).construct_query()
+        expected_query = [
+            " RETURN L1 AS first, L2 AS second, L3 ",
+            " RETURN L2 AS second, L3, L1 AS first ",
+            " RETURN L3, L2 AS second, L1 AS first ",
+            " RETURN L1 AS first, L3, L2 AS second ",
+            " RETURN L3, L1 AS first, L2 AS second ",
+            " RETURN L2 AS second, L1 AS first, L3 ",
+        ]
+
+        assert query_builder in expected_query
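+
+    # Editorial note (not part of the original patch): results accepts a bare
+    # name, a (name, alias) pair, or a list/dict/set of those. Python sets are
+    # unordered, so the set-based tests above accept every permutation of the
+    # rendered RETURN items instead of a single expected string.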
+
+    def test_return_multiple_alias(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results=[("L1", "first"), "L2", ("L3", "third")])
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first, L2, L3 AS third "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_instantiate(self, vendor):
+        with pytest.raises(GQLAlchemyInstantiationError):
+            _ResultPartialQuery(keyword="RETURN")
+
+    def test_return_multiple_alias_dict(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results={"L1": "first", "L2": "", "L3": "third"})
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 AS first, L2, L3 AS third "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_same_as_variable(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results=("L1", "L1"))
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_same_as_variable_dict(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results={"L1": "L1"})
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_empty(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results="L1")
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_return_alias_empty_dict(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .to(relationship_type="TO")
+            .node(labels="L2", variable="m")
+            .return_(results={"L1": ""})
+        )
+        expected_query = " MATCH (n:L1)-[:TO]->(m:L2) RETURN L1 "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_unwind(self, vendor):
+        query_builder = (
+            vendor[1]
+            .unwind(list_expression="[1, 2, 3, null]", variable="x")
+            .return_(results=[("x", ""), ("'val'", "y")])
+        )
+        expected_query = " UNWIND [1, 2, 3, null] AS x RETURN x, 'val' AS y "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_remove_label(self, vendor):
+        query_builder = vendor[1].match().node(variable="n", labels=["Node1", "Node2"]).remove(items="n:Node2")
+        expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2 "
+
+        with patch.object(vendor[0], "execute", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_remove_property_and_label(self, vendor):
+        query_builder = (
+            vendor[1].match().node(variable="n", labels=["Node1", "Node2"]).remove(items=["n:Node2", "n.name"])
+        )
+        expected_query = " MATCH (n:Node1:Node2) REMOVE n:Node2, n.name "
+
+        with patch.object(vendor[0], "execute", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_order_by(self, vendor):
+        query_builder = vendor[1].match().node(variable="n").return_().order_by(properties="n.id")
+        expected_query = " MATCH (n) RETURN * ORDER BY n.id "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_order_by_desc(self, vendor):
+        query_builder = vendor[1].match().node(variable="n").return_().order_by(properties=("n.id", Order.DESC))
+        expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_order_by_asc(self, vendor):
+        query_builder = vendor[1].match().node(variable="n").return_().order_by(properties=("n.id", Order.ASC))
+        expected_query = " MATCH (n) RETURN * ORDER BY n.id ASC "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_order_by_wrong_ordering(self, vendor):
+        with pytest.raises(GQLAlchemyMissingOrder):
+            vendor[1].match().node(variable="n").return_().order_by(properties=("n.id", "DESCE"))
+
+    def test_order_by_wrong_type(self, vendor):
+        with pytest.raises(GQLAlchemyOrderByTypeError):
+            vendor[1].match().node(variable="n").return_().order_by(properties=1)
+
+    def test_order_by_properties(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .return_()
+            .order_by(properties=[("n.id", Order.DESC), "n.name", ("n.last_name", Order.DESC)])
+        )
+        expected_query = " MATCH (n) RETURN * ORDER BY n.id DESC, n.name, n.last_name DESC "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_order_by_asc_desc(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .return_()
+            .order_by(
+                properties=[
+                    ("n.id", Order.ASC),
+                    "n.name",
+                    ("n.last_name", Order.DESC),
+                    ("n.age", Order.ASCENDING),
+                    ("n.middle_name", Order.DESCENDING),
+                ]
+            )
+        )
+        expected_query = " MATCH (n) RETURN * ORDER BY n.id ASC, n.name, n.last_name DESC, n.age ASCENDING, n.middle_name DESCENDING "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
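+
+    # Editorial note (not part of the original patch): Order carries both the
+    # short (ASC/DESC) and long (ASCENDING/DESCENDING) spellings; Cypher
+    # treats each pair as synonyms, so all four variants render valid
+    # ORDER BY clauses.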
+
+    @pytest.mark.parametrize("integer_expression", ["3", 3])
+    def test_limit(self, vendor, integer_expression):
+        query_builder = vendor[1].match().node(variable="n").return_().limit(integer_expression=integer_expression)
+        expected_query = f" MATCH (n) RETURN * LIMIT {integer_expression} "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    @pytest.mark.parametrize("integer_expression", ["1", 1])
+    def test_skip(self, vendor, integer_expression):
+        query_builder = (
+            vendor[1].match().node(variable="n").return_(results="n").skip(integer_expression=integer_expression)
+        )
+        expected_query = f" MATCH (n) RETURN n SKIP {integer_expression} "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+        mock.assert_called_with(expected_query)
+
+    def test_from(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="L1", variable="n")
+            .from_(relationship_type="FROM", variable="e")
+            .node(labels="L2", variable="m")
+            .return_()
+        )
+        expected_query = " MATCH (n:L1)<-[e:FROM]-(m:L2) RETURN * "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_to(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="Town", variable="t")
+            .to(relationship_type="BELONGS_TO", variable="b")
+            .node(labels="Country", variable="c")
+            .return_(results="b")
+        )
+        expected_query = " MATCH (t:Town)-[b:BELONGS_TO]->(c:Country) RETURN b "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+        mock.assert_called_with(expected_query)
+
+    def test_add_string_partial(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(labels="Node1", variable="n")
+            .to(relationship_type="TO", variable="e")
+            .add_custom_cypher("(m:L2) ")
+            .return_()
+        )
+        expected_query = " MATCH (n:Node1)-[e:TO]->(m:L2) RETURN * "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_add_string_complete(self, vendor):
+        query_builder = vendor[1].add_custom_cypher("MATCH (n) RETURN n")
+        expected_query = "MATCH (n) RETURN n"
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
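+
+    # Editorial note (not part of the original patch): add_custom_cypher()
+    # splices its argument into the query verbatim, serving as the escape
+    # hatch for fragments the builder does not model; note the significant
+    # trailing space in the partial "(m:L2) " fragment above.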
+
+    def test_set_label_without_operator_enum(self, vendor):
+        query_builder = vendor[1].set_(item="a", operator=":", expression="L1")
+        expected_query = " SET a:L1"
+
+        assert query_builder.construct_query() == expected_query
+
+    def test_set_label_with_rand_operator(self, vendor):
+        with pytest.raises(GQLAlchemyOperatorTypeError):
+            vendor[1].set_(item="a", operator="heyhey", expression="L1")
+
+    def test_set_label(self, vendor):
+        query_builder = vendor[1].set_(item="a", operator=Operator.LABEL_FILTER, expression="L1")
+        expected_query = " SET a:L1"
+
+        assert query_builder.construct_query() == expected_query
+
+    @pytest.mark.parametrize("operator", [Operator.ASSIGNMENT, Operator.INCREMENT])
+    def test_set_assign_expression(self, vendor, operator):
+        query_builder = vendor[1].set_(item="a", operator=operator, expression="value")
+        expected_query = f" SET a {operator.value} value"
+
+        assert query_builder.construct_query() == expected_query
+
+    @pytest.mark.parametrize("operator", ["=", "+="])
+    def test_set_assign_expression_without_operator_enum(self, vendor, operator):
+        query_builder = vendor[1].set_(item="a", operator=operator, expression="value")
+        expected_query = f" SET a {operator} value"
+
+        assert query_builder.construct_query() == expected_query
+
+    @pytest.mark.parametrize("operator", [Operator.ASSIGNMENT, Operator.INCREMENT])
+    def test_set_assign_literal(self, vendor, operator):
+        query_builder = vendor[1].set_(item="a", operator=operator, literal="value")
+        expected_query = f" SET a {operator.value} 'value'"
+
+        assert query_builder.construct_query() == expected_query
+
+    def test_multiple_set_label(self, vendor):
+        query_builder = (
+            vendor[1]
+            .set_(item="a", operator=Operator.LABEL_FILTER, expression="L1")
+            .set_(item="a", operator=Operator.ASSIGNMENT, expression="L2")
+        )
+        expected_query = " SET a:L1 SET a = L2"
+
+        assert query_builder.construct_query() == expected_query
+
+    @pytest.mark.parametrize("operator", [Operator.ASSIGNMENT, Operator.INCREMENT])
+    def test_set_literal_and_expression_missing(self, vendor, operator):
+        with pytest.raises(GQLAlchemyLiteralAndExpressionMissing):
+            vendor[1].set_(item="n.name", operator=operator)
+
+    @pytest.mark.parametrize("operator", [Operator.ASSIGNMENT, Operator.INCREMENT])
+    def test_set_extra_values(self, vendor, operator):
+        with pytest.raises(GQLAlchemyExtraKeywordArguments):
+            vendor[1].set_(item="n.name", operator=operator, literal="best_name", expression="Node")
+
+    def test_set_docstring_example_1(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .where(item="n.name", operator=Operator.EQUAL, literal="Germany")
+            .set_(item="n.population", operator=Operator.ASSIGNMENT, literal=83000001)
+            .return_()
+        )
+        expected_query = " MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 RETURN * "
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_set_docstring_example_2(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .where(item="n.name", operator=Operator.EQUAL, literal="Germany")
+            .set_(item="n.population", operator=Operator.ASSIGNMENT, literal=83000001)
+            .set_(item="n.capital", operator=Operator.ASSIGNMENT, literal="Berlin")
+            .return_()
+        )
+        expected_query = (
+            " MATCH (n) WHERE n.name = 'Germany' SET n.population = 83000001 SET n.capital = 'Berlin' RETURN * "
+        )
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_set_docstring_example_3(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .where(item="n.name", operator=Operator.EQUAL, literal="Germany")
+            .set_(item="n", operator=Operator.LABEL_FILTER, expression="Land")
+            .return_()
+        )
+        expected_query = " MATCH (n) WHERE n.name = 'Germany' SET n:Land RETURN * "
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_set_docstring_example_4(self, vendor):
+        query_builder = (
+            vendor[1]
+            .match()
+            .node(variable="c", labels="Country")
+            .where(item="c.name", operator=Operator.EQUAL, literal="Germany")
+            .set_(item="c", operator=Operator.INCREMENT, literal={"name": "Germany", "population": "85000000"})
+            .return_()
+        )
+        expected_query = (
+            " MATCH (c:Country) WHERE c.name = 'Germany' SET c += {name: 'Germany', population: '85000000'} RETURN * "
+        )
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
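+
+    # Editorial note (not part of the original patch): `literal` values are
+    # serialized into Cypher (strings quoted, dicts rendered as maps), while
+    # `expression` is injected as-is. Omitting both raises
+    # GQLAlchemyLiteralAndExpressionMissing and supplying both raises
+    # GQLAlchemyExtraKeywordArguments, as the tests above exercise.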
variable="u").return_() + expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_unsaved_node_instance(self, vendor): + class User(Node): + name: Optional[str] = Field(unique=True, db=vendor[0]) + + user = User(name="Ron") + query_builder = vendor[1].match().node(node=user, variable="u").return_() + expected_query = " MATCH (u:User {name: 'Ron'}) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_node_relationship_instances(self, vendor): + class User(Node): + name: Optional[str] = Field(unique=True, db=vendor[0]) + + class Follows_test(Relationship, type="FOLLOWS"): + pass + + user_1 = User(name="Ron").save(vendor[0]) + user_2 = User(name="Leslie").save(vendor[0]) + follows = Follows_test(_start_node_id=user_1._id, _end_node_id=user_2._id).save(vendor[0]) + query_builder = ( + vendor[1] + .match() + .node(node=user_1, variable="user_1") + .to(relationship=follows) + .node(node=user_2, variable="user_2") + .return_() + ) + expected_query = " MATCH (user_1:User {name: 'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_unsaved_node_relationship_instances(self, vendor): + class User(Node): + name: Optional[str] = Field(unique=True, db=vendor[0]) + + class Follows_test(Relationship, type="FOLLOWS"): + pass + + user_1 = User(name="Ron") + user_2 = User(name="Leslie") + follows = Follows_test() + query_builder = ( + vendor[1] + .match() + .node(node=user_1, variable="user_1") + .to(relationship=follows) + .node(node=user_2, variable="user_2") + .return_() + ) + expected_query = " MATCH (user_1:User {name: 'Ron'})-[:FOLLOWS]->(user_2:User {name: 'Leslie'}) RETURN * " + + with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + + def test_property_variable(self, vendor): + query = ( + vendor[1] + .with_(results={"[1,2,3]": "list"}) + .unwind("list", "element") + .create() + .node(num=PropertyVariable(name="element")) + ) + + expected_query = " WITH [1,2,3] AS list UNWIND list AS element CREATE ( {num: element})" + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query.execute() + + mock.assert_called_with(expected_query) + + def test_property_variable_edge(self, vendor): + query = ( + vendor[1] + .with_(results={"15": "number"}) + .create() + .node(variable="n") + .to(relationship_type="REL", num=PropertyVariable(name="number")) + .node(variable="m") + ) + + expected_query = " WITH 15 AS number CREATE (n)-[:REL{num: number}]->(m)" + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query.execute() + + mock.assert_called_with(expected_query) + + def test_foreach(self, vendor): + update_clause = QueryBuilder().create().node(variable="n", id=PropertyVariable(name="i")) + query_builder = vendor[1].foreach( + variable="i", expression="[1, 2, 3]", update_clause=update_clause.construct_query() + ) + expected_query = " FOREACH ( i IN [1, 2, 3] | CREATE (n {id: i}) ) " + + with patch.object(vendor[0], "execute", return_value=None) as mock: + query_builder.execute() + + mock.assert_called_with(expected_query) + 
+
+    def test_foreach_multiple_update_clauses(self, vendor):
+        variable_li = PropertyVariable(name="li")
+        update_clause_1 = QueryBuilder().create().node(labels="F4", prop=variable_li)
+        update_clause_2 = QueryBuilder().create().node(labels="F5", prop2=variable_li)
+        query = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .foreach(
+                variable="li",
+                expression="[1, 2]",
+                update_clause=[update_clause_1.construct_query(), update_clause_2.construct_query()],
+            )
+            .return_({"n": ""})
+        )
+        expected_query = (
+            " MATCH (n) FOREACH ( li IN [1, 2] | CREATE (:F4 {prop: li}) CREATE (:F5 {prop2: li}) ) RETURN n "
+        )
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_foreach_nested(self, vendor):
+        create_query = QueryBuilder().create().node(variable="u", prop=PropertyVariable(name="j"))
+        nested_query = QueryBuilder().foreach(
+            variable="j", expression="i", update_clause=create_query.construct_query()
+        )
+        query = (
+            vendor[1]
+            .match()
+            .node(variable="n")
+            .foreach(variable="i", expression="n.prop", update_clause=nested_query.construct_query())
+        )
+
+        expected_query = " MATCH (n) FOREACH ( i IN n.prop | FOREACH ( j IN i | CREATE (u {prop: j}) ) ) "
+
+        with patch.object(vendor[0], "execute", return_value=None) as mock:
+            query.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_merge(self, vendor):
+        query_builder = (
+            vendor[1]
+            .merge()
+            .node(variable="city")
+            .where(item="city.name", operator="=", literal="London")
+            .return_(results="city")
+        )
+        expected_query = " MERGE (city) WHERE city.name = 'London' RETURN city "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_create(self, vendor):
+        query_builder = vendor[1].create().node(labels="Person", variable="p").return_(results="p")
+        expected_query = " CREATE (p:Person) RETURN p "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
+
+    def test_create_with_properties(self, vendor):
+        query_builder = vendor[1].create().node(labels="Person", variable="p", first_name="Kate").return_(results="p")
+        expected_query = " CREATE (p:Person {first_name: 'Kate'}) RETURN p "
+
+        with patch.object(vendor[0], "execute_and_fetch", return_value=None) as mock:
+            query_builder.execute()
+
+        mock.assert_called_with(expected_query)
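+
+    # Editorial note (not part of the original patch): builders that end in
+    # RETURN appear to be executed through execute_and_fetch(), while
+    # write-only ones go through execute(); that is why the tests in this
+    # file patch one method or the other depending on the query's shape.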
diff --git a/tests/test_instance_runner.py b/tests/test_instance_runner.py
index 9e91a830..0dc65afb 100644
--- a/tests/test_instance_runner.py
+++ b/tests/test_instance_runner.py
@@ -12,60 +12,54 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import docker
 import os
 import pathlib
 import pytest
+
+import docker
+
+from gqlalchemy.exceptions import (
+    GQLAlchemyWaitForPortError,
+    GQLAlchemyWaitForDockerError,
+)
 from gqlalchemy.instance_runner import (
     DockerImage,
     MemgraphInstanceBinary,
-    MemgraphInstanceDocker,
     wait_for_port,
     wait_for_docker_container,
 )
 
 
 def test_wait_for_port():
-    with pytest.raises(TimeoutError):
+    with pytest.raises(GQLAlchemyWaitForPortError):
         wait_for_port(port=0000, timeout=1)
 
 
 @pytest.mark.docker
 def test_wait_for_docker_container():
     container = docker.from_env().containers.create(DockerImage.MEMGRAPH.value)
-    with pytest.raises(TimeoutError):
+    with pytest.raises(GQLAlchemyWaitForDockerError):
         wait_for_docker_container(container, timeout=1)
 
 
 @pytest.mark.docker
-def test_start_and_connect_memgraph_docker():
-    memgraph_instance = MemgraphInstanceDocker(port=7690)
-    memgraph = memgraph_instance.start_and_connect()
-    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
-    assert memgraph_instance.is_running()
-    memgraph_instance.stop()
-    assert not memgraph_instance.is_running()
+def test_start_memgraph_docker(memgraph_instance_docker_without_config):
+    memgraph_instance_docker_without_config.start()
+    assert memgraph_instance_docker_without_config.is_running()
 
 
 @pytest.mark.docker
-def test_start_and_connect_memgraph_docker_config():
-    memgraph_instance = MemgraphInstanceDocker(port=7691, config={"--log-level": "TRACE"})
-    memgraph = memgraph_instance.start_and_connect()
-    assert memgraph_instance.is_running()
-    assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
-    memgraph_instance.stop()
-    assert not memgraph_instance.is_running()
+def test_start_and_connect_memgraph_docker_config(memgraph_instance_docker_with_config):
+    memgraph_instance_docker_with_config.start()
+    conn = memgraph_instance_docker_with_config.connect()
+    assert list(conn.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
 
 
 @pytest.mark.docker
-def test_start_memgraph_docker_connect():
-    memgraph_instance = MemgraphInstanceDocker(port=7692)
-    memgraph_instance.start()
-    assert memgraph_instance.is_running()
-    memgraph = memgraph_instance.connect()
+def test_start_and_connect_memgraph_without_docker_config(memgraph_instance_docker_without_config):
+    memgraph_instance_docker_without_config.start()
+    memgraph = memgraph_instance_docker_without_config.connect()
     assert list(memgraph.execute_and_fetch("RETURN 100 AS result"))[0]["result"] == 100
-    memgraph_instance.stop()
-    assert not memgraph_instance.is_running()
 
 
 @pytest.mark.ubuntu
diff --git a/tests/test_query.py b/tests/test_query.py
index 9f2b0fe8..5921b207 100644
--- a/tests/test_query.py
+++ b/tests/test_query.py
@@ -12,9 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Any, Dict, Iterator
-
 import pytest
+from typing import Any, Dict, Iterator
 
 from gqlalchemy import Memgraph, Node
diff --git a/tests/test_transformations.py b/tests/test_transformations.py
index 0af7b9dc..0657557d 100644
--- a/tests/test_transformations.py
+++ b/tests/test_transformations.py
@@ -15,6 +15,7 @@
 import pytest
 import networkx as nx
+
 from gqlalchemy.transformations import nx_to_cypher, NoNetworkXConfigException, NetworkXCypherBuilder
 from gqlalchemy.utilities import NetworkXCypherConfig
diff --git a/tests/memgraph/test_utilities.py b/tests/test_utilities.py
similarity index 83%
rename from tests/memgraph/test_utilities.py
rename to tests/test_utilities.py
index 6df5e5ec..e2072923 100644
--- a/tests/memgraph/test_utilities.py
+++ b/tests/test_utilities.py
@@ -12,16 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
+import datetime
 import math
+import pytest
 
 from gqlalchemy.utilities import (
+    NanException,
     NanValuesHandle,
     NetworkXCypherConfig,
     to_cypher_labels,
     to_cypher_properties,
     to_cypher_value,
-    NanException,
 )
 
@@ -57,6 +58,18 @@ def test_to_cypher_properties():
     assert actual_properties == expected_properties
 
 
+def test_to_cypher_datetime():
+    date = datetime.date(1970, 1, 19)
+    localtime = datetime.time(12, 12, 12)
+    localdatetime = datetime.datetime(1999, 12, 12, 12, 12, 12)
+    duration = datetime.timedelta(days=1, hours=5, minutes=16, seconds=12)
+
+    assert to_cypher_value(date) == "date('1970-01-19')"
+    assert to_cypher_value(localtime) == "localTime('12:12:12')"
+    assert to_cypher_value(localdatetime) == "localDateTime('1999-12-12T12:12:12')"
+    assert to_cypher_value(duration) == "duration('P1DT5H16M12.0S')"
+
+
 def test_to_cypher_labels_single_label():
     label = "Label"
     expected_cypher_label = ":Label"