From abe6b5cd6cff3c907b455e6a9c3acb72ff29aee1 Mon Sep 17 00:00:00 2001 From: James Baross Date: Thu, 16 Oct 2025 16:00:46 +0100 Subject: [PATCH 01/13] fix some docstring formatting --- python/python/raphtory/__init__.pyi | 564 ++++++++++++++---- .../python/raphtory/algorithms/__init__.pyi | 168 +++++- python/python/raphtory/filter/__init__.pyi | 91 ++- python/python/raphtory/graph_gen/__init__.pyi | 8 +- .../python/raphtory/graph_loader/__init__.pyi | 16 +- python/python/raphtory/graphql/__init__.pyi | 179 ++++-- python/python/raphtory/iterables/__init__.pyi | 261 +++----- .../python/raphtory/node_state/__init__.pyi | 293 ++++++--- python/python/raphtory/vectors/__init__.pyi | 60 +- raphtory/src/python/packages/vectors.rs | 63 +- 10 files changed, 1185 insertions(+), 518 deletions(-) diff --git a/python/python/raphtory/__init__.pyi b/python/python/raphtory/__init__.pyi index adac628ab2..fd5fd418f9 100644 --- a/python/python/raphtory/__init__.pyi +++ b/python/python/raphtory/__init__.pyi @@ -1,6 +1,7 @@ """ Raphtory graph analytics library """ + from __future__ import annotations ############################################################################### @@ -26,8 +27,42 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['GraphView', 'Graph', 'PersistentGraph', 'Node', 'Nodes', 'PathFromNode', 'PathFromGraph', 'MutableNode', 'Edge', 'Edges', 'NestedEdges', 'MutableEdge', 'Properties', 'PyPropValueList', 'Metadata', 'TemporalProperties', 'PropertiesView', 'TemporalProp', 'WindowSet', 'IndexSpecBuilder', 'IndexSpec', 'version', 'graphql', 'algorithms', 'graph_loader', 'graph_gen', 'vectors', 'node_state', 'filter', 'iterables', 'nullmodels', 'plottingutils'] -class GraphView(object): +__all__ = [ + "GraphView", + "Graph", + "PersistentGraph", + "Node", + "Nodes", + "PathFromNode", + "PathFromGraph", + "MutableNode", + "Edge", + "Edges", + "NestedEdges", + "MutableEdge", + "Properties", + "PyPropValueList", + "Metadata", + "TemporalProperties", + "PropertiesView", + "TemporalProp", + "WindowSet", + "IndexSpecBuilder", + "IndexSpec", + "version", + "graphql", + "algorithms", + "graph_loader", + "graph_gen", + "vectors", + "node_state", + "filter", + "iterables", + "nullmodels", + "plottingutils", +] + +class GraphView(object): """Graph view is a read-only version of a graph at a certain point in time.""" def __eq__(self, value): @@ -237,7 +272,9 @@ class GraphView(object): GraphView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -447,7 +484,12 @@ class GraphView(object): Properties: Properties paired with their names """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
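Both `rolling` and `expanding` above return a `WindowSet`, which is iterable (its `__iter__` stub appears later in this file), so windowed analysis is a plain for-loop over graph views. A minimal sketch on a tiny event graph; the timestamps are illustrative, and the `start`, `end` and `count_edges` accessors used on each windowed view are assumed here rather than shown in this hunk.

from raphtory import Graph

# A tiny event graph to window over; node ids and timestamps are illustrative.
g = Graph()
g.add_edge(1, "a", "b")
g.add_edge(2, "b", "c")
g.add_edge(4, "c", "a")

# rolling() yields one view per fixed-size window, expanding() grows the view by
# `step` each iteration; both accept integers or duration strings per the signatures here.
for view in g.rolling(window=2):
    print(view.start, view.end, view.count_edges())

for view in g.expanding(step=2):
    print(view.end, view.count_edges())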
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -593,7 +635,14 @@ class GraphView(object): GraphView: Returns the subgraph """ - def to_networkx(self, explode_edges: bool = False, include_node_properties: bool = True, include_edge_properties: bool = True, include_update_history: bool = True, include_property_history: bool = True) -> nx.MultiDiGraph: + def to_networkx( + self, + explode_edges: bool = False, + include_node_properties: bool = True, + include_edge_properties: bool = True, + include_update_history: bool = True, + include_property_history: bool = True, + ) -> nx.MultiDiGraph: """ Returns a graph with NetworkX. @@ -612,7 +661,19 @@ class GraphView(object): nx.MultiDiGraph: A Networkx MultiDiGraph. """ - def to_pyvis(self, explode_edges: bool = False, edge_color: str = '#000000', shape: str = 'dot', node_image: Optional[str] = None, edge_weight: Optional[str] = None, edge_label: Optional[str] = None, colour_nodes_by_type: bool = False, directed: bool = True, notebook: bool = False, **kwargs: Any) -> pyvis.network.Network: + def to_pyvis( + self, + explode_edges: bool = False, + edge_color: str = "#000000", + shape: str = "dot", + node_image: Optional[str] = None, + edge_weight: Optional[str] = None, + edge_label: Optional[str] = None, + colour_nodes_by_type: bool = False, + directed: bool = True, + notebook: bool = False, + **kwargs: Any, + ) -> pyvis.network.Network: """ Draw a graph with PyVis. Pyvis is a required dependency. If you intend to use this function make sure that you install Pyvis @@ -673,7 +734,14 @@ class GraphView(object): GraphView: The layered view """ - def vectorise(self, embedding: Callable[[list], list], nodes: bool | str = True, edges: bool | str = True, cache: Optional[str] = None, verbose: bool = False) -> VectorisedGraph: + def vectorise( + self, + embedding: Callable[[list], list], + nodes: bool | str = True, + edges: bool | str = True, + cache: Optional[str] = None, + verbose: bool = False, + ) -> VectorisedGraph: """ Create a VectorisedGraph from the current graph @@ -709,7 +777,7 @@ class GraphView(object): Optional[int]: """ -class Graph(GraphView): +class Graph(GraphView): """ A temporal graph with event semantics. @@ -720,10 +788,16 @@ class Graph(GraphView): def __new__(cls, num_shards: Optional[int] = None) -> Graph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): - ... - - def add_edge(self, timestamp: TimeInput, src: str|int, dst: str|int, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableEdge: + def __reduce__(self): ... + def add_edge( + self, + timestamp: TimeInput, + src: str | int, + dst: str | int, + properties: Optional[PropInput] = None, + layer: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> MutableEdge: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -756,7 +830,14 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: + def add_node( + self, + timestamp: TimeInput, + id: str | int, + properties: Optional[PropInput] = None, + node_type: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> MutableNode: """ Adds a new node with the given id and properties to the graph. 
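The reformatted `add_edge` and `add_node` signatures above both take a timestamp first, followed by string or integer ids. A minimal sketch of building a small event graph with them; the ids, property values, node type and layer name are illustrative.

from raphtory import Graph

g = Graph()

# add_node(timestamp, id, properties=None, node_type=None, ...) returns a MutableNode.
alice = g.add_node(1, "alice", properties={"age": 30}, node_type="person")

# add_edge(timestamp, src, dst, properties=None, layer=None, ...) returns a MutableEdge;
# the destination node is created implicitly if it does not exist yet.
friendship = g.add_edge(2, "alice", "bob", properties={"weight": 1.0}, layer="friends")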
@@ -774,7 +855,12 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_properties(self, timestamp: TimeInput, properties: PropInput, secondary_index: Optional[int] = None) -> None: + def add_properties( + self, + timestamp: TimeInput, + properties: PropInput, + secondary_index: Optional[int] = None, + ) -> None: """ Adds properties to the graph. @@ -851,7 +937,14 @@ class Graph(GraphView): None: """ - def create_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: + def create_node( + self, + timestamp: TimeInput, + id: str | int, + properties: Optional[PropInput] = None, + node_type: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> MutableNode: """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -881,7 +974,7 @@ class Graph(GraphView): Graph: """ - def edge(self, src: str|int, dst: str|int) -> MutableEdge: + def edge(self, src: str | int, dst: str | int) -> MutableEdge: """ Gets the edge with the specified source and destination nodes @@ -974,7 +1067,9 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_edges_as(self, edges: List[Edge], new_ids: List[Tuple[int, int]], merge: bool = False) -> None: + def import_edges_as( + self, edges: List[Edge], new_ids: List[Tuple[int, int]], merge: bool = False + ) -> None: """ Import multiple edges into the graph with new ids. @@ -1009,7 +1104,9 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_node_as(self, node: Node, new_id: str|int, merge: bool = False) -> MutableNode: + def import_node_as( + self, node: Node, new_id: str | int, merge: bool = False + ) -> MutableNode: """ Import a single node into the graph with new id. @@ -1044,7 +1141,9 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: + def import_nodes_as( + self, nodes: List[Node], new_ids: List[str | int], merge: bool = False + ) -> None: """ Import multiple nodes into the graph with new ids. @@ -1089,7 +1188,16 @@ class Graph(GraphView): Graph: the loaded graph with initialised cache """ - def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_props_from_pandas( + self, + df: DataFrame, + src: str, + dst: str, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edge properties from a Pandas DataFrame. @@ -1109,7 +1217,16 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_props_from_parquet( + self, + parquet_path: str, + src: str, + dst: str, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edge properties from parquet file @@ -1129,7 +1246,18 @@ class Graph(GraphView): GraphError: If the operation fails. 
""" - def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edges_from_pandas( + self, + df: DataFrame, + time: str, + src: str, + dst: str, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -1151,7 +1279,18 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edges_from_parquet( + self, + parquet_path: str, + time: str, + src: str, + dst: str, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges from a Parquet file into the graph. @@ -1185,7 +1324,15 @@ class Graph(GraphView): Graph: """ - def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_node_props_from_pandas( + self, + df: DataFrame, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load node properties from a Pandas DataFrame. @@ -1204,7 +1351,15 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_node_props_from_parquet( + self, + parquet_path: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load node properties from a parquet file. @@ -1223,7 +1378,17 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_nodes_from_pandas( + self, + df: DataFrame, + time: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -1244,7 +1409,17 @@ class Graph(GraphView): GraphError: If the operation fails. 
""" - def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_nodes_from_parquet( + self, + parquet_path: str, + time: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load nodes from a Parquet file into the graph. @@ -1265,7 +1440,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def node(self, id: str|int) -> MutableNode: + def node(self, id: str | int) -> MutableNode: """ Gets the node with the specified id @@ -1346,16 +1521,22 @@ class Graph(GraphView): None: """ -class PersistentGraph(GraphView): +class PersistentGraph(GraphView): """A temporal graph that allows edges and nodes to be deleted.""" def __new__(cls) -> PersistentGraph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): - ... - - def add_edge(self, timestamp: int, src: str | int, dst: str | int, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> None: + def __reduce__(self): ... + def add_edge( + self, + timestamp: int, + src: str | int, + dst: str | int, + properties: Optional[PropInput] = None, + layer: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> None: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -1388,7 +1569,14 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_node(self, timestamp: TimeInput, id: str | int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> None: + def add_node( + self, + timestamp: TimeInput, + id: str | int, + properties: Optional[PropInput] = None, + node_type: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> None: """ Adds a new node with the given id and properties to the graph. @@ -1406,7 +1594,12 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_properties(self, timestamp: TimeInput, properties: dict, secondary_index: Optional[int] = None) -> None: + def add_properties( + self, + timestamp: TimeInput, + properties: dict, + secondary_index: Optional[int] = None, + ) -> None: """ Adds properties to the graph. @@ -1482,7 +1675,14 @@ class PersistentGraph(GraphView): None: """ - def create_node(self, timestamp: TimeInput, id: str | int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: + def create_node( + self, + timestamp: TimeInput, + id: str | int, + properties: Optional[PropInput] = None, + node_type: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> MutableNode: """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -1500,7 +1700,14 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def delete_edge(self, timestamp: int, src: str | int, dst: str | int, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableEdge: + def delete_edge( + self, + timestamp: int, + src: str | int, + dst: str | int, + layer: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> MutableEdge: """ Deletes an edge given the timestamp, src and dst nodes and layer (optional) @@ -1613,7 +1820,9 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_edges_as(self, edges: List[Edge], new_ids: list[Tuple[GID, GID]], merge: bool = False) -> None: + def import_edges_as( + self, edges: List[Edge], new_ids: list[Tuple[GID, GID]], merge: bool = False + ) -> None: """ Import multiple edges into the graph with new ids. @@ -1650,7 +1859,9 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_node_as(self, node: Node, new_id: str|int, merge: bool = False) -> Node: + def import_node_as( + self, node: Node, new_id: str | int, merge: bool = False + ) -> Node: """ Import a single node into the graph with new id. @@ -1687,7 +1898,9 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: + def import_nodes_as( + self, nodes: List[Node], new_ids: List[str | int], merge: bool = False + ) -> None: """ Import multiple nodes into the graph with new ids. @@ -1721,7 +1934,15 @@ class PersistentGraph(GraphView): PersistentGraph: the loaded graph with initialised cache """ - def load_edge_deletions_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_deletions_from_pandas( + self, + df: DataFrame, + time: str, + src: str, + dst: str, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges deletions from a Pandas DataFrame into the graph. @@ -1740,7 +1961,15 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_deletions_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_deletions_from_parquet( + self, + parquet_path: str, + time: str, + src: str, + dst: str, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges deletions from a Parquet file into the graph. @@ -1759,7 +1988,16 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_props_from_pandas( + self, + df: DataFrame, + src: str, + dst: str, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edge properties from a Pandas DataFrame. @@ -1779,7 +2017,16 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edge_props_from_parquet( + self, + parquet_path: str, + src: str, + dst: str, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edge properties from parquet file @@ -1799,7 +2046,18 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edges_from_pandas( + self, + df: DataFrame, + time: str, + src: str, + dst: str, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -1821,7 +2079,18 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: + def load_edges_from_parquet( + self, + parquet_path: str, + time: str, + src: str, + dst: str, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + layer: Optional[str] = None, + layer_col: Optional[str] = None, + ) -> None: """ Load edges from a Parquet file into the graph. @@ -1855,7 +2124,15 @@ class PersistentGraph(GraphView): PersistentGraph: """ - def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_node_props_from_pandas( + self, + df: DataFrame, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load node properties from a Pandas DataFrame. @@ -1874,7 +2151,15 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_node_props_from_parquet( + self, + parquet_path: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load node properties from a parquet file. @@ -1893,7 +2178,17 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_nodes_from_pandas( + self, + df: DataFrame, + time: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -1914,7 +2209,17 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: + def load_nodes_from_parquet( + self, + parquet_path: str, + time: str, + id: str, + node_type: Optional[str] = None, + node_type_col: Optional[str] = None, + properties: Optional[List[str]] = None, + metadata: Optional[List[str]] = None, + shared_metadata: Optional[PropInput] = None, + ) -> None: """ Load nodes from a Parquet file into the graph. @@ -2005,7 +2310,7 @@ class PersistentGraph(GraphView): None: """ -class Node(object): +class Node(object): """A node (or node) in the graph.""" def __eq__(self, value): @@ -2180,7 +2485,9 @@ class Node(object): Node: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -2261,7 +2568,7 @@ class Node(object): """ @property - def id(self) -> (str|int): + def id(self) -> str | int: """ Returns the id of the node. This is a unique identifier for the node. @@ -2425,7 +2732,12 @@ class Node(object): Properties: A list of properties. """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -2556,7 +2868,7 @@ class Node(object): Optional[int]: """ -class Nodes(object): +class Nodes(object): """A list of nodes that can be iterated over.""" def __bool__(self): @@ -2745,7 +3057,9 @@ class Nodes(object): Nodes: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -2982,7 +3296,12 @@ class Nodes(object): PropertiesView: A view of the node properties. """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3080,7 +3399,9 @@ class Nodes(object): Optional[datetime]: The earliest datetime that this Nodes is valid or None if the Nodes is valid for all times. """ - def to_df(self, include_property_history: bool = False, convert_datetime: bool = False) -> DataFrame: + def to_df( + self, include_property_history: bool = False, convert_datetime: bool = False + ) -> DataFrame: """ Converts the graph's nodes into a Pandas DataFrame. @@ -3141,8 +3462,7 @@ class Nodes(object): Optional[int]: """ -class PathFromNode(object): - +class PathFromNode(object): def __bool__(self): """True if self else False""" @@ -3299,7 +3619,9 @@ class PathFromNode(object): PathFromNode: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3509,7 +3831,12 @@ class PathFromNode(object): PropertiesView: """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3651,8 +3978,7 @@ class PathFromNode(object): Optional[int]: """ -class PathFromGraph(object): - +class PathFromGraph(object): def __bool__(self): """True if self else False""" @@ -3818,7 +4144,9 @@ class PathFromGraph(object): PathFromGraph: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -4053,7 +4381,12 @@ class PathFromGraph(object): NestedPropsIterable: """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4195,8 +4528,7 @@ class PathFromGraph(object): Optional[int]: """ -class MutableNode(Node): - +class MutableNode(Node): def __repr__(self): """Return repr(self).""" @@ -4213,7 +4545,12 @@ class MutableNode(Node): None: """ - def add_updates(self, t: TimeInput, properties: Optional[PropInput] = None, secondary_index: Optional[int] = None) -> None: + def add_updates( + self, + t: TimeInput, + properties: Optional[PropInput] = None, + secondary_index: Optional[int] = None, + ) -> None: """ Add updates to a node in the graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -4258,7 +4595,7 @@ class MutableNode(Node): None: """ -class Edge(object): +class Edge(object): """ PyEdge is a Python class that represents an edge in the graph. 
An edge is a directed connection between two nodes. @@ -4445,7 +4782,9 @@ class Edge(object): Edge: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -4647,7 +4986,12 @@ class Edge(object): Properties: Properties on the Edge. """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4796,7 +5140,7 @@ class Edge(object): Optional[int]: """ -class Edges(object): +class Edges(object): """A list of edges that can be iterated over.""" def __bool__(self): @@ -4981,7 +5325,9 @@ class Edges(object): Edges: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -5188,7 +5534,12 @@ class Edges(object): PropertiesView: """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5304,7 +5655,12 @@ class Edges(object): I64Iterable: """ - def to_df(self, include_property_history: bool = True, convert_datetime: bool = False, explode: bool = False) -> DataFrame: + def to_df( + self, + include_property_history: bool = True, + convert_datetime: bool = False, + explode: bool = False, + ) -> DataFrame: """ Converts the graph's edges into a Pandas DataFrame. @@ -5357,8 +5713,7 @@ class Edges(object): Optional[int]: """ -class NestedEdges(object): - +class NestedEdges(object): def __bool__(self): """True if self else False""" @@ -5533,7 +5888,9 @@ class NestedEdges(object): NestedEdges: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -5731,7 +6088,12 @@ class NestedEdges(object): PyNestedPropsIterable: """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5880,8 +6242,7 @@ class NestedEdges(object): Optional[int]: """ -class MutableEdge(Edge): - +class MutableEdge(Edge): def __repr__(self): """Return repr(self).""" @@ -5899,7 +6260,13 @@ class MutableEdge(Edge): None: """ - def add_updates(self, t: TimeInput, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> None: + def add_updates( + self, + t: TimeInput, + properties: Optional[PropInput] = None, + layer: Optional[str] = None, + secondary_index: Optional[int] = None, + ) -> None: """ Add updates to an edge in the graph at a specified time. This function allows for the addition of property updates to an edge within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -5946,7 +6313,7 @@ class MutableEdge(Edge): None: """ -class Properties(object): +class Properties(object): """A view of the properties of an entity""" def __contains__(self, key): @@ -6037,8 +6404,7 @@ class Properties(object): list[PropValue]: """ -class PyPropValueList(object): - +class PyPropValueList(object): def __eq__(self, value): """Return self==value.""" @@ -6074,12 +6440,8 @@ class PyPropValueList(object): PropValue: The average of each property values, or None if count is zero. """ - def collect(self): - ... - - def count(self): - ... - + def collect(self): ... + def count(self): ... def drop_none(self) -> list[PropValue]: """ Drop none. @@ -6128,7 +6490,7 @@ class PyPropValueList(object): PropValue: """ -class Metadata(object): +class Metadata(object): """A view of metadata of an entity""" def __contains__(self, key): @@ -6209,7 +6571,7 @@ class Metadata(object): list[PropValue]: """ -class TemporalProperties(object): +class TemporalProperties(object): """A view of the temporal properties of an entity""" def __contains__(self, key): @@ -6304,8 +6666,7 @@ class TemporalProperties(object): list[TemporalProp]: the list of property views """ -class PropertiesView(object): - +class PropertiesView(object): def __contains__(self, key): """Return bool(key in self).""" @@ -6388,7 +6749,7 @@ class PropertiesView(object): list[list[PropValue]]: """ -class TemporalProp(object): +class TemporalProp(object): """A view of a temporal property""" def __eq__(self, value): @@ -6549,8 +6910,7 @@ class TemporalProp(object): NumpyArray: """ -class WindowSet(object): - +class WindowSet(object): def __iter__(self): """Implement iter(self).""" @@ -6568,8 +6928,7 @@ class WindowSet(object): Iterable: The time index. """ -class IndexSpecBuilder(object): - +class IndexSpecBuilder(object): def __new__(cls, graph) -> IndexSpecBuilder: """Create and return a new object. 
See help(type) for accurate signature.""" @@ -6673,8 +7032,7 @@ class IndexSpecBuilder(object): dict[str, Any]: """ -class IndexSpec(object): - +class IndexSpec(object): def __repr__(self): """Return repr(self).""" diff --git a/python/python/raphtory/algorithms/__init__.pyi b/python/python/raphtory/algorithms/__init__.pyi index c3005db67e..ae2892f399 100644 --- a/python/python/raphtory/algorithms/__init__.pyi +++ b/python/python/raphtory/algorithms/__init__.pyi @@ -1,6 +1,7 @@ """ Algorithmic functions that can be run on Raphtory graphs """ + from __future__ import annotations ############################################################################### @@ -26,8 +27,59 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['dijkstra_single_source_shortest_paths', 'global_reciprocity', 'betweenness_centrality', 'all_local_reciprocity', 'triplet_count', 'local_triangle_count', 'average_degree', 'directed_graph_density', 'degree_centrality', 'max_degree', 'min_degree', 'max_out_degree', 'max_in_degree', 'min_out_degree', 'min_in_degree', 'pagerank', 'single_source_shortest_path', 'global_clustering_coefficient', 'temporally_reachable_nodes', 'temporal_bipartite_graph_projection', 'local_clustering_coefficient', 'local_clustering_coefficient_batch', 'weakly_connected_components', 'strongly_connected_components', 'in_components', 'in_component', 'out_components', 'out_component', 'fast_rp', 'global_temporal_three_node_motif', 'global_temporal_three_node_motif_multi', 'local_temporal_three_node_motifs', 'hits', 'balance', 'label_propagation', 'k_core', 'temporal_SEIR', 'louvain', 'fruchterman_reingold', 'cohesive_fruchterman_reingold', 'max_weight_matching', 'Matching', 'Infected'] -def dijkstra_single_source_shortest_paths(graph: GraphView, source: NodeInput, targets: list[NodeInput], direction: Direction = "both", weight: str = 'weight') -> NodeStateWeightedSP: +__all__ = [ + "dijkstra_single_source_shortest_paths", + "global_reciprocity", + "betweenness_centrality", + "all_local_reciprocity", + "triplet_count", + "local_triangle_count", + "average_degree", + "directed_graph_density", + "degree_centrality", + "max_degree", + "min_degree", + "max_out_degree", + "max_in_degree", + "min_out_degree", + "min_in_degree", + "pagerank", + "single_source_shortest_path", + "global_clustering_coefficient", + "temporally_reachable_nodes", + "temporal_bipartite_graph_projection", + "local_clustering_coefficient", + "local_clustering_coefficient_batch", + "weakly_connected_components", + "strongly_connected_components", + "in_components", + "in_component", + "out_components", + "out_component", + "fast_rp", + "global_temporal_three_node_motif", + "global_temporal_three_node_motif_multi", + "local_temporal_three_node_motifs", + "hits", + "balance", + "label_propagation", + "k_core", + "temporal_SEIR", + "louvain", + "fruchterman_reingold", + "cohesive_fruchterman_reingold", + "max_weight_matching", + "Matching", + "Infected", +] + +def dijkstra_single_source_shortest_paths( + graph: GraphView, + source: NodeInput, + targets: list[NodeInput], + direction: Direction = "both", + weight: str = "weight", +) -> NodeStateWeightedSP: """ Finds the shortest paths from a single source to multiple targets in a graph. @@ -57,7 +109,9 @@ def global_reciprocity(graph: GraphView) -> float: float: reciprocity of the graph between 0 and 1. 
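`dijkstra_single_source_shortest_paths` and `global_reciprocity` above both take a `GraphView` as their first argument, so they compose directly with graphs built through the mutation APIs earlier in this patch. A minimal sketch of calling them; the node ids and weight values are illustrative.

from raphtory import Graph, algorithms

g = Graph()
g.add_edge(1, "a", "b", properties={"weight": 2.0})
g.add_edge(2, "b", "c", properties={"weight": 1.0})

# A single float between 0 and 1, as documented above.
reciprocity = algorithms.global_reciprocity(g)

# Weighted shortest paths from "a" to the listed targets; "weight" is the default
# edge property named in the signature above.
paths = algorithms.dijkstra_single_source_shortest_paths(g, "a", targets=["c"])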
""" -def betweenness_centrality(graph: GraphView, k: Optional[int] = None, normalized: bool = True) -> NodeStateF64: +def betweenness_centrality( + graph: GraphView, k: Optional[int] = None, normalized: bool = True +) -> NodeStateF64: """ Computes the betweenness centrality for nodes in a given graph. @@ -225,7 +279,13 @@ def min_in_degree(graph: GraphView) -> int: int: value of the smallest indegree """ -def pagerank(graph: GraphView, iter_count: int = 20, max_diff: Optional[float] = None, use_l2_norm: bool = True, damping_factor: float = 0.85) -> NodeStateF64: +def pagerank( + graph: GraphView, + iter_count: int = 20, + max_diff: Optional[float] = None, + use_l2_norm: bool = True, + damping_factor: float = 0.85, +) -> NodeStateF64: """ Pagerank -- pagerank centrality value of the nodes in a graph @@ -246,7 +306,9 @@ def pagerank(graph: GraphView, iter_count: int = 20, max_diff: Optional[float] = NodeStateF64: Mapping of nodes to their pagerank value. """ -def single_source_shortest_path(graph: GraphView, source: NodeInput, cutoff: Optional[int] = None) -> NodeStateNodes: +def single_source_shortest_path( + graph: GraphView, source: NodeInput, cutoff: Optional[int] = None +) -> NodeStateNodes: """ Calculates the single source shortest paths from a given source node. @@ -277,7 +339,13 @@ def global_clustering_coefficient(graph: GraphView) -> float: [`Triplet Count`](triplet_count) """ -def temporally_reachable_nodes(graph: GraphView, max_hops: int, start_time: int, seed_nodes: list[NodeInput], stop_nodes: Optional[list[NodeInput]] = None) -> NodeStateReachability: +def temporally_reachable_nodes( + graph: GraphView, + max_hops: int, + start_time: int, + seed_nodes: list[NodeInput], + stop_nodes: Optional[list[NodeInput]] = None, +) -> NodeStateReachability: """ Temporally reachable nodes -- the nodes that are reachable by a time respecting path followed out from a set of seed nodes at a starting time. @@ -296,7 +364,9 @@ def temporally_reachable_nodes(graph: GraphView, max_hops: int, start_time: int, NodeStateReachability: Mapping of nodes to their reachability history. """ -def temporal_bipartite_graph_projection(graph: GraphView, delta: int, pivot_type: str) -> Graph: +def temporal_bipartite_graph_projection( + graph: GraphView, delta: int, pivot_type: str +) -> Graph: """ Projects a temporal bipartite graph into an undirected temporal graph over the pivot node type. Let `G` be a bipartite graph with node types `A` and `B`. Given `delta > 0`, the projection graph `G'` pivoting over type `B` nodes, will make a connection between nodes `n1` and `n2` (of type `A`) at time `(t1 + t2)/2` if they respectively have an edge at time `t1`, `t2` with the same node of type `B` in `G`, and `|t2-t1| < delta`. @@ -409,7 +479,14 @@ def out_component(node: Node) -> NodeStateUsize: NodeStateUsize: A NodeState mapping the nodes in the out-component to their distance from the starting node. """ -def fast_rp(graph: GraphView, embedding_dim: int, normalization_strength: float, iter_weights: list[float], seed: Optional[int] = None, threads: Optional[int] = None) -> NodeStateListF64: +def fast_rp( + graph: GraphView, + embedding_dim: int, + normalization_strength: float, + iter_weights: list[float], + seed: Optional[int] = None, + threads: Optional[int] = None, +) -> NodeStateListF64: """ Computes embedding vectors for each vertex of an undirected/bidirectional graph according to the Fast RP algorithm. 
Original Paper: https://doi.org/10.48550/arXiv.1908.11512 @@ -425,7 +502,9 @@ def fast_rp(graph: GraphView, embedding_dim: int, normalization_strength: float, NodeStateListF64: Mapping from nodes to embedding vectors. """ -def global_temporal_three_node_motif(graph: GraphView, delta: int, threads: Optional[int] = None) -> list[int]: +def global_temporal_three_node_motif( + graph: GraphView, delta: int, threads: Optional[int] = None +) -> list[int]: """ Computes the number of three edge, up-to-three node delta-temporal motifs in the graph, using the algorithm of Paranjape et al, Motifs in Temporal Networks (2017). We point the reader to this reference for more information on the algorithm and background, but provide a short summary below. @@ -474,7 +553,9 @@ def global_temporal_three_node_motif(graph: GraphView, delta: int, threads: Opti """ -def global_temporal_three_node_motif_multi(graph: GraphView, deltas: list[int], threads: Optional[int] = None) -> list[list[int]]: +def global_temporal_three_node_motif_multi( + graph: GraphView, deltas: list[int], threads: Optional[int] = None +) -> list[list[int]]: """ Computes the global counts of three-edge up-to-three node temporal motifs for a range of timescales. See `global_temporal_three_node_motif` for an interpretation of each row returned. @@ -487,7 +568,9 @@ def global_temporal_three_node_motif_multi(graph: GraphView, deltas: list[int], list[list[int]]: A list of 40d arrays, each array is the motif count for a particular value of delta, returned in the order that the deltas were given as input. """ -def local_temporal_three_node_motifs(graph: GraphView, delta: int, threads=None) -> NodeStateMotifs: +def local_temporal_three_node_motifs( + graph: GraphView, delta: int, threads=None +) -> NodeStateMotifs: """ Computes the number of each type of motif that each node participates in. See global_temporal_three_node_motifs for a summary of the motifs involved. @@ -503,7 +586,9 @@ def local_temporal_three_node_motifs(graph: GraphView, delta: int, threads=None) the motif. For two node motifs, both constituent nodes count the motif. For triangles, all three constituent nodes count the motif. """ -def hits(graph: GraphView, iter_count: int = 20, threads: Optional[int] = None) -> NodeStateHits: +def hits( + graph: GraphView, iter_count: int = 20, threads: Optional[int] = None +) -> NodeStateHits: """ HITS (Hubs and Authority) Algorithm: @@ -522,7 +607,9 @@ def hits(graph: GraphView, iter_count: int = 20, threads: Optional[int] = None) NodeStateHits: A mapping from nodes their hub and authority scores """ -def balance(graph: GraphView, name: str = "weight", direction: Direction = "both") -> NodeStateF64: +def balance( + graph: GraphView, name: str = "weight", direction: Direction = "both" +) -> NodeStateF64: """ Sums the weights of edges in the graph based on the specified direction. 
@@ -541,7 +628,9 @@ def balance(graph: GraphView, name: str = "weight", direction: Direction = "both """ -def label_propagation(graph: GraphView, seed: Optional[bytes] = None) -> list[set[Node]]: +def label_propagation( + graph: GraphView, seed: Optional[bytes] = None +) -> list[set[Node]]: """ Computes components using a label propagation algorithm @@ -554,7 +643,9 @@ def label_propagation(graph: GraphView, seed: Optional[bytes] = None) -> list[se """ -def k_core(graph: GraphView, k: int, iter_count: int, threads: Optional[int] = None) -> list[Node]: +def k_core( + graph: GraphView, k: int, iter_count: int, threads: Optional[int] = None +) -> list[Node]: """ Determines which nodes are in the k-core for a given value of k @@ -569,7 +660,15 @@ def k_core(graph: GraphView, k: int, iter_count: int, threads: Optional[int] = N """ -def temporal_SEIR(graph: GraphView, seeds: int | float | list[NodeInput], infection_prob: float, initial_infection: int | str | datetime, recovery_rate: float | None = None, incubation_rate: float | None = None, rng_seed: int | None = None) -> NodeStateSEIR: +def temporal_SEIR( + graph: GraphView, + seeds: int | float | list[NodeInput], + infection_prob: float, + initial_infection: int | str | datetime, + recovery_rate: float | None = None, + incubation_rate: float | None = None, + rng_seed: int | None = None, +) -> NodeStateSEIR: """ Simulate an SEIR dynamic on the network @@ -599,7 +698,12 @@ def temporal_SEIR(graph: GraphView, seeds: int | float | list[NodeInput], infect """ -def louvain(graph: GraphView, resolution: float = 1.0, weight_prop: str | None = None, tol: None | float = None) -> NodeStateUsize: +def louvain( + graph: GraphView, + resolution: float = 1.0, + weight_prop: str | None = None, + tol: None | float = None, +) -> NodeStateUsize: """ Louvain algorithm for community detection @@ -613,7 +717,14 @@ def louvain(graph: GraphView, resolution: float = 1.0, weight_prop: str | None = NodeStateUsize: Mapping of nodes to their community assignment """ -def fruchterman_reingold(graph: GraphView, iterations: int | None = 100, scale: float | None = 1.0, node_start_size: float | None = 1.0, cooloff_factor: float | None = 0.95, dt: float | None = 0.1) -> NodeLayout: +def fruchterman_reingold( + graph: GraphView, + iterations: int | None = 100, + scale: float | None = 1.0, + node_start_size: float | None = 1.0, + cooloff_factor: float | None = 0.95, + dt: float | None = 0.1, +) -> NodeLayout: """ Fruchterman Reingold layout algorithm @@ -629,7 +740,14 @@ def fruchterman_reingold(graph: GraphView, iterations: int | None = 100, scale: NodeLayout: A mapping from nodes to their [x, y] positions """ -def cohesive_fruchterman_reingold(graph: GraphView, iter_count: int = 100, scale: float = 1.0, node_start_size: float = 1.0, cooloff_factor: float = 0.95, dt: float = 0.1) -> NodeLayout: +def cohesive_fruchterman_reingold( + graph: GraphView, + iter_count: int = 100, + scale: float = 1.0, + node_start_size: float = 1.0, + cooloff_factor: float = 0.95, + dt: float = 0.1, +) -> NodeLayout: """ Cohesive version of `fruchterman_reingold` that adds virtual edges between isolated nodes Arguments: @@ -645,7 +763,12 @@ def cohesive_fruchterman_reingold(graph: GraphView, iter_count: int = 100, scale """ -def max_weight_matching(graph: GraphView, weight_prop: Optional[str] = None, max_cardinality: bool = True, verify_optimum_flag: bool = False) -> Matching: +def max_weight_matching( + graph: GraphView, + weight_prop: Optional[str] = None, + max_cardinality: bool = True, + 
verify_optimum_flag: bool = False, +) -> Matching: """ Compute a maximum-weighted matching in the general undirected weighted graph given by "edges". If `max_cardinality` is true, only @@ -682,7 +805,7 @@ def max_weight_matching(graph: GraphView, weight_prop: Optional[str] = None, max Matching: The matching """ -class Matching(object): +class Matching(object): """A Matching (i.e., a set of edges that do not share any nodes)""" def __bool__(self): @@ -754,8 +877,7 @@ class Matching(object): """ -class Infected(object): - +class Infected(object): def __repr__(self): """Return repr(self).""" diff --git a/python/python/raphtory/filter/__init__.pyi b/python/python/raphtory/filter/__init__.pyi index 023a0325ff..aae82a1c26 100644 --- a/python/python/raphtory/filter/__init__.pyi +++ b/python/python/raphtory/filter/__init__.pyi @@ -23,9 +23,20 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['FilterExpr', 'PropertyFilterOps', 'NodeFilterBuilder', 'Node', 'EdgeFilterOp', 'EdgeEndpoint', 'Edge', 'Property', 'Metadata', 'TemporalPropertyFilterBuilder'] -class FilterExpr(object): - +__all__ = [ + "FilterExpr", + "PropertyFilterOps", + "NodeFilterBuilder", + "Node", + "EdgeFilterOp", + "EdgeEndpoint", + "Edge", + "Property", + "Metadata", + "TemporalPropertyFilterBuilder", +] + +class FilterExpr(object): def __and__(self, value): """Return self&value.""" @@ -41,8 +52,7 @@ class FilterExpr(object): def __ror__(self, value): """Return value|self.""" -class PropertyFilterOps(object): - +class PropertyFilterOps(object): def __eq__(self, value): """Return self==value.""" @@ -64,7 +74,7 @@ class PropertyFilterOps(object): def contains(self, value): """ Returns a filter expression that checks if this object contains a specified property. - + Arguments: PropValue: @@ -72,7 +82,9 @@ class PropertyFilterOps(object): FilterExpr: """ - def fuzzy_search(self, prop_value: str, levenshtein_distance: Any, prefix_match: bool): + def fuzzy_search( + self, prop_value: str, levenshtein_distance: Any, prefix_match: bool + ): """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -82,7 +94,7 @@ class PropertyFilterOps(object): prop_value (str): levenshtein_distance (usize): prefix_match (bool): - + Returns: FilterExpr: """ @@ -90,7 +102,7 @@ class PropertyFilterOps(object): def is_in(self, values): """ Returns a filter expression that checks if a given value is in a specified iterable of properties. - + Arguments: list[PropValue]: @@ -101,7 +113,7 @@ class PropertyFilterOps(object): def is_none(self): """ Returns a filter expression that checks if a given value is none. - + Returns: FilterExpr: """ @@ -109,7 +121,7 @@ class PropertyFilterOps(object): def is_not_in(self, values): """ Returns a filter expression that checks if a given value is not in a specified iterable of properties. - + Arguments: list[PropValue]: @@ -120,7 +132,7 @@ class PropertyFilterOps(object): def is_some(self): """ Returns a filter expression that checks if a given value is some. - + Returns: FilterExpr: """ @@ -128,7 +140,7 @@ class PropertyFilterOps(object): def not_contains(self, value): """ Returns a filter expression that checks if this object does not contain a specified property. 
- + Arguments: PropValue: @@ -136,7 +148,7 @@ class PropertyFilterOps(object): FilterExpr: """ -class NodeFilterBuilder(object): +class NodeFilterBuilder(object): """ A builder for constructing node filters @@ -213,7 +225,7 @@ class NodeFilterBuilder(object): """ Returns a filter expression that checks if the specified iterable of strings does not contain a given value. - + Arguments: str: @@ -221,8 +233,7 @@ class NodeFilterBuilder(object): filter expression """ -class Node(object): - +class Node(object): @staticmethod def name(): """ @@ -241,8 +252,7 @@ class Node(object): NodeFilterBuilder: A filter builder for filtering by node type """ -class EdgeFilterOp(object): - +class EdgeFilterOp(object): def __eq__(self, value): """Return self==value.""" @@ -264,7 +274,7 @@ class EdgeFilterOp(object): def contains(self, value: str): """ Returns a filter expression that checks if a given value contains the specified string. - + Arguments: value (str): @@ -282,7 +292,7 @@ class EdgeFilterOp(object): prop_value (str): levenshtein_distance (usize): prefix_match (bool): - + Returns: FilterExpr: """ @@ -290,7 +300,7 @@ class EdgeFilterOp(object): def is_in(self, values: list[str]): """ Returns a filter expression that checks if a given value is contained within the specified iterable of strings. - + Arguments: values (list[str]): @@ -301,7 +311,7 @@ class EdgeFilterOp(object): def is_not_in(self, values: list[str]): """ Returns a filter expression that checks if a given value is not contained within the provided iterable of strings. - + Arguments: values (list[str]): @@ -312,7 +322,7 @@ class EdgeFilterOp(object): def not_contains(self, value: str): """ Returns a filter expression that checks if a given value does not contain the specified string. - + Arguments: value (str): @@ -320,22 +330,16 @@ class EdgeFilterOp(object): filter expression """ -class EdgeEndpoint(object): - - def name(self): - ... - -class Edge(object): +class EdgeEndpoint(object): + def name(self): ... +class Edge(object): @staticmethod - def dst(): - ... - + def dst(): ... @staticmethod - def src(): - ... + def src(): ... -class Property(PropertyFilterOps): +class Property(PropertyFilterOps): """ Construct a property filter @@ -346,10 +350,9 @@ class Property(PropertyFilterOps): def __new__(cls, name: str) -> Property: """Create and return a new object. See help(type) for accurate signature.""" - def temporal(self): - ... + def temporal(self): ... -class Metadata(PropertyFilterOps): +class Metadata(PropertyFilterOps): """ Construct a metadata filter @@ -360,10 +363,6 @@ class Metadata(PropertyFilterOps): def __new__(cls, name: str) -> Metadata: """Create and return a new object. See help(type) for accurate signature.""" -class TemporalPropertyFilterBuilder(object): - - def any(self): - ... - - def latest(self): - ... +class TemporalPropertyFilterBuilder(object): + def any(self): ... + def latest(self): ... 
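The filter stubs above build `FilterExpr` values: `Property` and `Metadata` compare against property values, and the resulting expressions compose with `&` and `|`. A minimal sketch of putting them together; the property names are illustrative, and the graph-side methods that ultimately consume a `FilterExpr` sit outside this diff.

from raphtory import filter

# Property(...) filters on temporal property values, Metadata(...) on metadata.
heavy = filter.Property("weight") == 1.0
listed = filter.Property("kind").is_in(["friend", "colleague"])
tagged = filter.Metadata("source") == "csv"

# FilterExpr supports & (and) and | (or) for combining conditions.
expr = heavy | (listed & tagged)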
diff --git a/python/python/raphtory/graph_gen/__init__.pyi b/python/python/raphtory/graph_gen/__init__.pyi index 3ec394b85c..3a9f849f05 100644 --- a/python/python/raphtory/graph_gen/__init__.pyi +++ b/python/python/raphtory/graph_gen/__init__.pyi @@ -1,6 +1,7 @@ """ Generate Raphtory graphs from attachment models """ + from __future__ import annotations ############################################################################### @@ -27,7 +28,8 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['random_attachment', 'ba_preferential_attachment'] +__all__ = ["random_attachment", "ba_preferential_attachment"] + def random_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None): """ Generates a graph using the random attachment model @@ -46,7 +48,9 @@ def random_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any None """ -def ba_preferential_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None): +def ba_preferential_attachment( + g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None +): """ Generates a graph using the preferential attachment model. diff --git a/python/python/raphtory/graph_loader/__init__.pyi b/python/python/raphtory/graph_loader/__init__.pyi index 10ba033c37..e0b31f720f 100644 --- a/python/python/raphtory/graph_loader/__init__.pyi +++ b/python/python/raphtory/graph_loader/__init__.pyi @@ -1,6 +1,7 @@ """ Load and save Raphtory graphs from/to file(s) """ + from __future__ import annotations ############################################################################### @@ -27,7 +28,16 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['lotr_graph', 'lotr_graph_with_props', 'neo4j_movie_graph', 'stable_coin_graph', 'reddit_hyperlink_graph', 'reddit_hyperlink_graph_local', 'karate_club_graph'] +__all__ = [ + "lotr_graph", + "lotr_graph_with_props", + "neo4j_movie_graph", + "stable_coin_graph", + "reddit_hyperlink_graph", + "reddit_hyperlink_graph_local", + "karate_club_graph", +] + def lotr_graph() -> Graph: """ Load the Lord of the Rings dataset into a graph. @@ -56,7 +66,9 @@ def lotr_graph_with_props() -> Graph: Graph: """ -def neo4j_movie_graph(uri: str, username: str, password: str, database: str = ...) -> Graph: +def neo4j_movie_graph( + uri: str, username: str, password: str, database: str = ... +) -> Graph: """ Returns the neo4j movie graph example. 
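The loaders above return ready-built graphs, which makes them convenient fixtures for trying the rest of the API. A minimal sketch using the Lord of the Rings example dataset.

from raphtory import graph_loader

# Both calls return a fully built Graph; the *_with_props variant also attaches
# the character properties described in its docstring above.
lotr = graph_loader.lotr_graph()
lotr_props = graph_loader.lotr_graph_with_props()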
diff --git a/python/python/raphtory/graphql/__init__.pyi b/python/python/raphtory/graphql/__init__.pyi index b8315a8395..4cd4d5c51d 100644 --- a/python/python/raphtory/graphql/__init__.pyi +++ b/python/python/raphtory/graphql/__init__.pyi @@ -23,8 +23,26 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['GraphServer', 'RunningGraphServer', 'RaphtoryClient', 'RemoteGraph', 'RemoteEdge', 'RemoteNode', 'RemoteNodeAddition', 'RemoteUpdate', 'RemoteEdgeAddition', 'RemoteIndexSpec', 'PropsInput', 'SomePropertySpec', 'AllPropertySpec', 'encode_graph', 'decode_graph', 'schema'] -class GraphServer(object): +__all__ = [ + "GraphServer", + "RunningGraphServer", + "RaphtoryClient", + "RemoteGraph", + "RemoteEdge", + "RemoteNode", + "RemoteNodeAddition", + "RemoteUpdate", + "RemoteEdgeAddition", + "RemoteIndexSpec", + "PropsInput", + "SomePropertySpec", + "AllPropertySpec", + "encode_graph", + "decode_graph", + "schema", +] + +class GraphServer(object): """ A class for defining and running a Raphtory GraphQL server @@ -43,7 +61,21 @@ class GraphServer(object): create_index: """ - def __new__(cls, work_dir: str | PathLike, cache_capacity: Optional[int] = None, cache_tti_seconds: Optional[int] = None, log_level: Optional[str] = None, tracing: Optional[bool] = None, otlp_agent_host: Optional[str] = None, otlp_agent_port: Optional[str] = None, otlp_tracing_service_name: Optional[str] = None, auth_public_key: Any = None, auth_enabled_for_reads: Any = None, config_path: Optional[str | PathLike] = None, create_index: Any = None) -> GraphServer: + def __new__( + cls, + work_dir: str | PathLike, + cache_capacity: Optional[int] = None, + cache_tti_seconds: Optional[int] = None, + log_level: Optional[str] = None, + tracing: Optional[bool] = None, + otlp_agent_host: Optional[str] = None, + otlp_agent_port: Optional[str] = None, + otlp_tracing_service_name: Optional[str] = None, + auth_public_key: Any = None, + auth_enabled_for_reads: Any = None, + config_path: Optional[str | PathLike] = None, + create_index: Any = None, + ) -> GraphServer: """Create and return a new object. See help(type) for accurate signature.""" def run(self, port: int = 1736, timeout_ms: int = 180000) -> None: @@ -58,7 +90,13 @@ class GraphServer(object): None: """ - def set_embeddings(self, cache: str, embedding: Optional[Callable] = None, nodes: bool | str = True, edges: bool | str = True) -> GraphServer: + def set_embeddings( + self, + cache: str, + embedding: Optional[Callable] = None, + nodes: bool | str = True, + edges: bool | str = True, + ) -> GraphServer: """ Setup the server to vectorise graphs with a default template. @@ -94,7 +132,9 @@ class GraphServer(object): GraphServer: The server with indexing disabled """ - def with_vectorised_graphs(self, graph_names: list[str], nodes: bool | str = True, edges: bool | str = True) -> GraphServer: + def with_vectorised_graphs( + self, graph_names: list[str], nodes: bool | str = True, edges: bool | str = True + ) -> GraphServer: """ Vectorise a subset of the graphs of the server. @@ -107,15 +147,11 @@ class GraphServer(object): GraphServer: A new server object containing the vectorised graphs. """ -class RunningGraphServer(object): +class RunningGraphServer(object): """A Raphtory server handler that also enables querying the server""" - def __enter__(self): - ... - - def __exit__(self, _exc_type, _exc_val, _exc_tb): - ... - + def __enter__(self): ... + def __exit__(self, _exc_type, _exc_val, _exc_tb): ... 
def get_client(self): """ Get the client for the server @@ -132,7 +168,7 @@ class RunningGraphServer(object): None: """ -class RaphtoryClient(object): +class RaphtoryClient(object): """ A client for handling GraphQL operations in the context of Raphtory. @@ -214,7 +250,9 @@ class RaphtoryClient(object): """ - def query(self, query: str, variables: Optional[dict[str, Any]] = None) -> dict[str, Any]: + def query( + self, query: str, variables: Optional[dict[str, Any]] = None + ) -> dict[str, Any]: """ Make a GraphQL query against the server. @@ -252,7 +290,9 @@ class RaphtoryClient(object): """ - def send_graph(self, path: str, graph: Graph | PersistentGraph, overwrite: bool = False) -> dict[str, Any]: + def send_graph( + self, path: str, graph: Graph | PersistentGraph, overwrite: bool = False + ) -> dict[str, Any]: """ Send a graph to the server @@ -265,7 +305,9 @@ class RaphtoryClient(object): dict[str, Any]: The data field from the graphQL response after executing the mutation. """ - def upload_graph(self, path: str, file_path: str, overwrite: bool = False) -> dict[str, Any]: + def upload_graph( + self, path: str, file_path: str, overwrite: bool = False + ) -> dict[str, Any]: """ Upload graph file from a path file_path on the client @@ -278,9 +320,15 @@ class RaphtoryClient(object): dict[str, Any]: The data field from the graphQL response after executing the mutation. """ -class RemoteGraph(object): - - def add_edge(self, timestamp: int | str | datetime, src: str | int, dst: str | int, properties: Optional[dict] = None, layer: Optional[str] = None) -> RemoteEdge: +class RemoteGraph(object): + def add_edge( + self, + timestamp: int | str | datetime, + src: str | int, + dst: str | int, + properties: Optional[dict] = None, + layer: Optional[str] = None, + ) -> RemoteEdge: """ Adds a new edge with the given source and destination nodes and properties to the remote graph. @@ -317,7 +365,13 @@ class RemoteGraph(object): None: """ - def add_node(self, timestamp: int | str | datetime, id: str | int, properties: Optional[dict] = None, node_type: Optional[str] = None) -> RemoteNode: + def add_node( + self, + timestamp: int | str | datetime, + id: str | int, + properties: Optional[dict] = None, + node_type: Optional[str] = None, + ) -> RemoteNode: """ Adds a new node with the given id and properties to the remote graph. @@ -354,7 +408,13 @@ class RemoteGraph(object): None: """ - def create_node(self, timestamp: int | str | datetime, id: str | int, properties: Optional[dict] = None, node_type: Optional[str] = None) -> RemoteNode: + def create_node( + self, + timestamp: int | str | datetime, + id: str | int, + properties: Optional[dict] = None, + node_type: Optional[str] = None, + ) -> RemoteNode: """ Create a new node with the given id and properties to the remote graph and fail if the node already exists. @@ -368,7 +428,13 @@ class RemoteGraph(object): RemoteNode: the new remote node """ - def delete_edge(self, timestamp: int, src: str | int, dst: str | int, layer: Optional[str] = None) -> RemoteEdge: + def delete_edge( + self, + timestamp: int, + src: str | int, + dst: str | int, + layer: Optional[str] = None, + ) -> RemoteEdge: """ Deletes an edge in the remote graph, given the timestamp, src and dst nodes and layer (optional) @@ -416,7 +482,7 @@ class RemoteGraph(object): None: """ -class RemoteEdge(object): +class RemoteEdge(object): """ A remote edge reference @@ -425,7 +491,9 @@ class RemoteEdge(object): and [RemoteGraph.delete_edge][raphtory.graphql.RemoteGraph.delete_edge]. 
""" - def add_metadata(self, properties: dict[str, PropValue], layer: Optional[str] = None) -> None: + def add_metadata( + self, properties: dict[str, PropValue], layer: Optional[str] = None + ) -> None: """ Add metadata to the edge within the remote graph. This function is used to add metadata to an edge that does not @@ -439,7 +507,12 @@ class RemoteEdge(object): None: """ - def add_updates(self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None, layer: Optional[str] = None) -> None: + def add_updates( + self, + t: int | str | datetime, + properties: Optional[dict[str, PropValue]] = None, + layer: Optional[str] = None, + ) -> None: """ Add updates to an edge in the remote graph at a specified time. @@ -470,7 +543,9 @@ class RemoteEdge(object): GraphError: If the operation fails. """ - def update_metadata(self, properties: dict[str, PropValue], layer: Optional[str] = None) -> None: + def update_metadata( + self, properties: dict[str, PropValue], layer: Optional[str] = None + ) -> None: """ Update metadata of an edge in the remote graph overwriting existing values. This function is used to add properties to an edge that does not @@ -484,8 +559,7 @@ class RemoteEdge(object): None: """ -class RemoteNode(object): - +class RemoteNode(object): def add_metadata(self, properties: dict[str, PropValue]) -> None: """ Add metadata to a node in the remote graph. @@ -499,7 +573,9 @@ class RemoteNode(object): None: """ - def add_updates(self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None) -> None: + def add_updates( + self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None + ) -> None: """ Add updates to a node in the remote graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -537,7 +613,7 @@ class RemoteNode(object): None: """ -class RemoteNodeAddition(object): +class RemoteNodeAddition(object): """ Node addition update @@ -548,10 +624,16 @@ class RemoteNodeAddition(object): updates (list[RemoteUpdate], optional): the temporal updates """ - def __new__(cls, name: GID, node_type: Optional[str] = None, metadata: Optional[PropInput] = None, updates: Optional[list[RemoteUpdate]] = None) -> RemoteNodeAddition: + def __new__( + cls, + name: GID, + node_type: Optional[str] = None, + metadata: Optional[PropInput] = None, + updates: Optional[list[RemoteUpdate]] = None, + ) -> RemoteNodeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteUpdate(object): +class RemoteUpdate(object): """ A temporal update @@ -560,10 +642,12 @@ class RemoteUpdate(object): properties (PropInput, optional): the properties for the update """ - def __new__(cls, time: TimeInput, properties: Optional[PropInput] = None) -> RemoteUpdate: + def __new__( + cls, time: TimeInput, properties: Optional[PropInput] = None + ) -> RemoteUpdate: """Create and return a new object. 
See help(type) for accurate signature.""" -class RemoteEdgeAddition(object): +class RemoteEdgeAddition(object): """ An edge update @@ -575,10 +659,17 @@ class RemoteEdgeAddition(object): updates (list[RemoteUpdate], optional): the temporal updates for the edge """ - def __new__(cls, src: GID, dst: GID, layer: Optional[str] = None, metadata: Optional[PropInput] = None, updates: Optional[list[RemoteUpdate]] = None) -> RemoteEdgeAddition: + def __new__( + cls, + src: GID, + dst: GID, + layer: Optional[str] = None, + metadata: Optional[PropInput] = None, + updates: Optional[list[RemoteUpdate]] = None, + ) -> RemoteEdgeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteIndexSpec(object): +class RemoteIndexSpec(object): """ Create a RemoteIndexSpec specifying which node and edge properties to index. @@ -590,7 +681,7 @@ class RemoteIndexSpec(object): def __new__(cls, node_props: PropsInput, edge_props: PropsInput) -> RemoteIndexSpec: """Create and return a new object. See help(type) for accurate signature.""" -class PropsInput(object): +class PropsInput(object): """ Create a PropsInput by choosing to include all/some properties explicitly. @@ -602,10 +693,14 @@ class PropsInput(object): ValueError: If neither all and some are specified. """ - def __new__(cls, all: Optional[AllPropertySpec] = None, some: Optional[SomePropertySpec] = None) -> PropsInput: + def __new__( + cls, + all: Optional[AllPropertySpec] = None, + some: Optional[SomePropertySpec] = None, + ) -> PropsInput: """Create and return a new object. See help(type) for accurate signature.""" -class SomePropertySpec(object): +class SomePropertySpec(object): """ Create a SomePropertySpec by explicitly listing metadata and/or temporal property names. @@ -614,10 +709,12 @@ class SomePropertySpec(object): properties (list[str]): Temporal property names. Defaults to []. """ - def __new__(cls, metadata: list[str] = [], properties: list[str] = []) -> SomePropertySpec: + def __new__( + cls, metadata: list[str] = [], properties: list[str] = [] + ) -> SomePropertySpec: """Create and return a new object. See help(type) for accurate signature.""" -class AllPropertySpec(object): +class AllPropertySpec(object): """ Specifies that **all** properties should be included when creating an index. Use one of the predefined variants: ALL , ALL_METADATA , or ALL_TEMPORAL . 
diff --git a/python/python/raphtory/iterables/__init__.pyi b/python/python/raphtory/iterables/__init__.pyi index ec2c4d6ee9..2a80bbc5cb 100644 --- a/python/python/raphtory/iterables/__init__.pyi +++ b/python/python/raphtory/iterables/__init__.pyi @@ -23,9 +23,33 @@ from os import PathLike import networkx as nx # type: ignore import pyvis # type: ignore -__all__ = ['NestedUtcDateTimeIterable', 'NestedGIDIterable', 'GIDIterable', 'StringIterable', 'OptionArcStringIterable', 'UsizeIterable', 'OptionI64Iterable', 'NestedOptionArcStringIterable', 'NestedStringIterable', 'NestedOptionI64Iterable', 'NestedI64VecIterable', 'NestedUsizeIterable', 'BoolIterable', 'ArcStringIterable', 'NestedVecUtcDateTimeIterable', 'OptionVecUtcDateTimeIterable', 'GIDGIDIterable', 'NestedGIDGIDIterable', 'NestedBoolIterable', 'U64Iterable', 'OptionUtcDateTimeIterable', 'ArcStringVecIterable', 'NestedArcStringVecIterable'] -class NestedUtcDateTimeIterable(object): - +__all__ = [ + "NestedUtcDateTimeIterable", + "NestedGIDIterable", + "GIDIterable", + "StringIterable", + "OptionArcStringIterable", + "UsizeIterable", + "OptionI64Iterable", + "NestedOptionArcStringIterable", + "NestedStringIterable", + "NestedOptionI64Iterable", + "NestedI64VecIterable", + "NestedUsizeIterable", + "BoolIterable", + "ArcStringIterable", + "NestedVecUtcDateTimeIterable", + "OptionVecUtcDateTimeIterable", + "GIDGIDIterable", + "NestedGIDGIDIterable", + "NestedBoolIterable", + "U64Iterable", + "OptionUtcDateTimeIterable", + "ArcStringVecIterable", + "NestedArcStringVecIterable", +] + +class NestedUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -53,11 +77,9 @@ class NestedUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedGIDIterable(object): + def collect(self): ... +class NestedGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -85,17 +107,11 @@ class NestedGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... - -class GIDIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class GIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -123,17 +139,11 @@ class GIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... - -class StringIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class StringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -161,11 +171,9 @@ class StringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class OptionArcStringIterable(object): + def collect(self): ... +class OptionArcStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -193,11 +201,9 @@ class OptionArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class UsizeIterable(object): + def collect(self): ... +class UsizeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -225,23 +231,13 @@ class UsizeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def mean(self): - ... - - def min(self): - ... - - def sum(self): - ... - -class OptionI64Iterable(object): + def collect(self): ... + def max(self): ... + def mean(self): ... + def min(self): ... 
+ def sum(self): ... +class OptionI64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -269,17 +265,11 @@ class OptionI64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... - -class NestedOptionArcStringIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class NestedOptionArcStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -307,11 +297,9 @@ class NestedOptionArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedStringIterable(object): + def collect(self): ... +class NestedStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -339,11 +327,9 @@ class NestedStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedOptionI64Iterable(object): + def collect(self): ... +class NestedOptionI64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -371,17 +357,11 @@ class NestedOptionI64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... - -class NestedI64VecIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class NestedI64VecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -409,11 +389,9 @@ class NestedI64VecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedUsizeIterable(object): + def collect(self): ... +class NestedUsizeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -441,23 +419,13 @@ class NestedUsizeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def mean(self): - ... - - def min(self): - ... - - def sum(self): - ... - -class BoolIterable(object): + def collect(self): ... + def max(self): ... + def mean(self): ... + def min(self): ... + def sum(self): ... +class BoolIterable(object): def __eq__(self, value): """Return self==value.""" @@ -485,11 +453,9 @@ class BoolIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class ArcStringIterable(object): + def collect(self): ... +class ArcStringIterable(object): def __iter__(self): """Implement iter(self).""" @@ -499,11 +465,9 @@ class ArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedVecUtcDateTimeIterable(object): + def collect(self): ... +class NestedVecUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -531,11 +495,9 @@ class NestedVecUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class OptionVecUtcDateTimeIterable(object): + def collect(self): ... +class OptionVecUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -563,11 +525,9 @@ class OptionVecUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class GIDGIDIterable(object): + def collect(self): ... +class GIDGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -595,17 +555,11 @@ class GIDGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... 
- -class NestedGIDGIDIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class NestedGIDGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -633,17 +587,11 @@ class NestedGIDGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def min(self): - ... - -class NestedBoolIterable(object): + def collect(self): ... + def max(self): ... + def min(self): ... +class NestedBoolIterable(object): def __eq__(self, value): """Return self==value.""" @@ -671,11 +619,9 @@ class NestedBoolIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class U64Iterable(object): + def collect(self): ... +class U64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -703,23 +649,13 @@ class U64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - - def max(self): - ... - - def mean(self): - ... - - def min(self): - ... - - def sum(self): - ... - -class OptionUtcDateTimeIterable(object): + def collect(self): ... + def max(self): ... + def mean(self): ... + def min(self): ... + def sum(self): ... +class OptionUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -747,11 +683,9 @@ class OptionUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class ArcStringVecIterable(object): + def collect(self): ... +class ArcStringVecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -779,11 +713,9 @@ class ArcStringVecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... - -class NestedArcStringVecIterable(object): + def collect(self): ... +class NestedArcStringVecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -811,5 +743,4 @@ class NestedArcStringVecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): - ... + def collect(self): ... 
diff --git a/python/python/raphtory/node_state/__init__.pyi b/python/python/raphtory/node_state/__init__.pyi index 469a550b2e..456f7240dd 100644 --- a/python/python/raphtory/node_state/__init__.pyi +++ b/python/python/raphtory/node_state/__init__.pyi @@ -23,9 +23,42 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['NodeGroups', 'DegreeView', 'NodeStateUsize', 'NodeStateU64', 'NodeStateOptionI64', 'IdView', 'NodeStateGID', 'EarliestTimeView', 'LatestTimeView', 'NameView', 'NodeStateString', 'EarliestDateTimeView', 'LatestDateTimeView', 'NodeStateOptionDateTime', 'HistoryView', 'EdgeHistoryCountView', 'NodeStateListI64', 'HistoryDateTimeView', 'NodeStateOptionListDateTime', 'NodeTypeView', 'NodeStateOptionStr', 'NodeStateListDateTime', 'NodeStateWeightedSP', 'NodeStateF64', 'NodeStateNodes', 'NodeStateReachability', 'NodeStateListF64', 'NodeStateMotifs', 'NodeStateHits', 'NodeStateSEIR', 'NodeLayout', 'NodeStateF64String'] -class NodeGroups(object): - +__all__ = [ + "NodeGroups", + "DegreeView", + "NodeStateUsize", + "NodeStateU64", + "NodeStateOptionI64", + "IdView", + "NodeStateGID", + "EarliestTimeView", + "LatestTimeView", + "NameView", + "NodeStateString", + "EarliestDateTimeView", + "LatestDateTimeView", + "NodeStateOptionDateTime", + "HistoryView", + "EdgeHistoryCountView", + "NodeStateListI64", + "HistoryDateTimeView", + "NodeStateOptionListDateTime", + "NodeTypeView", + "NodeStateOptionStr", + "NodeStateListDateTime", + "NodeStateWeightedSP", + "NodeStateF64", + "NodeStateNodes", + "NodeStateReachability", + "NodeStateListF64", + "NodeStateMotifs", + "NodeStateHits", + "NodeStateSEIR", + "NodeLayout", + "NodeStateF64String", +] + +class NodeGroups(object): def __bool__(self): """True if self else False""" @@ -68,7 +101,7 @@ class NodeGroups(object): Iterator[Tuple[Any, GraphView]]: Iterator over subgraphs with corresponding value """ -class DegreeView(object): +class DegreeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -230,7 +263,9 @@ class DegreeView(object): DegreeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -385,7 +420,12 @@ class DegreeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -574,8 +614,7 @@ class DegreeView(object): Optional[int]: """ -class NodeStateUsize(object): - +class NodeStateUsize(object): def __eq__(self, value): """Return self==value.""" @@ -768,8 +807,7 @@ class NodeStateUsize(object): Iterator[int]: Iterator over values """ -class NodeStateU64(object): - +class NodeStateU64(object): def __eq__(self, value): """Return self==value.""" @@ -954,8 +992,7 @@ class NodeStateU64(object): Iterator[int]: Iterator over values """ -class NodeStateOptionI64(object): - +class NodeStateOptionI64(object): def __eq__(self, value): """Return self==value.""" @@ -997,7 +1034,9 @@ class NodeStateOptionI64(object): NodeStateOptionI64: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: + def get( + self, node: NodeInput, default: Optional[Optional[int]] = None + ) -> Optional[Optional[int]]: """ Get value for node @@ -1131,7 +1170,7 @@ class NodeStateOptionI64(object): Iterator[Optional[int]]: Iterator over values """ -class IdView(object): +class IdView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1317,8 +1356,7 @@ class IdView(object): Iterator[GID]: Iterator over values """ -class NodeStateGID(object): - +class NodeStateGID(object): def __eq__(self, value): """Return self==value.""" @@ -1486,7 +1524,7 @@ class NodeStateGID(object): Iterator[GID]: Iterator over values """ -class EarliestTimeView(object): +class EarliestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1648,7 +1686,9 @@ class EarliestTimeView(object): EarliestTimeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -1667,7 +1707,9 @@ class EarliestTimeView(object): WindowSet: A `WindowSet` object. """ - def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: + def get( + self, node: NodeInput, default: Optional[Optional[int]] = None + ) -> Optional[Optional[int]]: """ Get value for node @@ -1795,7 +1837,12 @@ class EarliestTimeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -1975,7 +2022,7 @@ class EarliestTimeView(object): Optional[int]: """ -class LatestTimeView(object): +class LatestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2137,7 +2184,9 @@ class LatestTimeView(object): LatestTimeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -2156,7 +2205,9 @@ class LatestTimeView(object): WindowSet: A `WindowSet` object. 
""" - def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: + def get( + self, node: NodeInput, default: Optional[Optional[int]] = None + ) -> Optional[Optional[int]]: """ Get value for node @@ -2284,7 +2335,12 @@ class LatestTimeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -2464,7 +2520,7 @@ class LatestTimeView(object): Optional[int]: """ -class NameView(object): +class NameView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2658,8 +2714,7 @@ class NameView(object): Iterator[str]: Iterator over values """ -class NodeStateString(object): - +class NodeStateString(object): def __eq__(self, value): """Return self==value.""" @@ -2835,7 +2890,7 @@ class NodeStateString(object): Iterator[str]: Iterator over values """ -class EarliestDateTimeView(object): +class EarliestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2997,7 +3052,9 @@ class EarliestDateTimeView(object): EarliestDateTimeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3016,7 +3073,9 @@ class EarliestDateTimeView(object): WindowSet: A `WindowSet` object. """ - def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: + def get( + self, node: NodeInput, default: Optional[Optional[datetime]] = None + ) -> Optional[Optional[datetime]]: """ Get value for node @@ -3144,7 +3203,12 @@ class EarliestDateTimeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3324,7 +3388,7 @@ class EarliestDateTimeView(object): Optional[int]: """ -class LatestDateTimeView(object): +class LatestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -3486,7 +3550,9 @@ class LatestDateTimeView(object): LatestDateTimeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3505,7 +3571,9 @@ class LatestDateTimeView(object): WindowSet: A `WindowSet` object. 
""" - def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: + def get( + self, node: NodeInput, default: Optional[Optional[datetime]] = None + ) -> Optional[Optional[datetime]]: """ Get value for node @@ -3633,7 +3701,12 @@ class LatestDateTimeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3813,8 +3886,7 @@ class LatestDateTimeView(object): Optional[int]: """ -class NodeStateOptionDateTime(object): - +class NodeStateOptionDateTime(object): def __eq__(self, value): """Return self==value.""" @@ -3856,7 +3928,9 @@ class NodeStateOptionDateTime(object): NodeStateOptionDateTime: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: + def get( + self, node: NodeInput, default: Optional[Optional[datetime]] = None + ) -> Optional[Optional[datetime]]: """ Get value for node @@ -3990,7 +4064,7 @@ class NodeStateOptionDateTime(object): Iterator[Optional[datetime]]: Iterator over values """ -class HistoryView(object): +class HistoryView(object): """A lazy view over node values""" def __eq__(self, value): @@ -4152,7 +4226,9 @@ class HistoryView(object): HistoryView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -4171,7 +4247,9 @@ class HistoryView(object): WindowSet: A `WindowSet` object. """ - def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: + def get( + self, node: NodeInput, default: Optional[list[int]] = None + ) -> Optional[list[int]]: """ Get value for node @@ -4291,7 +4369,12 @@ class HistoryView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4471,7 +4554,7 @@ class HistoryView(object): Optional[int]: """ -class EdgeHistoryCountView(object): +class EdgeHistoryCountView(object): """A lazy view over node values""" def __eq__(self, value): @@ -4633,7 +4716,9 @@ class EdgeHistoryCountView(object): EdgeHistoryCountView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. 
@@ -4780,7 +4865,12 @@ class EdgeHistoryCountView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4969,8 +5059,7 @@ class EdgeHistoryCountView(object): Optional[int]: """ -class NodeStateListI64(object): - +class NodeStateListI64(object): def __eq__(self, value): """Return self==value.""" @@ -5012,7 +5101,9 @@ class NodeStateListI64(object): NodeStateListI64: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: + def get( + self, node: NodeInput, default: Optional[list[int]] = None + ) -> Optional[list[int]]: """ Get value for node @@ -5138,7 +5229,7 @@ class NodeStateListI64(object): Iterator[list[int]]: Iterator over values """ -class HistoryDateTimeView(object): +class HistoryDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -5300,7 +5391,9 @@ class HistoryDateTimeView(object): HistoryDateTimeView: The layered view """ - def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: + def expanding( + self, step: int | str, alignment_unit: str | None = None + ) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -5319,7 +5412,9 @@ class HistoryDateTimeView(object): WindowSet: A `WindowSet` object. """ - def get(self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None) -> Optional[Optional[list[datetime]]]: + def get( + self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None + ) -> Optional[Optional[list[datetime]]]: """ Get value for node @@ -5439,7 +5534,12 @@ class HistoryDateTimeView(object): Nodes: The nodes """ - def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: + def rolling( + self, + window: int | str, + step: int | str | None = None, + alignment_unit: str | None = None, + ) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5619,8 +5719,7 @@ class HistoryDateTimeView(object): Optional[int]: """ -class NodeStateOptionListDateTime(object): - +class NodeStateOptionListDateTime(object): def __eq__(self, value): """Return self==value.""" @@ -5662,7 +5761,9 @@ class NodeStateOptionListDateTime(object): NodeStateOptionListDateTime: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None) -> Optional[Optional[list[datetime]]]: + def get( + self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None + ) -> Optional[Optional[list[datetime]]]: """ Get value for node @@ -5788,7 +5889,7 @@ class NodeStateOptionListDateTime(object): Iterator[Optional[list[datetime]]]: Iterator over values """ -class NodeTypeView(object): +class NodeTypeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -5848,7 +5949,9 @@ class NodeTypeView(object): NodeStateOptionStr: the computed `NodeState` """ - def get(self, node: NodeInput, default: Optional[Optional[str]] = None) -> Optional[Optional[str]]: + def get( + self, node: NodeInput, default: Optional[Optional[str]] = None + ) -> Optional[Optional[str]]: """ Get value for node @@ -5982,8 +6085,7 @@ class NodeTypeView(object): Iterator[Optional[str]]: Iterator over values """ -class NodeStateOptionStr(object): - +class NodeStateOptionStr(object): def __eq__(self, value): """Return self==value.""" @@ -6025,7 +6127,9 @@ class NodeStateOptionStr(object): NodeStateOptionStr: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Optional[str]] = None) -> Optional[Optional[str]]: + def get( + self, node: NodeInput, default: Optional[Optional[str]] = None + ) -> Optional[Optional[str]]: """ Get value for node @@ -6159,8 +6263,7 @@ class NodeStateOptionStr(object): Iterator[Optional[str]]: Iterator over values """ -class NodeStateListDateTime(object): - +class NodeStateListDateTime(object): def __eq__(self, value): """Return self==value.""" @@ -6202,7 +6305,9 @@ class NodeStateListDateTime(object): NodeStateListDateTime: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[list[datetime]] = None) -> Optional[list[datetime]]: + def get( + self, node: NodeInput, default: Optional[list[datetime]] = None + ) -> Optional[list[datetime]]: """ Get value for node @@ -6328,8 +6433,7 @@ class NodeStateListDateTime(object): Iterator[list[datetime]]: Iterator over values """ -class NodeStateWeightedSP(object): - +class NodeStateWeightedSP(object): def __eq__(self, value): """Return self==value.""" @@ -6360,7 +6464,9 @@ class NodeStateWeightedSP(object): def __repr__(self): """Return repr(self).""" - def get(self, node: NodeInput, default: Optional[Tuple[float, Nodes]] = None) -> Optional[Tuple[float, Nodes]]: + def get( + self, node: NodeInput, default: Optional[Tuple[float, Nodes]] = None + ) -> Optional[Tuple[float, Nodes]]: """ Get value for node @@ -6415,8 +6521,7 @@ class NodeStateWeightedSP(object): Iterator[Tuple[float, Nodes]]: Iterator over values """ -class NodeStateF64(object): - +class NodeStateF64(object): def __eq__(self, value): """Return self==value.""" @@ -6601,8 +6706,7 @@ class NodeStateF64(object): Iterator[float]: Iterator over values """ -class NodeStateNodes(object): - +class NodeStateNodes(object): def __eq__(self, value): """Return self==value.""" @@ -6688,8 +6792,7 @@ class 
NodeStateNodes(object): Iterator[Nodes]: Iterator over values """ -class NodeStateReachability(object): - +class NodeStateReachability(object): def __eq__(self, value): """Return self==value.""" @@ -6720,7 +6823,9 @@ class NodeStateReachability(object): def __repr__(self): """Return repr(self).""" - def get(self, node: NodeInput, default: Optional[list[Tuple[int, str]]] = None) -> Optional[list[Tuple[int, str]]]: + def get( + self, node: NodeInput, default: Optional[list[Tuple[int, str]]] = None + ) -> Optional[list[Tuple[int, str]]]: """ Get value for node @@ -6775,8 +6880,7 @@ class NodeStateReachability(object): Iterator[list[Tuple[int, str]]]: Iterator over values """ -class NodeStateListF64(object): - +class NodeStateListF64(object): def __eq__(self, value): """Return self==value.""" @@ -6807,7 +6911,9 @@ class NodeStateListF64(object): def __repr__(self): """Return repr(self).""" - def get(self, node: NodeInput, default: Optional[list[float]] = None) -> Optional[list[float]]: + def get( + self, node: NodeInput, default: Optional[list[float]] = None + ) -> Optional[list[float]]: """ Get value for node @@ -6862,8 +6968,7 @@ class NodeStateListF64(object): Iterator[list[float]]: Iterator over values """ -class NodeStateMotifs(object): - +class NodeStateMotifs(object): def __eq__(self, value): """Return self==value.""" @@ -6905,7 +7010,9 @@ class NodeStateMotifs(object): NodeStateMotifs: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: + def get( + self, node: NodeInput, default: Optional[list[int]] = None + ) -> Optional[list[int]]: """ Get value for node @@ -7031,8 +7138,7 @@ class NodeStateMotifs(object): Iterator[list[int]]: Iterator over values """ -class NodeStateHits(object): - +class NodeStateHits(object): def __eq__(self, value): """Return self==value.""" @@ -7074,7 +7180,9 @@ class NodeStateHits(object): NodeStateHits: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Tuple[float, float]] = None) -> Optional[Tuple[float, float]]: + def get( + self, node: NodeInput, default: Optional[Tuple[float, float]] = None + ) -> Optional[Tuple[float, float]]: """ Get value for node @@ -7200,8 +7308,7 @@ class NodeStateHits(object): Iterator[Tuple[float, float]]: Iterator over values """ -class NodeStateSEIR(object): - +class NodeStateSEIR(object): def __eq__(self, value): """Return self==value.""" @@ -7243,7 +7350,9 @@ class NodeStateSEIR(object): NodeStateSEIR: The k smallest values as a node state """ - def get(self, node: NodeInput, default: Optional[Infected] = None) -> Optional[Infected]: + def get( + self, node: NodeInput, default: Optional[Infected] = None + ) -> Optional[Infected]: """ Get value for node @@ -7369,8 +7478,7 @@ class NodeStateSEIR(object): Iterator[Infected]: Iterator over values """ -class NodeLayout(object): - +class NodeLayout(object): def __eq__(self, value): """Return self==value.""" @@ -7401,7 +7509,9 @@ class NodeLayout(object): def __repr__(self): """Return repr(self).""" - def get(self, node: NodeInput, default: Optional[list[float]] = None) -> Optional[list[float]]: + def get( + self, node: NodeInput, default: Optional[list[float]] = None + ) -> Optional[list[float]]: """ Get value for node @@ -7456,8 +7566,7 @@ class NodeLayout(object): Iterator[list[float]]: Iterator over values """ -class NodeStateF64String(object): - +class NodeStateF64String(object): def __eq__(self, value): """Return self==value.""" @@ -7488,7 +7597,9 @@ 
class NodeStateF64String(object): def __repr__(self): """Return repr(self).""" - def get(self, node: NodeInput, default: Optional[Tuple[float, str]] = None) -> Optional[Tuple[float, str]]: + def get( + self, node: NodeInput, default: Optional[Tuple[float, str]] = None + ) -> Optional[Tuple[float, str]]: """ Get value for node diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index bd615cda2f..638085f37f 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -23,10 +23,17 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ['VectorisedGraph', 'Document', 'Embedding', 'VectorSelection'] -class VectorisedGraph(object): +__all__ = ["VectorisedGraph", "Document", "Embedding", "VectorSelection"] - def edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: +class VectorisedGraph(object): + """Vectorised GRaphs 123""" + + def edges_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> VectorSelection: """ Search the top scoring edges according to `query` with no more than `limit` edges @@ -42,7 +49,12 @@ class VectorisedGraph(object): def empty_selection(self): """Return an empty selection of documents""" - def entities_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: + def entities_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> VectorSelection: """ Search the top scoring entities according to `query` with no more than `limit` entities @@ -55,7 +67,12 @@ class VectorisedGraph(object): VectorSelection: The vector selection resulting from the search """ - def nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: + def nodes_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> VectorSelection: """ Search the top scoring nodes according to `query` with no more than `limit` nodes @@ -68,7 +85,7 @@ class VectorisedGraph(object): VectorSelection: The vector selection resulting from the search """ -class Document(object): +class Document(object): """ A Document @@ -109,13 +126,11 @@ class Document(object): Optional[Any]: """ -class Embedding(object): - +class Embedding(object): def __repr__(self): """Return repr(self).""" -class VectorSelection(object): - +class VectorSelection(object): def add_edges(self, edges: list) -> None: """ Add all the documents associated with the `edges` to the current selection @@ -161,7 +176,9 @@ class VectorSelection(object): list[Edge]: list of edges in the current selection """ - def expand(self, hops: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: + def expand( + self, hops: int, window: Optional[Tuple[int | str, int | str]] = None + ) -> None: """ Add all the documents `hops` hops away to the selection @@ -178,7 +195,12 @@ class VectorSelection(object): None: """ - def expand_edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: + def expand_edges_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> None: """ Add the top 
`limit` adjacent edges with higher score for `query` to the selection @@ -193,7 +215,12 @@ class VectorSelection(object): None: """ - def expand_entities_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: + def expand_entities_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> None: """ Add the top `limit` adjacent entities with higher score for `query` to the selection @@ -215,7 +242,12 @@ class VectorSelection(object): None: """ - def expand_nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: + def expand_nodes_by_similarity( + self, + query: str | list, + limit: int, + window: Optional[Tuple[int | str, int | str]] = None, + ) -> None: """ Add the top `limit` adjacent nodes with higher score for `query` to the selection diff --git a/raphtory/src/python/packages/vectors.rs b/raphtory/src/python/packages/vectors.rs index db02094c8e..711e986f74 100644 --- a/raphtory/src/python/packages/vectors.rs +++ b/raphtory/src/python/packages/vectors.rs @@ -174,6 +174,7 @@ impl PyGraphView { } #[pyclass(name = "VectorisedGraph", module = "raphtory.vectors", frozen)] +/// VectorisedGraph object that contains the embedded documents that correspond to graph entities. pub struct PyVectorisedGraph(DynamicVectorisedGraph); impl From for PyVectorisedGraph { @@ -208,8 +209,8 @@ impl<'py> IntoPyObject<'py> for DynamicVectorSelection { } } -/// A vectorised graph, containing a set of documents positioned in the graph space and a selection -/// over those documents +/// A vectorised graph, containing a set of documents positioned in the graph space and allows you to get a selection +/// over those documents. #[pymethods] impl PyVectorisedGraph { /// Return an empty selection of documents @@ -217,15 +218,15 @@ impl PyVectorisedGraph { self.0.empty_selection() } - /// Search the top scoring entities according to `query` with no more than `limit` entities + /// Search the top scoring entities according to `query` with no more than `limit` entities. /// /// Args: - /// query (str | list): the text or the embedding to score against - /// limit (int): the maximum number of new entities to search - /// window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + /// query (str | list): The text or the embedding to score against. + /// limit (int): The maximum number of new entities in the result. + /// window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. /// /// Returns: - /// VectorSelection: The vector selection resulting from the search + /// VectorSelection: The vector selection resulting from the search. #[pyo3(signature = (query, limit, window=None))] pub fn entities_by_similarity( &self, @@ -239,15 +240,15 @@ impl PyVectorisedGraph { .entities_by_similarity(&embedding, limit, translate_window(window))?) } - /// Search the top scoring nodes according to `query` with no more than `limit` nodes + /// Search the top scoring nodes according to `query` with no more than `limit` nodes. 
/// /// Args: - /// query (str | list): the text or the embedding to score against - /// limit (int): the maximum number of new nodes to search - /// window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + /// query (str | list): The text or the embedding to score against. + /// limit (int): The maximum number of new nodes in the result. + /// window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. /// /// Returns: - /// VectorSelection: The vector selection resulting from the search + /// VectorSelection: The vector selection resulting from the search. #[pyo3(signature = (query, limit, window=None))] pub fn nodes_by_similarity( &self, @@ -264,12 +265,12 @@ impl PyVectorisedGraph { /// Search the top scoring edges according to `query` with no more than `limit` edges /// /// Args: - /// query (str | list): the text or the embedding to score against - /// limit (int): the maximum number of new edges to search - /// window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + /// query (str | list): The text or the embedding to score against. + /// limit (int): the maximum number of new edges in the results. + /// window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. /// /// Returns: - /// VectorSelection: The vector selection resulting from the search + /// VectorSelection: The vector selection resulting from the search. #[pyo3(signature = (query, limit, window=None))] pub fn edges_by_similarity( &self, @@ -291,10 +292,10 @@ pub struct PyVectorSelection(DynamicVectorSelection); /// over those documents #[pymethods] impl PyVectorSelection { - /// Return the nodes present in the current selection + /// Returns the nodes present in the current selection. /// /// Returns: - /// list[Node]: list of nodes in the current selection + /// list[Node]: List of nodes in the current selection. fn nodes(&self) -> Vec { self.0 .nodes() @@ -303,10 +304,10 @@ impl PyVectorSelection { .collect_vec() } - /// Return the edges present in the current selection + /// Returns the edges present in the current selection. /// /// Returns: - /// list[Edge]: list of edges in the current selection + /// list[Edge]: List of edges in the current selection. fn edges(&self) -> Vec { self.0 .edges() @@ -315,28 +316,28 @@ impl PyVectorSelection { .collect_vec() } - /// Return the documents present in the current selection + /// Returns the documents present in the current selection. /// /// Returns: - /// list[Document]: list of documents in the current selection + /// list[Document]: List of documents in the current selection. fn get_documents(&self) -> PyResult>> { Ok(self.0.get_documents()?) } - /// Return the documents alongside their scores present in the current selection + /// Returns the documents alongside their scores present in the current selection. /// /// Returns: - /// list[Tuple[Document, float]]: list of documents and scores + /// list[Tuple[Document, float]]: List of documents and scores. fn get_documents_with_scores(&self) -> PyResult, f32)>> { Ok(self.0.get_documents_with_scores()?) } - /// Add all the documents associated with the `nodes` to the current selection + /// Add all the documents associated with the `nodes` to the current selection. /// /// Documents added by this call are assumed to have a score of 0. 
/// /// Args: - /// nodes (list): a list of the node ids or nodes to add + /// nodes (list): List of the node ids or nodes to add. /// /// Returns: /// None: @@ -344,12 +345,12 @@ impl PyVectorSelection { self_.0.add_nodes(nodes) } - /// Add all the documents associated with the `edges` to the current selection + /// Add all the documents associated with the `edges` to the current selection. /// /// Documents added by this call are assumed to have a score of 0. /// /// Args: - /// edges (list): a list of the edge ids or edges to add + /// edges (list): List of the edge ids or edges to add. /// /// Returns: /// None: @@ -357,13 +358,13 @@ impl PyVectorSelection { self_.0.add_edges(edges) } - /// Add all the documents in `selection` to the current selection + /// Add all the documents in `selection` to the current selection. /// /// Args: - /// selection (VectorSelection): a selection to be added + /// selection (VectorSelection): Selection to be added. /// /// Returns: - /// VectorSelection: The selection with the new documents + /// VectorSelection: The selection with the new documents. pub fn append(mut self_: PyRefMut<'_, Self>, selection: &Self) -> DynamicVectorSelection { self_.0.append(&selection.0).clone() } From 55f6af02e2277863efc4cc382753d1814fe4a57a Mon Sep 17 00:00:00 2001 From: James Baross Date: Tue, 21 Oct 2025 16:32:00 +0100 Subject: [PATCH 02/13] add vector user guide --- docs/user-guide/algorithms/6_vectorisation.md | 8 +++ mkdocs.yml | 1 + python/python/raphtory/filter/__init__.pyi | 16 +++-- python/python/raphtory/vectors/__init__.pyi | 60 +++++++++---------- raphtory/src/python/packages/vectors.rs | 2 +- .../src/python/types/wrappers/document.rs | 2 +- 6 files changed, 52 insertions(+), 37 deletions(-) create mode 100644 docs/user-guide/algorithms/6_vectorisation.md diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md new file mode 100644 index 0000000000..236af9939b --- /dev/null +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -0,0 +1,8 @@ +# Vectorisation + +The [vectors][raphtory.vectors] module allows you to transform a graph into a collection of documents and vectorise those documents using an embedding function. + +Using this you can perform [semantic search](https://en.wikipedia.org/wiki/Semantic_search) over your graph data and build powerful AI systems with graph based RAG. + +## Documents + diff --git a/mkdocs.yml b/mkdocs.yml index 184dc72f51..e35180f582 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -120,6 +120,7 @@ nav: - user-guide/algorithms/3_node-algorithms.md - user-guide/algorithms/4_view-algorithms.md - user-guide/algorithms/5_community_detection.md + - user-guide/algorithms/6_vectorisation.md - Exporting: - user-guide/export/0_dummy_index.md - user-guide/export/1_intro.md diff --git a/python/python/raphtory/filter/__init__.pyi b/python/python/raphtory/filter/__init__.pyi index 4e3cd7cdba..5f33a18fcb 100644 --- a/python/python/raphtory/filter/__init__.pyi +++ b/python/python/raphtory/filter/__init__.pyi @@ -82,7 +82,9 @@ class PropertyFilterOps(object): filter.FilterExpr: """ - def fuzzy_search(self, prop_value: str, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: + def fuzzy_search( + self, prop_value: str, levenshtein_distance: int, prefix_match: bool + ) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. 
@@ -92,7 +94,7 @@ class PropertyFilterOps(object): prop_value (str): Property to match against. levenshtein_distance (int): Maximum levenshtein distance between the specified prop_value and the result. prefix_match (bool): Enable prefix matching. - + Returns: filter.FilterExpr: """ @@ -182,7 +184,9 @@ class NodeFilterBuilder(object): filter.FilterExpr: """ - def fuzzy_search(self, value, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: + def fuzzy_search( + self, value, levenshtein_distance: int, prefix_match: bool + ) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -280,7 +284,9 @@ class EdgeFilterOp(object): filter.FilterExpr: """ - def fuzzy_search(self, value, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: + def fuzzy_search( + self, value, levenshtein_distance: int, prefix_match: bool + ) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -290,7 +296,7 @@ class EdgeFilterOp(object): prop_value (str): Property to match against. levenshtein_distance (int): Maximum levenshtein distance between the specified prop_value and the result. prefix_match (bool): Enable prefix matching. - + Returns: filter.FilterExpr: """ diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index 638085f37f..28644fe9bc 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -26,7 +26,7 @@ from raphtory.iterables import * __all__ = ["VectorisedGraph", "Document", "Embedding", "VectorSelection"] class VectorisedGraph(object): - """Vectorised GRaphs 123""" + """VectorisedGraph object that contains the embedded documents that correspond to graph entities.""" def edges_by_similarity( self, @@ -38,12 +38,12 @@ class VectorisedGraph(object): Search the top scoring edges according to `query` with no more than `limit` edges Args: - query (str | list): the text or the embedding to score against - limit (int): the maximum number of new edges to search - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): the maximum number of new edges in the results. + window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. Returns: - VectorSelection: The vector selection resulting from the search + VectorSelection: The vector selection resulting from the search. """ def empty_selection(self): @@ -56,15 +56,15 @@ class VectorisedGraph(object): window: Optional[Tuple[int | str, int | str]] = None, ) -> VectorSelection: """ - Search the top scoring entities according to `query` with no more than `limit` entities + Search the top scoring entities according to `query` with no more than `limit` entities. Args: - query (str | list): the text or the embedding to score against - limit (int): the maximum number of new entities to search - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): The maximum number of new entities in the result. + window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. 
Returns: - VectorSelection: The vector selection resulting from the search + VectorSelection: The vector selection resulting from the search. """ def nodes_by_similarity( @@ -74,15 +74,15 @@ class VectorisedGraph(object): window: Optional[Tuple[int | str, int | str]] = None, ) -> VectorSelection: """ - Search the top scoring nodes according to `query` with no more than `limit` nodes + Search the top scoring nodes according to `query` with no more than `limit` nodes. Args: - query (str | list): the text or the embedding to score against - limit (int): the maximum number of new nodes to search - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): The maximum number of new nodes in the result. + window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. Returns: - VectorSelection: The vector selection resulting from the search + VectorSelection: The vector selection resulting from the search. """ class Document(object): @@ -133,12 +133,12 @@ class Embedding(object): class VectorSelection(object): def add_edges(self, edges: list) -> None: """ - Add all the documents associated with the `edges` to the current selection + Add all the documents associated with the `edges` to the current selection. Documents added by this call are assumed to have a score of 0. Args: - edges (list): a list of the edge ids or edges to add + edges (list): List of the edge ids or edges to add. Returns: None: @@ -146,12 +146,12 @@ class VectorSelection(object): def add_nodes(self, nodes: list) -> None: """ - Add all the documents associated with the `nodes` to the current selection + Add all the documents associated with the `nodes` to the current selection. Documents added by this call are assumed to have a score of 0. Args: - nodes (list): a list of the node ids or nodes to add + nodes (list): List of the node ids or nodes to add. Returns: None: @@ -159,21 +159,21 @@ class VectorSelection(object): def append(self, selection: VectorSelection) -> VectorSelection: """ - Add all the documents in `selection` to the current selection + Add all the documents in `selection` to the current selection. Args: - selection (VectorSelection): a selection to be added + selection (VectorSelection): Selection to be added. Returns: - VectorSelection: The selection with the new documents + VectorSelection: The selection with the new documents. """ def edges(self) -> list[Edge]: """ - Return the edges present in the current selection + Returns the edges present in the current selection. Returns: - list[Edge]: list of edges in the current selection + list[Edge]: List of edges in the current selection. """ def expand( @@ -264,24 +264,24 @@ class VectorSelection(object): def get_documents(self) -> list[Document]: """ - Return the documents present in the current selection + Returns the documents present in the current selection. Returns: - list[Document]: list of documents in the current selection + list[Document]: List of documents in the current selection. """ def get_documents_with_scores(self) -> list[Tuple[Document, float]]: """ - Return the documents alongside their scores present in the current selection + Returns the documents alongside their scores present in the current selection. Returns: - list[Tuple[Document, float]]: list of documents and scores + list[Tuple[Document, float]]: List of documents and scores. 
""" def nodes(self) -> list[Node]: """ - Return the nodes present in the current selection + Returns the nodes present in the current selection. Returns: - list[Node]: list of nodes in the current selection + list[Node]: List of nodes in the current selection. """ diff --git a/raphtory/src/python/packages/vectors.rs b/raphtory/src/python/packages/vectors.rs index 711e986f74..dd77282cb0 100644 --- a/raphtory/src/python/packages/vectors.rs +++ b/raphtory/src/python/packages/vectors.rs @@ -174,7 +174,7 @@ impl PyGraphView { } #[pyclass(name = "VectorisedGraph", module = "raphtory.vectors", frozen)] -/// VectorisedGraph object that contains the embedded documents that correspond to graph entities. +/// VectorisedGraph object that contains embedded documents that correspond to graph entities. pub struct PyVectorisedGraph(DynamicVectorisedGraph); impl From for PyVectorisedGraph { diff --git a/raphtory/src/python/types/wrappers/document.rs b/raphtory/src/python/types/wrappers/document.rs index a345f2f10a..da02da1121 100644 --- a/raphtory/src/python/types/wrappers/document.rs +++ b/raphtory/src/python/types/wrappers/document.rs @@ -24,7 +24,7 @@ impl From for Document { #[pymethods] impl PyDocument { - /// the document content + /// The document content /// /// Returns: /// str: From 2fa50fa2e12b96afd3e1e3f43d4c0bd5fa57d6e1 Mon Sep 17 00:00:00 2001 From: James Baross Date: Thu, 23 Oct 2025 17:21:16 +0100 Subject: [PATCH 03/13] add vector user guide --- docs/user-guide/algorithms/6_vectorisation.md | 21 +++++++++++++++++-- python/python/raphtory/vectors/__init__.pyi | 4 ++-- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 236af9939b..2749e2b4a1 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -1,8 +1,25 @@ # Vectorisation -The [vectors][raphtory.vectors] module allows you to transform a graph into a collection of documents and vectorise those documents using an embedding function. +The [vectors][raphtory.vectors] module allows you to transform a graph into a collection of documents and vectorise those documents using an embedding function. Since the AI space moves quickly, Raphtory allows you to plug in your preferred embedding model either locally or from an API. Using this you can perform [semantic search](https://en.wikipedia.org/wiki/Semantic_search) over your graph data and build powerful AI systems with graph based RAG. -## Documents +## Vectorise a graph + +To vectorise a graph you must create an embeddings function that takes a list of strings and returns a matching list of embeddings. This function can use any model or library you prefer, in this example we use the openai library and direct it to a local API compatible ollama service. + +```python +def get_embeddings(documents, model="text-embedding-3-small"): + client = OpenAI(base_url='http://localhost:11434/v1/' api_key="ollama") + return [client.embeddings.create(input=text, model=model).data[0].embedding for text in documents] + +v = g.vectorise(get_embeddings, nodes=node_document, edges=edge_document, verbose=True) +``` + +When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically creates documents for each node and edge entity in your graph, optionally you can create documents explicitly as properties and pass the property names to `vectorise()`. This is useful when you already have a deep understanding of your graphs semantics. 
Additionally, you can cache the embedded graph to disk to avoid having to recompute the vectors when nothing has changed. + +## Retrive documents + + +## Example diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index 28644fe9bc..7bb6dd65de 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -26,7 +26,7 @@ from raphtory.iterables import * __all__ = ["VectorisedGraph", "Document", "Embedding", "VectorSelection"] class VectorisedGraph(object): - """VectorisedGraph object that contains the embedded documents that correspond to graph entities.""" + """VectorisedGraph object that contains embedded documents that correspond to graph entities.""" def edges_by_similarity( self, @@ -102,7 +102,7 @@ class Document(object): @property def content(self) -> str: """ - the document content + The document content Returns: str: From aaf2087b1c88b8cb6ae0e97b6b2bb0f802507e9e Mon Sep 17 00:00:00 2001 From: James Baross Date: Thu, 23 Oct 2025 17:44:51 +0100 Subject: [PATCH 04/13] add vector user guide --- docs/user-guide/algorithms/6_vectorisation.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 2749e2b4a1..ec0fab6db2 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -18,7 +18,20 @@ v = g.vectorise(get_embeddings, nodes=node_document, edges=edge_document, verbos When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically creates documents for each node and edge entity in your graph, optionally you can create documents explicitly as properties and pass the property names to `vectorise()`. This is useful when you already have a deep understanding of your graphs semantics. Additionally, you can cache the embedded graph to disk to avoid having to recompute the vectors when nothing has changed. -## Retrive documents +## Retrieve documents + +You can retrieve relevant information from the [VectorisedGraph][raphtory.vectors.VectorisedGraph] by making selections. + +A [VectorSelection][raphtory.vectors.VectorSelection] is a general object for holding embedded documents, you can create an empty selection or perform a similarity query against a `VectorisedGraph` to populate a new selection. + +You can add to a selection by... + +Once you have a selection containing the information you want you can: + +- Get the associated graph entities using [nodes()][raphtory.vectors.VectorSelection.nodes] or [edges()][raphtory.vectors.VectorSelection.edges]. +- Get the associated documents using [get_documents()][raphtory.vectors.VectorSelection.get_documents] or [get_documents_with_scores()][raphtory.vectors.VectorSelection.get_documents_with_scores]. + +Each [Document][raphtory.vectors.Document] corresponds to unique entity in the graph, the contents of the asossiated document and it's vector representation. 
## Example From dad7e478bd3c68ef9cef11638f8c7eda7d91a9be Mon Sep 17 00:00:00 2001 From: James Baross Date: Fri, 24 Oct 2025 12:01:02 +0100 Subject: [PATCH 05/13] add vector user guide --- docs/user-guide/algorithms/6_vectorisation.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index ec0fab6db2..2047d945cb 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -8,13 +8,15 @@ Using this you can perform [semantic search](https://en.wikipedia.org/wiki/Seman To vectorise a graph you must create an embeddings function that takes a list of strings and returns a matching list of embeddings. This function can use any model or library you prefer, in this example we use the openai library and direct it to a local API compatible ollama service. -```python -def get_embeddings(documents, model="text-embedding-3-small"): +/// tab | :fontawesome-brands-python: Python +```{.python notest} +def get_embeddings(documents, model="embeddinggemma"): client = OpenAI(base_url='http://localhost:11434/v1/' api_key="ollama") return [client.embeddings.create(input=text, model=model).data[0].embedding for text in documents] v = g.vectorise(get_embeddings, nodes=node_document, edges=edge_document, verbose=True) ``` +/// When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically creates documents for each node and edge entity in your graph, optionally you can create documents explicitly as properties and pass the property names to `vectorise()`. This is useful when you already have a deep understanding of your graphs semantics. Additionally, you can cache the embedded graph to disk to avoid having to recompute the vectors when nothing has changed. @@ -24,15 +26,14 @@ You can retrieve relevant information from the [VectorisedGraph][raphtory.vector A [VectorSelection][raphtory.vectors.VectorSelection] is a general object for holding embedded documents, you can create an empty selection or perform a similarity query against a `VectorisedGraph` to populate a new selection. -You can add to a selection by... +You can add to a selection by combining existing selections or by adding new documents associated with specific nodes and edges by their IDs. Additionally, you can [expand][raphtory.vectors.VectorSelection.expand_entities_by_similarity] a selection by making similarity queries relative to the entities in the current selection, this uses the power of the graph relationships to constrain your query. Once you have a selection containing the information you want you can: - Get the associated graph entities using [nodes()][raphtory.vectors.VectorSelection.nodes] or [edges()][raphtory.vectors.VectorSelection.edges]. - Get the associated documents using [get_documents()][raphtory.vectors.VectorSelection.get_documents] or [get_documents_with_scores()][raphtory.vectors.VectorSelection.get_documents_with_scores]. -Each [Document][raphtory.vectors.Document] corresponds to unique entity in the graph, the contents of the asossiated document and it's vector representation. - +Each [Document][raphtory.vectors.Document] corresponds to unique entity in the graph, the contents of the associated document and it's vector representation. You can pull any of these out to retrieve information about an entity for a RAG system, compose a subgraph to analyse using Raphtory's algorithms, or feed into some more complex pipeline. 
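The snippet below is a minimal sketch of that selection workflow, assuming `v` is the `VectorisedGraph` created by `vectorise()` in the example above; the query strings and node IDs are purely illustrative and should be replaced with values from your own graph.

/// tab | :fontawesome-brands-python: Python
```{.python notest}
# start a selection from a similarity search over the node documents
selection = v.nodes_by_similarity("database servers", limit=5)

# pin extra entities by id and merge in a second, edge-based search
selection.add_nodes(["ServerA", "ServerB"])  # illustrative node ids
selection.append(v.edges_by_similarity("large file transfers", limit=5))

# grow the selection along graph relationships, still guided by a query
selection.expand_entities_by_similarity("database servers", limit=10)

# inspect what has been retrieved
for document, score in selection.get_documents_with_scores():
    print(round(score, 3), document.content)

# or go back to the graph entities themselves
matched_nodes = selection.nodes()
matched_edges = selection.edges()
```
///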
## Example From 5f584f22e52f12de1c8b29aec525a0e11c6d7825 Mon Sep 17 00:00:00 2001 From: James Baross Date: Fri, 24 Oct 2025 17:08:43 +0100 Subject: [PATCH 06/13] improved docstrings --- python/python/raphtory/__init__.pyi | 12 +-- python/python/raphtory/vectors/__init__.pyi | 29 +++--- raphtory/src/python/packages/vectors.rs | 89 ++++++++++--------- .../src/python/types/wrappers/document.rs | 18 ++-- raphtory/src/vectors/vector_selection.rs | 19 ++-- 5 files changed, 87 insertions(+), 80 deletions(-) diff --git a/python/python/raphtory/__init__.pyi b/python/python/raphtory/__init__.pyi index fd5fd418f9..c38983b992 100644 --- a/python/python/raphtory/__init__.pyi +++ b/python/python/raphtory/__init__.pyi @@ -743,14 +743,14 @@ class GraphView(object): verbose: bool = False, ) -> VectorisedGraph: """ - Create a VectorisedGraph from the current graph + Create a VectorisedGraph from the current graph. Args: - embedding (Callable[[list], list]): the embedding function to translate documents to embeddings - nodes (bool | str): if nodes have to be embedded or not or the custom template to use if a str is provided. Defaults to True. - edges (bool | str): if edges have to be embedded or not or the custom template to use if a str is provided. Defaults to True. - cache (str, optional): the path to use to store the cache for embeddings. - verbose (bool): whether or not to print logs reporting the progress. Defaults to False. + embedding (Callable[[list], list]): The embedding function to translate documents to embeddings. + nodes (bool | str): Enable for nodes to be embedded, disable for nodes to not be embedded or specify a custom document property to use if a string is provided. Defaults to True. + edges (bool | str): Enable for edges to be embedded, disable for edges to not be embedded or specify a custom document property to use if a string is provided. Defaults to True. + cache (str, optional): Path used to store the cache of embeddings. + verbose (bool): Enable to print logs reporting progress. Defaults to False. Returns: VectorisedGraph: A VectorisedGraph with all the documents/embeddings computed and with an initial empty selection diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index 7bb6dd65de..e235a8dded 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -39,7 +39,7 @@ class VectorisedGraph(object): Args: query (str | list): The text or the embedding to score against. - limit (int): the maximum number of new edges in the results. + limit (int): The maximum number of new edges in the results. window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. Returns: @@ -87,10 +87,10 @@ class VectorisedGraph(object): class Document(object): """ - A Document + A document corresponding to a graph entity. Used to generate embeddings. Args: - content (str): the document content + content (str): The document content. life (int | Tuple[int, int], optional): the optional lifespan for the document (single value corresponds to an event, a tuple corresponds to a window). @@ -102,7 +102,7 @@ class Document(object): @property def content(self) -> str: """ - The document content + The document content. Returns: str: @@ -111,16 +111,16 @@ class Document(object): @property def embedding(self) -> Optional[Embedding]: """ - the embedding + The embedding. 
Returns: - Optional[Embedding]: the embedding for the document if it was computed + Optional[Embedding]: The embedding for the document if it was computed. """ @property def entity(self) -> Optional[Any]: """ - the entity corresponding to the document + The entity corresponding to the document. Returns: Optional[Any]: @@ -133,7 +133,7 @@ class Embedding(object): class VectorSelection(object): def add_edges(self, edges: list) -> None: """ - Add all the documents associated with the `edges` to the current selection. + Add all the documents associated with the specified `edges` to the current selection. Documents added by this call are assumed to have a score of 0. @@ -146,7 +146,7 @@ class VectorSelection(object): def add_nodes(self, nodes: list) -> None: """ - Add all the documents associated with the `nodes` to the current selection. + Add all the documents associated with the specified `nodes` to the current selection. Documents added by this call are assumed to have a score of 0. @@ -225,10 +225,11 @@ class VectorSelection(object): Add the top `limit` adjacent entities with higher score for `query` to the selection The expansion algorithm is a loop with two steps on each iteration: - 1. All the entities 1 hop away of some of the entities included on the selection (and - not already selected) are marked as candidates. - 2. Those candidates are added to the selection in descending order according to the - similarity score obtained against the `query`. + + 1. All the entities 1 hop away of some of the entities included on the selection (and + not already selected) are marked as candidates. + 2. Those candidates are added to the selection in descending order according to the + similarity score obtained against the `query`. This loops goes on until the number of new entities reaches a total of `limit` entities or until no more documents are available @@ -251,7 +252,7 @@ class VectorSelection(object): """ Add the top `limit` adjacent nodes with higher score for `query` to the selection - This function has the same behavior as expand_entities_by_similarity but it only considers nodes. + This function has the same behaviour as expand_entities_by_similarity but it only considers nodes. Args: query (str | list): the text or the embedding to score against diff --git a/raphtory/src/python/packages/vectors.rs b/raphtory/src/python/packages/vectors.rs index dd77282cb0..0545a08860 100644 --- a/raphtory/src/python/packages/vectors.rs +++ b/raphtory/src/python/packages/vectors.rs @@ -136,17 +136,17 @@ impl TemplateConfig { #[pymethods] impl PyGraphView { - /// Create a VectorisedGraph from the current graph + /// Create a VectorisedGraph from the current graph. /// /// Args: - /// embedding (Callable[[list], list]): the embedding function to translate documents to embeddings - /// nodes (bool | str): if nodes have to be embedded or not or the custom template to use if a str is provided. Defaults to True. - /// edges (bool | str): if edges have to be embedded or not or the custom template to use if a str is provided. Defaults to True. - /// cache (str, optional): the path to use to store the cache for embeddings. - /// verbose (bool): whether or not to print logs reporting the progress. Defaults to False. + /// embedding (Callable[[list], list]): Specify the embedding function used to vectorise documents into embeddings. + /// nodes (bool | str): Enable for nodes to be embedded, disable for nodes to not be embedded or specify a custom document property to use if a string is provided. Defaults to True. 
+    ///     edges (bool | str): Enable for edges to be embedded, disable for edges to not be embedded or specify a custom document property to use if a string is provided. Defaults to True.
+    ///     cache (str, optional): Path used to store the cache of embeddings.
+    ///     verbose (bool): Enable to print logs reporting progress. Defaults to False.
     ///
     /// Returns:
-    ///     VectorisedGraph: A VectorisedGraph with all the documents/embeddings computed and with an initial empty selection
+    ///     VectorisedGraph: A VectorisedGraph with all the documents and their embeddings, with an initial empty selection.
     #[pyo3(signature = (embedding, nodes = TemplateConfig::Bool(true), edges = TemplateConfig::Bool(true), cache = None, verbose = false))]
     fn vectorise(
         &self,
@@ -209,21 +209,21 @@ impl<'py> IntoPyObject<'py> for DynamicVectorSelection {
     }
 }
 
-/// A vectorised graph, containing a set of documents positioned in the graph space and allows you to get a selection
-/// over those documents.
+/// A VectorisedGraph, containing a set of documents positioned in an embedding space. This object allows you to get a selection
+/// of those documents using a query and similarity scores.
 #[pymethods]
 impl PyVectorisedGraph {
-    /// Return an empty selection of documents
+    /// Return an empty selection of entities.
     fn empty_selection(&self) -> DynamicVectorSelection {
         self.0.empty_selection()
     }
 
-    /// Search the top scoring entities according to `query` with no more than `limit` entities.
+    /// Search the top similarity scoring entities matching a specified `query`, with no more than `limit` entities in the result.
     ///
     /// Args:
     ///     query (str | list): The text or the embedding to score against.
     ///     limit (int): The maximum number of new entities in the result.
-    ///     window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
     ///
     /// Returns:
     ///     VectorSelection: The vector selection resulting from the search.
@@ -240,12 +240,12 @@ impl PyVectorisedGraph {
             .entities_by_similarity(&embedding, limit, translate_window(window))?)
     }
 
-    /// Search the top scoring nodes according to `query` with no more than `limit` nodes.
+    /// Search the top similarity scoring nodes matching a specified `query`, with no more than `limit` nodes in the result.
     ///
     /// Args:
     ///     query (str | list): The text or the embedding to score against.
     ///     limit (int): The maximum number of new nodes in the result.
-    ///     window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
    ///
     /// Returns:
     ///     VectorSelection: The vector selection resulting from the search.
@@ -262,12 +262,12 @@ impl PyVectorisedGraph {
             .nodes_by_similarity(&embedding, limit, translate_window(window))?)
     }
 
-    /// Search the top scoring edges according to `query` with no more than `limit` edges
+    /// Search the top similarity scoring edges matching a specified `query`, with no more than `limit` edges in the result.
     ///
     /// Args:
     ///     query (str | list): The text or the embedding to score against.
-    ///     limit (int): the maximum number of new edges in the results.
-    ///     window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered.
+    ///     limit (int): The maximum number of new edges in the results.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
     ///
     /// Returns:
     ///     VectorSelection: The vector selection resulting from the search.
     #[pyo3(signature = (query, limit, window=None))]
     pub fn edges_by_similarity(
         &self,
@@ -288,7 +288,7 @@ impl PyVectorisedGraph {
 #[pyclass(name = "VectorSelection", module = "raphtory.vectors")]
 pub struct PyVectorSelection(DynamicVectorSelection);
 
-/// A vectorised graph, containing a set of documents positioned in the graph space and a selection
+/// A selection of documents from a VectorisedGraph, with operations to inspect and expand the selection
 /// over those documents
 #[pymethods]
 impl PyVectorSelection {
@@ -324,7 +324,7 @@ impl PyVectorSelection {
         Ok(self.0.get_documents()?)
     }
 
-    /// Returns the documents alongside their scores present in the current selection.
+    /// Returns the documents present in the current selection alongside their scores.
     ///
     /// Returns:
     ///     list[Tuple[Document, float]]: List of documents and scores.
@@ -332,7 +332,7 @@ impl PyVectorSelection {
         Ok(self.0.get_documents_with_scores()?)
     }
 
-    /// Add all the documents associated with the `nodes` to the current selection.
+    /// Add all the documents associated with the specified `nodes` to the current selection.
     ///
     /// Documents added by this call are assumed to have a score of 0.
     ///
@@ -345,7 +345,7 @@ impl PyVectorSelection {
         self_.0.add_nodes(nodes)
     }
 
-    /// Add all the documents associated with the `edges` to the current selection.
+    /// Add all the documents associated with the specified `edges` to the current selection.
     ///
     /// Documents added by this call are assumed to have a score of 0.
     ///
@@ -358,27 +358,27 @@ impl PyVectorSelection {
         self_.0.add_edges(edges)
     }
 
-    /// Add all the documents in `selection` to the current selection.
+    /// Add all the documents in a specified `selection` to the current selection.
     ///
     /// Args:
     ///     selection (VectorSelection): Selection to be added.
     ///
     /// Returns:
-    ///     VectorSelection: The selection with the new documents.
+    ///     VectorSelection: The combined selection.
     pub fn append(mut self_: PyRefMut<'_, Self>, selection: &Self) -> DynamicVectorSelection {
         self_.0.append(&selection.0).clone()
     }
 
-    /// Add all the documents `hops` hops away to the selection
+    /// Add all the documents a specified number of `hops` away from the selection.
     ///
-    /// Two documents A and B are considered to be 1 hop away of each other if they are on the same
-    /// entity or if they are on the same node/edge pair. Provided that, two nodes A and C are n
-    /// hops away of each other if there is a document B such that A is n - 1 hops away of B and B
+    /// Two documents A and B are considered to be 1 hop away from each other if they are on the same
+    /// entity or if they are on the same node/edge pair. Given this, two nodes A and C are n
+    /// hops away from each other if there is a document B such that A is n - 1 hops away from B and B
     /// is 1 hop away from C.
     ///
     /// Args:
-    ///     hops (int): the number of hops to carry out the expansion
-    ///     window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered
+    ///     hops (int): The number of hops to carry out the expansion.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
    ///
    /// Returns:
    ///     None:
@@ -390,18 +390,19 @@ impl PyVectorSelection {
     /// Add the top `limit` adjacent entities with higher score for `query` to the selection
     ///
     /// The expansion algorithm is a loop with two steps on each iteration:
-    ///   1. All the entities 1 hop away of some of the entities included on the selection (and
-    ///      not already selected) are marked as candidates.
-    ///   2. Those candidates are added to the selection in descending order according to the
-    ///      similarity score obtained against the `query`.
+    ///
+    /// 1. All the entities 1 hop away from some of the entities included in the selection (and
+    /// not already selected) are marked as candidates.
+    /// 2. Those candidates are added to the selection in descending order according to the
+    /// similarity score obtained against the `query`.
     ///
     /// This loop goes on until the number of new entities reaches a total of `limit`
     /// entities or until no more documents are available.
     ///
     /// Args:
-    ///     query (str | list): the text or the embedding to score against
-    ///     limit (int): the number of documents to add
-    ///     window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered
+    ///     query (str | list): The text or the embedding to score against.
+    ///     limit (int): The number of documents to add.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
     ///
     /// Returns:
     ///     None:
@@ -421,12 +422,12 @@ impl PyVectorSelection {
 
     /// Add the top `limit` adjacent nodes with higher score for `query` to the selection
     ///
-    /// This function has the same behavior as expand_entities_by_similarity but it only considers nodes.
+    /// This function has the same behaviour as expand_entities_by_similarity but it only considers nodes.
     ///
     /// Args:
-    ///     query (str | list): the text or the embedding to score against
-    ///     limit (int): the maximum number of new nodes to add
-    ///     window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered
+    ///     query (str | list): The text or the embedding to score against.
+    ///     limit (int): The maximum number of new nodes to add.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
     ///
     /// Returns:
     ///     None:
@@ -446,12 +447,12 @@ impl PyVectorSelection {
 
     /// Add the top `limit` adjacent edges with higher score for `query` to the selection
     ///
-    /// This function has the same behavior as expand_entities_by_similarity but it only considers edges.
+    /// This function has the same behaviour as expand_entities_by_similarity but it only considers edges.
     ///
     /// Args:
-    ///     query (str | list): the text or the embedding to score against
-    ///     limit (int): the maximum number of new edges to add
-    ///     window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered
+    ///     query (str | list): The text or the embedding to score against.
+    ///     limit (int): The maximum number of new edges to add.
+    ///     window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered.
/// /// Returns: /// None: diff --git a/raphtory/src/python/types/wrappers/document.rs b/raphtory/src/python/types/wrappers/document.rs index da02da1121..17000e8366 100644 --- a/raphtory/src/python/types/wrappers/document.rs +++ b/raphtory/src/python/types/wrappers/document.rs @@ -5,13 +5,13 @@ use crate::{ }; use pyo3::{prelude::*, IntoPyObjectExt}; -/// A Document +/// A document corresponding to a graph entity. Used to generate embeddings. /// /// Args: -/// content (str): the document content -/// life (int | Tuple[int, int], optional): the optional lifespan for the document (single value +/// content (str): The document content. +/// life (int | Tuple[int, int], optional): The optional lifespan of the document. A single value /// corresponds to an event, a tuple corresponds to a -/// window). +/// window. #[pyclass(name = "Document", module = "raphtory.vectors", frozen)] #[derive(Clone)] pub struct PyDocument(pub(crate) Document); @@ -24,16 +24,16 @@ impl From for Document { #[pymethods] impl PyDocument { - /// The document content + /// The document content. /// /// Returns: - /// str: + /// str: Content of the document. #[getter] fn content(&self) -> &str { &self.0.content } - /// the entity corresponding to the document + /// The graph entity corresponding to the document. /// /// Returns: /// Optional[Any]: @@ -45,10 +45,10 @@ impl PyDocument { } } - /// the embedding + /// The embedding of the document. /// /// Returns: - /// Optional[Embedding]: the embedding for the document if it was computed + /// Optional[Embedding]: The embedding of the document if it was computed. #[getter] fn embedding(&self) -> PyEmbedding { PyEmbedding(self.0.embedding.clone()) diff --git a/raphtory/src/vectors/vector_selection.rs b/raphtory/src/vectors/vector_selection.rs index dc2dbe8a94..461f57329c 100644 --- a/raphtory/src/vectors/vector_selection.rs +++ b/raphtory/src/vectors/vector_selection.rs @@ -195,17 +195,22 @@ impl VectorSelection { /// Add the top `limit` adjacent entities with higher score for `query` to the selection /// /// The expansion algorithm is a loop with two steps on each iteration: - /// 1. All the entities 1 hop away of some of the entities included on the selection (and - /// not already selected) are marked as candidates. - /// 2. Those candidates are added to the selection in descending order according to the - /// similarity score obtained against the `query`. + /// + /// 1. All the entities 1 hop away of some of the entities included on the selection (and + /// not already selected) are marked as candidates. + /// 2. Those candidates are added to the selection in descending order according to the + /// similarity score obtained against the `query`. 
/// /// This loops goes on until the number of new entities reaches a total of `limit` /// entities or until no more documents are available /// - /// # Arguments - /// * query - the embedding to score against - /// * window - the window where documents need to belong to in order to be considered + /// Args: + /// query (str | list): the text or the embedding to score against + /// limit (int): the number of documents to add + /// window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + /// + /// Returns: + /// None: pub fn expand_entities_by_similarity( &mut self, query: &Embedding, From 6be4588240a957f4e19fb23d6cec12381fa46a74 Mon Sep 17 00:00:00 2001 From: James Baross Date: Mon, 27 Oct 2025 16:42:06 +0000 Subject: [PATCH 07/13] clarify document format --- docs/user-guide/algorithms/6_vectorisation.md | 50 ++++++++++++++++++- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 2047d945cb..5d982442b3 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -11,14 +11,60 @@ To vectorise a graph you must create an embeddings function that takes a list of /// tab | :fontawesome-brands-python: Python ```{.python notest} def get_embeddings(documents, model="embeddinggemma"): - client = OpenAI(base_url='http://localhost:11434/v1/' api_key="ollama") + client = OpenAI(base_url='http://localhost:11434/v1/', api_key="ollama") return [client.embeddings.create(input=text, model=model).data[0].embedding for text in documents] v = g.vectorise(get_embeddings, nodes=node_document, edges=edge_document, verbose=True) ``` /// -When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically creates documents for each node and edge entity in your graph, optionally you can create documents explicitly as properties and pass the property names to `vectorise()`. This is useful when you already have a deep understanding of your graphs semantics. Additionally, you can cache the embedded graph to disk to avoid having to recompute the vectors when nothing has changed. +When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically creates documents for each node and edge entity in your graph, optionally you can provide template strings to format documents and pass these to `vectorise()`. This is useful when you know which properties are semantically relevant or want to present information in a specific format when retrieved by a human or machine user. Additionally, you can cache the embedded graph to disk to avoid having to recompute the vectors when nothing has changed. + +### Document templates + +The templates for entity documents follow a subset of [Jinja](https://jinja.palletsprojects.com/en/stable/templates/) and graph attributes and properties are exposed so that you can use them in template expressions. + +Most attributes of graph entities are exposed and can be used in Jinja expressions. The nesting of attributes reflects the Python interface and the final result of any chain such as `properties.prop_name` or `src.name` should be a string. 
+ +For example, in a money laundering case a simple template might be: + +/// tab | :fontawesome-brands-python: Python +```{.python notest} +node_document = """ +{% if properties.type == "Company" %} +{{ name }} is a company with the following details: +Employee count: {{ properties.employeeCount}} +Account: {{ properties.account}} +Location: {{ properties.location}} +Jurisdiction: {{ properties.jurisdiction}} +Partnerships: {{ properties.partnerships}} +{% endif %} + +{% if properties.type == "Person" %} +{{ name }} is a director with the follwing details: +Age: {{ properties.age }} +Mobile: {{ properties.mobile }} +Home address: {{ properties.homeAddress }} +Email: {{ properties.email }} +{% endif %} + +{% if properties.type == "Report" %} +{{name}} is a suspicious activity report with the following content: +{{ properties.document }} +{% endif %} +""" + +edge_document = """ +{% if layers[0] == "report" %} +{{ src.name }} was raised against {{ dst.name}} +{% elif layers[0] == "director" %} +{{ dst.name }} is a director of {{ src.name }} +{% else %} +{{ src.name }} transferred ${{ properties.amount_usd }} to {{ dst.name }} +{% endif %} +""" +``` +/// ## Retrieve documents From cd60c59ce6bda7c5dc1b9e1ade55e394878162c8 Mon Sep 17 00:00:00 2001 From: James Baross Date: Tue, 28 Oct 2025 11:51:37 +0000 Subject: [PATCH 08/13] simplest rag-like example --- docs/user-guide/algorithms/6_vectorisation.md | 72 ++++++++++++++++++- 1 file changed, 71 insertions(+), 1 deletion(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 5d982442b3..525cb48cfb 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -81,5 +81,75 @@ Once you have a selection containing the information you want you can: Each [Document][raphtory.vectors.Document] corresponds to unique entity in the graph, the contents of the associated document and it's vector representation. You can pull any of these out to retrieve information about an entity for a RAG system, compose a subgraph to analyse using Raphtory's algorithms, or feed into some more complex pipeline. 
-## Example +## Asking questions about your network +Using the [Network example]() from XYZ you can set up a graph and add some simple AI tools in order to create a `VectorisedGraph`: + +/// tab | :fontawesome-brands-python: Python +```{.python notest} +from raphtory import Graph +import pandas as pd +from openai import OpenAI + +server_edges_df = pd.read_csv("./network_traffic_edges.csv") +server_edges_df["timestamp"] = pd.to_datetime(server_edges_df["timestamp"]) + +server_nodes_df = pd.read_csv("./network_traffic_nodes.csv") +server_nodes_df["timestamp"] = pd.to_datetime(server_nodes_df["timestamp"]) + +traffic_graph = Graph() +traffic_graph.load_edges_from_pandas( + df=server_edges_df, + src="source", + dst="destination", + time="timestamp", + properties=["data_size_MB"], + layer_col="transaction_type", + metadata=["is_encrypted"], + shared_metadata={"datasource": "./network_traffic_edges.csv"}, +) +traffic_graph.load_nodes_from_pandas( + df=server_nodes_df, + id="server_id", + time="timestamp", + properties=["OS_version", "primary_function", "uptime_days"], + metadata=["server_name", "hardware_type"], + shared_metadata={"datasource": "./network_traffic_edges.csv"}, +) + +def get_embeddings(documents, model="embeddinggemma"): + client = OpenAI(base_url='http://localhost:11434/v1/', api_key="ollama") + return [client.embeddings.create(input=text, model=model).data[0].embedding for text in documents] + +def send_query_with_docs(query: str, selection): + formatted_docs = "\n".join(doc.content for doc in selection.get_documents()) + instruct_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama") + instructions = f"You are helpful assistant. Answer the user question using the following context:\n{formatted_docs}" + + completion = instruct_client.chat.completions.create( + model="gemma3", + messages = [ + {"role": "system", "content": f"You are helpful assistant. Answer the user question using the following context:\n{formatted_docs}"}, + {"role": "user", "content": query} + ] + ) + + return completion.choices[0].message.content + +v = traffic_graph.vectorise(get_embeddings, verbose=True) +``` +/// + +Using this `VectorisedGraph` you can perform similarity queries and feed the results into an LLM to ground it's responses in your data. + +/// tab | :fontawesome-brands-python: Python +```{.python notest} +query = "What's the status of my linux boxes?" + +s = v.nodes_by_similarity(query, limit=3) + +print(send_query_with_docs(query, s)) +``` +/// + +However, you must note that LLM responses are still statistical and variations will occur. In production systems you \ No newline at end of file From 4fd4dd4a3311b16616dc4efcb4dbcbb02404f3bf Mon Sep 17 00:00:00 2001 From: James Baross Date: Tue, 28 Oct 2025 13:25:46 +0000 Subject: [PATCH 09/13] simplest rag-like example --- docs/user-guide/algorithms/6_vectorisation.md | 81 ++++++++----------- 1 file changed, 34 insertions(+), 47 deletions(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 525cb48cfb..4e7fdbec58 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -26,46 +26,6 @@ The templates for entity documents follow a subset of [Jinja](https://jinja.pall Most attributes of graph entities are exposed and can be used in Jinja expressions. The nesting of attributes reflects the Python interface and the final result of any chain such as `properties.prop_name` or `src.name` should be a string. 
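As a brief, hedged illustration, the template below assumes a graph whose nodes carry a `primary_function` property and whose edges carry a `data_size_MB` property (matching the network traffic example used later on this page); swap in the property names from your own graph. It also shows the optional `cache` argument of `vectorise()`, which stores embeddings on disk so that unchanged documents are not re-embedded on later runs.

/// tab | :fontawesome-brands-python: Python
```{.python notest}
node_document = """
{{ name }} is a server whose primary function is {{ properties.primary_function }}.
"""

edge_document = """
{{ src.name }} sent {{ properties.data_size_MB }}MB of data to {{ dst.name }}.
"""

v = g.vectorise(
    get_embeddings,
    nodes=node_document,
    edges=edge_document,
    cache="embedding_cache",  # illustrative path; embeddings are reused from here when nothing has changed
    verbose=True,
)
```
///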
-For example, in a money laundering case a simple template might be: - -/// tab | :fontawesome-brands-python: Python -```{.python notest} -node_document = """ -{% if properties.type == "Company" %} -{{ name }} is a company with the following details: -Employee count: {{ properties.employeeCount}} -Account: {{ properties.account}} -Location: {{ properties.location}} -Jurisdiction: {{ properties.jurisdiction}} -Partnerships: {{ properties.partnerships}} -{% endif %} - -{% if properties.type == "Person" %} -{{ name }} is a director with the follwing details: -Age: {{ properties.age }} -Mobile: {{ properties.mobile }} -Home address: {{ properties.homeAddress }} -Email: {{ properties.email }} -{% endif %} - -{% if properties.type == "Report" %} -{{name}} is a suspicious activity report with the following content: -{{ properties.document }} -{% endif %} -""" - -edge_document = """ -{% if layers[0] == "report" %} -{{ src.name }} was raised against {{ dst.name}} -{% elif layers[0] == "director" %} -{{ dst.name }} is a director of {{ src.name }} -{% else %} -{{ src.name }} transferred ${{ properties.amount_usd }} to {{ dst.name }} -{% endif %} -""" -``` -/// - ## Retrieve documents You can retrieve relevant information from the [VectorisedGraph][raphtory.vectors.VectorisedGraph] by making selections. @@ -83,7 +43,7 @@ Each [Document][raphtory.vectors.Document] corresponds to unique entity in the g ## Asking questions about your network -Using the [Network example]() from XYZ you can set up a graph and add some simple AI tools in order to create a `VectorisedGraph`: +Using the Network example from the [ingestion using dataframes](../ingestion/3_dataframes.md) discussion you can set up a graph and add some simple AI tools in order to create a `VectorisedGraph`: /// tab | :fontawesome-brands-python: Python ```{.python notest} @@ -123,17 +83,16 @@ def get_embeddings(documents, model="embeddinggemma"): def send_query_with_docs(query: str, selection): formatted_docs = "\n".join(doc.content for doc in selection.get_documents()) - instruct_client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama") + client = OpenAI(base_url="http://localhost:11434/v1/", api_key="ollama") instructions = f"You are helpful assistant. Answer the user question using the following context:\n{formatted_docs}" - completion = instruct_client.chat.completions.create( + completion = client.chat.completions.create( model="gemma3", messages = [ {"role": "system", "content": f"You are helpful assistant. Answer the user question using the following context:\n{formatted_docs}"}, {"role": "user", "content": query} ] ) - return completion.choices[0].message.content v = traffic_graph.vectorise(get_embeddings, verbose=True) @@ -146,10 +105,38 @@ Using this `VectorisedGraph` you can perform similarity queries and feed the res ```{.python notest} query = "What's the status of my linux boxes?" -s = v.nodes_by_similarity(query, limit=3) +node_selection = v.nodes_by_similarity(query, limit=3) -print(send_query_with_docs(query, s)) +print(send_query_with_docs(query, node_selection)) ``` /// -However, you must note that LLM responses are still statistical and variations will occur. In production systems you \ No newline at end of file +However, you must always be aware that LLM responses are still statistical and variations will occur. In production systems you may want to use a structured output tool to enforce a specific format. + +The output of the example query should be similar to the following: + +!!! 
Output + ```output + Okay, here’s a rundown of the status of your Linux boxes as of today, September 3, 2023: + + * **ServerA (Alpha):** + * Datasource: ./network_traffic_edges.csv + * Hardware Type: Blade Server + * OS Version: Ubuntu 20.04 (Changed Sep 1, 2023 08:00) + * Primary Function: Database + * Uptime: 120 days + * **ServerD (Delta):** + * Datasource: ./network_traffic_edges.csv + * Hardware Type: Tower Server + * OS Version: Ubuntu 20.04 (Changed Sep 1, 2023 08:15) + * Primary Function: Application Server + * Uptime: 60 days + * **ServerE (Echo):** + * Datasource: ./network_traffic_edges.csv + * Hardware Type: Rack Server + * OS Version: Red Hat 8.1 (Changed Sep 1, 2023 08:20) + * Primary Function: Backup + * Uptime: 30 days + + Do you need any more details about any of these servers? + ``` From ee72b40d0445905e37ffc264a520baa7a3c7f731 Mon Sep 17 00:00:00 2001 From: James Baross Date: Tue, 28 Oct 2025 13:32:06 +0000 Subject: [PATCH 10/13] fix misc docs errors --- docs/user-guide/views/2_time.md | 2 +- docs/user-guide/views/3_layer.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/user-guide/views/2_time.md b/docs/user-guide/views/2_time.md index c13d5f4aa5..77bfd3890a 100644 --- a/docs/user-guide/views/2_time.md +++ b/docs/user-guide/views/2_time.md @@ -38,7 +38,7 @@ While `before()` and `after()` are more useful for continuous time datasets, `at In the example below we print the degree of `Lome` across the full dataset, before 12:17 on the 13th of June, and after 9:07 on the 30th of June. We also use two time functions here, [start_date_time][raphtory.GraphView.start_date_time] and [end_date_time][raphtory.GraphView.end_date_time], which return information about a view. !!! note - In this code example we have called the `before()` on the graph and `after()` on the node. This is important, as there are some subtle differences in outcomes that depend on where these functions are called. This is discussed in detail [below](2_time.md#traversing-the-graph-with-views). + In this code example we have called the `before()` on the graph and `after()` on the node. This is important, as there are some subtle differences in outcomes that depend on where these functions are called. This is discussed in detail [below](2_time.md#propagation-of-time-filters). /// tab | :fontawesome-brands-python: Python ```python diff --git a/docs/user-guide/views/3_layer.md b/docs/user-guide/views/3_layer.md index baccb5d554..11aa9f629f 100644 --- a/docs/user-guide/views/3_layer.md +++ b/docs/user-guide/views/3_layer.md @@ -6,7 +6,7 @@ Before reading this topic, please ensure you are familiar with: - [Edge layers](../ingestion/2_direct-updates.md#edge-layers) - [Exploded Edges](../querying/4_edge-metrics.md#exploded-edges) -- [Traversing graphs](2_time.md#traversing-the-graph-with-views) +- [Traversing graphs](2_time.md#propagation-of-time-filters) ## Creating layers views @@ -81,7 +81,7 @@ assert str(f"Total weight across Grooming and Resting between {start_day} and {e ## Traversing the graph with layers -Expanding on the example from [the time views](2_time.md#traversing-the-graph-with-views), if you wanted to look at which neighbours LOME has groomed, followed by who those monkeys have rested with, then you could write the following query. +Expanding on the example from [the time views](2_time.md#propagation-of-time-filters), if you wanted to look at which neighbours LOME has groomed, followed by who those monkeys have rested with, then you could write the following query. !!! 
note Similar to the time based filters, if a layer view is applied to the graph then all extracted entities will have this view applied to them. However, if the layer view is applied to a node or edge, it will only last until you have moved to a new node. From dbb805773c6ad948108d87c9d82166570dcef9b0 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 28 Oct 2025 14:35:06 +0000 Subject: [PATCH 11/13] chore: apply tidy-public auto-fixes --- python/python/raphtory/__init__.pyi | 568 ++++-------------- .../python/raphtory/algorithms/__init__.pyi | 168 +----- python/python/raphtory/filter/__init__.pyi | 99 ++- python/python/raphtory/graph_gen/__init__.pyi | 8 +- .../python/raphtory/graph_loader/__init__.pyi | 16 +- python/python/raphtory/graphql/__init__.pyi | 179 ++---- python/python/raphtory/iterables/__init__.pyi | 261 +++++--- .../python/raphtory/node_state/__init__.pyi | 293 +++------ python/python/raphtory/vectors/__init__.pyi | 123 ++-- 9 files changed, 523 insertions(+), 1192 deletions(-) diff --git a/python/python/raphtory/__init__.pyi b/python/python/raphtory/__init__.pyi index c38983b992..74a5d749e8 100644 --- a/python/python/raphtory/__init__.pyi +++ b/python/python/raphtory/__init__.pyi @@ -1,7 +1,6 @@ """ Raphtory graph analytics library """ - from __future__ import annotations ############################################################################### @@ -27,42 +26,8 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "GraphView", - "Graph", - "PersistentGraph", - "Node", - "Nodes", - "PathFromNode", - "PathFromGraph", - "MutableNode", - "Edge", - "Edges", - "NestedEdges", - "MutableEdge", - "Properties", - "PyPropValueList", - "Metadata", - "TemporalProperties", - "PropertiesView", - "TemporalProp", - "WindowSet", - "IndexSpecBuilder", - "IndexSpec", - "version", - "graphql", - "algorithms", - "graph_loader", - "graph_gen", - "vectors", - "node_state", - "filter", - "iterables", - "nullmodels", - "plottingutils", -] - -class GraphView(object): +__all__ = ['GraphView', 'Graph', 'PersistentGraph', 'Node', 'Nodes', 'PathFromNode', 'PathFromGraph', 'MutableNode', 'Edge', 'Edges', 'NestedEdges', 'MutableEdge', 'Properties', 'PyPropValueList', 'Metadata', 'TemporalProperties', 'PropertiesView', 'TemporalProp', 'WindowSet', 'IndexSpecBuilder', 'IndexSpec', 'version', 'graphql', 'algorithms', 'graph_loader', 'graph_gen', 'vectors', 'node_state', 'filter', 'iterables', 'nullmodels', 'plottingutils'] +class GraphView(object): """Graph view is a read-only version of a graph at a certain point in time.""" def __eq__(self, value): @@ -272,9 +237,7 @@ class GraphView(object): GraphView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -484,12 +447,7 @@ class GraphView(object): Properties: Properties paired with their names """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -635,14 +593,7 @@ class GraphView(object): GraphView: Returns the subgraph """ - def to_networkx( - self, - explode_edges: bool = False, - include_node_properties: bool = True, - include_edge_properties: bool = True, - include_update_history: bool = True, - include_property_history: bool = True, - ) -> nx.MultiDiGraph: + def to_networkx(self, explode_edges: bool = False, include_node_properties: bool = True, include_edge_properties: bool = True, include_update_history: bool = True, include_property_history: bool = True) -> nx.MultiDiGraph: """ Returns a graph with NetworkX. @@ -661,19 +612,7 @@ class GraphView(object): nx.MultiDiGraph: A Networkx MultiDiGraph. """ - def to_pyvis( - self, - explode_edges: bool = False, - edge_color: str = "#000000", - shape: str = "dot", - node_image: Optional[str] = None, - edge_weight: Optional[str] = None, - edge_label: Optional[str] = None, - colour_nodes_by_type: bool = False, - directed: bool = True, - notebook: bool = False, - **kwargs: Any, - ) -> pyvis.network.Network: + def to_pyvis(self, explode_edges: bool = False, edge_color: str = '#000000', shape: str = 'dot', node_image: Optional[str] = None, edge_weight: Optional[str] = None, edge_label: Optional[str] = None, colour_nodes_by_type: bool = False, directed: bool = True, notebook: bool = False, **kwargs: Any) -> pyvis.network.Network: """ Draw a graph with PyVis. Pyvis is a required dependency. If you intend to use this function make sure that you install Pyvis @@ -734,26 +673,19 @@ class GraphView(object): GraphView: The layered view """ - def vectorise( - self, - embedding: Callable[[list], list], - nodes: bool | str = True, - edges: bool | str = True, - cache: Optional[str] = None, - verbose: bool = False, - ) -> VectorisedGraph: + def vectorise(self, embedding: Callable[[list], list], nodes: bool | str = True, edges: bool | str = True, cache: Optional[str] = None, verbose: bool = False) -> VectorisedGraph: """ Create a VectorisedGraph from the current graph. Args: - embedding (Callable[[list], list]): The embedding function to translate documents to embeddings. + embedding (Callable[[list], list]): Specify the embedding function used to vectorise documents into embeddings. nodes (bool | str): Enable for nodes to be embedded, disable for nodes to not be embedded or specify a custom document property to use if a string is provided. Defaults to True. edges (bool | str): Enable for edges to be embedded, disable for edges to not be embedded or specify a custom document property to use if a string is provided. Defaults to True. cache (str, optional): Path used to store the cache of embeddings. verbose (bool): Enable to print logs reporting progress. Defaults to False. Returns: - VectorisedGraph: A VectorisedGraph with all the documents/embeddings computed and with an initial empty selection + VectorisedGraph: A VectorisedGraph with all the documents and their embeddings, with an initial empty selection. """ def window(self, start: TimeInput, end: TimeInput) -> GraphView: @@ -777,7 +709,7 @@ class GraphView(object): Optional[int]: """ -class Graph(GraphView): +class Graph(GraphView): """ A temporal graph with event semantics. @@ -788,16 +720,10 @@ class Graph(GraphView): def __new__(cls, num_shards: Optional[int] = None) -> Graph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): ... 
- def add_edge( - self, - timestamp: TimeInput, - src: str | int, - dst: str | int, - properties: Optional[PropInput] = None, - layer: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableEdge: + def __reduce__(self): + ... + + def add_edge(self, timestamp: TimeInput, src: str|int, dst: str|int, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableEdge: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -830,14 +756,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableNode: + def add_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: """ Adds a new node with the given id and properties to the graph. @@ -855,12 +774,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_properties( - self, - timestamp: TimeInput, - properties: PropInput, - secondary_index: Optional[int] = None, - ) -> None: + def add_properties(self, timestamp: TimeInput, properties: PropInput, secondary_index: Optional[int] = None) -> None: """ Adds properties to the graph. @@ -937,14 +851,7 @@ class Graph(GraphView): None: """ - def create_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableNode: + def create_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -974,7 +881,7 @@ class Graph(GraphView): Graph: """ - def edge(self, src: str | int, dst: str | int) -> MutableEdge: + def edge(self, src: str|int, dst: str|int) -> MutableEdge: """ Gets the edge with the specified source and destination nodes @@ -1067,9 +974,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_edges_as( - self, edges: List[Edge], new_ids: List[Tuple[int, int]], merge: bool = False - ) -> None: + def import_edges_as(self, edges: List[Edge], new_ids: List[Tuple[int, int]], merge: bool = False) -> None: """ Import multiple edges into the graph with new ids. @@ -1104,9 +1009,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_node_as( - self, node: Node, new_id: str | int, merge: bool = False - ) -> MutableNode: + def import_node_as(self, node: Node, new_id: str|int, merge: bool = False) -> MutableNode: """ Import a single node into the graph with new id. @@ -1141,9 +1044,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_nodes_as( - self, nodes: List[Node], new_ids: List[str | int], merge: bool = False - ) -> None: + def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: """ Import multiple nodes into the graph with new ids. 
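For orientation, here is a minimal, hedged sketch of the mutation API whose stubs are reformatted above (add_node/add_edge and the mutable handles they return). The node ids, timestamps, layer and property names are illustrative only:

    from raphtory import Graph

    g = Graph()
    # add_node/add_edge take a timestamp first, then the entity id(s)
    g.add_node(1, "alice", properties={"age": 30}, node_type="person")
    g.add_node(1, "bob", node_type="person")
    g.add_edge(2, "alice", "bob", properties={"weight": 1.0}, layer="follows")

    # add_edge returns a MutableEdge, and edge() retrieves the same handle later,
    # so further time-stamped property updates can be applied to it
    e = g.edge("alice", "bob")
    e.add_updates(3, properties={"weight": 2.0}, layer="follows")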
@@ -1188,16 +1089,7 @@ class Graph(GraphView): Graph: the loaded graph with initialised cache """ - def load_edge_props_from_pandas( - self, - df: DataFrame, - src: str, - dst: str, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edge properties from a Pandas DataFrame. @@ -1217,16 +1109,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_parquet( - self, - parquet_path: str, - src: str, - dst: str, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edge properties from parquet file @@ -1246,18 +1129,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -1279,18 +1151,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges from a Parquet file into the graph. @@ -1324,15 +1185,7 @@ class Graph(GraphView): Graph: """ - def load_node_props_from_pandas( - self, - df: DataFrame, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load node properties from a Pandas DataFrame. @@ -1351,15 +1204,7 @@ class Graph(GraphView): GraphError: If the operation fails. 
""" - def load_node_props_from_parquet( - self, - parquet_path: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load node properties from a parquet file. @@ -1378,17 +1223,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_pandas( - self, - df: DataFrame, - time: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -1409,17 +1244,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_parquet( - self, - parquet_path: str, - time: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load nodes from a Parquet file into the graph. @@ -1440,7 +1265,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def node(self, id: str | int) -> MutableNode: + def node(self, id: str|int) -> MutableNode: """ Gets the node with the specified id @@ -1521,22 +1346,16 @@ class Graph(GraphView): None: """ -class PersistentGraph(GraphView): +class PersistentGraph(GraphView): """A temporal graph that allows edges and nodes to be deleted.""" def __new__(cls) -> PersistentGraph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): ... - def add_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - properties: Optional[PropInput] = None, - layer: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> None: + def __reduce__(self): + ... + + def add_edge(self, timestamp: int, src: str | int, dst: str | int, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> None: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -1569,14 +1388,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def add_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_node(self, timestamp: TimeInput, id: str | int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> None: """ Adds a new node with the given id and properties to the graph. @@ -1594,12 +1406,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_properties( - self, - timestamp: TimeInput, - properties: dict, - secondary_index: Optional[int] = None, - ) -> None: + def add_properties(self, timestamp: TimeInput, properties: dict, secondary_index: Optional[int] = None) -> None: """ Adds properties to the graph. @@ -1675,14 +1482,7 @@ class PersistentGraph(GraphView): None: """ - def create_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableNode: + def create_node(self, timestamp: TimeInput, id: str | int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -1700,14 +1500,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def delete_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - layer: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableEdge: + def delete_edge(self, timestamp: int, src: str | int, dst: str | int, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableEdge: """ Deletes an edge given the timestamp, src and dst nodes and layer (optional) @@ -1820,9 +1613,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_edges_as( - self, edges: List[Edge], new_ids: list[Tuple[GID, GID]], merge: bool = False - ) -> None: + def import_edges_as(self, edges: List[Edge], new_ids: list[Tuple[GID, GID]], merge: bool = False) -> None: """ Import multiple edges into the graph with new ids. @@ -1859,9 +1650,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_node_as( - self, node: Node, new_id: str | int, merge: bool = False - ) -> Node: + def import_node_as(self, node: Node, new_id: str|int, merge: bool = False) -> Node: """ Import a single node into the graph with new id. @@ -1898,9 +1687,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_nodes_as( - self, nodes: List[Node], new_ids: List[str | int], merge: bool = False - ) -> None: + def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: """ Import multiple nodes into the graph with new ids. @@ -1934,15 +1721,7 @@ class PersistentGraph(GraphView): PersistentGraph: the loaded graph with initialised cache """ - def load_edge_deletions_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_deletions_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges deletions from a Pandas DataFrame into the graph. 
@@ -1961,15 +1740,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_deletions_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_deletions_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges deletions from a Parquet file into the graph. @@ -1988,16 +1759,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_pandas( - self, - df: DataFrame, - src: str, - dst: str, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edge properties from a Pandas DataFrame. @@ -2017,16 +1779,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_parquet( - self, - parquet_path: str, - src: str, - dst: str, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edge properties from parquet file @@ -2046,18 +1799,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -2079,18 +1821,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - layer: Optional[str] = None, - layer_col: Optional[str] = None, - ) -> None: + def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None, layer: Optional[str] = None, layer_col: Optional[str] = None) -> None: """ Load edges from a Parquet file into the graph. 
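The bulk loaders above share the same column-mapping arguments on Graph and PersistentGraph. A hedged sketch with made-up column names:

    import pandas as pd
    from raphtory import Graph

    edges = pd.DataFrame({
        "time": [1, 2, 3],
        "src": ["a", "b", "c"],
        "dst": ["b", "c", "a"],
        "weight": [0.5, 1.0, 1.5],
    })
    nodes = pd.DataFrame({
        "time": [1, 1, 1],
        "id": ["a", "b", "c"],
        "kind": ["person", "person", "bot"],
    })

    g = Graph()
    # column names map onto the time/src/dst/properties arguments documented above
    g.load_edges_from_pandas(edges, time="time", src="src", dst="dst", properties=["weight"])
    g.load_nodes_from_pandas(nodes, time="time", id="id", node_type_col="kind")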
@@ -2124,15 +1855,7 @@ class PersistentGraph(GraphView): PersistentGraph: """ - def load_node_props_from_pandas( - self, - df: DataFrame, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load node properties from a Pandas DataFrame. @@ -2151,15 +1874,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_node_props_from_parquet( - self, - parquet_path: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load node properties from a parquet file. @@ -2178,17 +1893,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_pandas( - self, - df: DataFrame, - time: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -2209,17 +1914,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_parquet( - self, - parquet_path: str, - time: str, - id: str, - node_type: Optional[str] = None, - node_type_col: Optional[str] = None, - properties: Optional[List[str]] = None, - metadata: Optional[List[str]] = None, - shared_metadata: Optional[PropInput] = None, - ) -> None: + def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: Optional[str] = None, node_type_col: Optional[str] = None, properties: Optional[List[str]] = None, metadata: Optional[List[str]] = None, shared_metadata: Optional[PropInput] = None) -> None: """ Load nodes from a Parquet file into the graph. @@ -2310,7 +2005,7 @@ class PersistentGraph(GraphView): None: """ -class Node(object): +class Node(object): """A node (or node) in the graph.""" def __eq__(self, value): @@ -2485,9 +2180,7 @@ class Node(object): Node: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -2568,7 +2261,7 @@ class Node(object): """ @property - def id(self) -> str | int: + def id(self) -> (str|int): """ Returns the id of the node. This is a unique identifier for the node. @@ -2732,12 +2425,7 @@ class Node(object): Properties: A list of properties. 
""" - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -2868,7 +2556,7 @@ class Node(object): Optional[int]: """ -class Nodes(object): +class Nodes(object): """A list of nodes that can be iterated over.""" def __bool__(self): @@ -3057,9 +2745,7 @@ class Nodes(object): Nodes: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3296,12 +2982,7 @@ class Nodes(object): PropertiesView: A view of the node properties. """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3399,9 +3080,7 @@ class Nodes(object): Optional[datetime]: The earliest datetime that this Nodes is valid or None if the Nodes is valid for all times. """ - def to_df( - self, include_property_history: bool = False, convert_datetime: bool = False - ) -> DataFrame: + def to_df(self, include_property_history: bool = False, convert_datetime: bool = False) -> DataFrame: """ Converts the graph's nodes into a Pandas DataFrame. @@ -3462,7 +3141,8 @@ class Nodes(object): Optional[int]: """ -class PathFromNode(object): +class PathFromNode(object): + def __bool__(self): """True if self else False""" @@ -3619,9 +3299,7 @@ class PathFromNode(object): PathFromNode: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3831,12 +3509,7 @@ class PathFromNode(object): PropertiesView: """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3978,7 +3651,8 @@ class PathFromNode(object): Optional[int]: """ -class PathFromGraph(object): +class PathFromGraph(object): + def __bool__(self): """True if self else False""" @@ -4144,9 +3818,7 @@ class PathFromGraph(object): PathFromGraph: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. 
@@ -4381,12 +4053,7 @@ class PathFromGraph(object): NestedPropsIterable: """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4528,7 +4195,8 @@ class PathFromGraph(object): Optional[int]: """ -class MutableNode(Node): +class MutableNode(Node): + def __repr__(self): """Return repr(self).""" @@ -4545,12 +4213,7 @@ class MutableNode(Node): None: """ - def add_updates( - self, - t: TimeInput, - properties: Optional[PropInput] = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_updates(self, t: TimeInput, properties: Optional[PropInput] = None, secondary_index: Optional[int] = None) -> None: """ Add updates to a node in the graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -4595,7 +4258,7 @@ class MutableNode(Node): None: """ -class Edge(object): +class Edge(object): """ PyEdge is a Python class that represents an edge in the graph. An edge is a directed connection between two nodes. @@ -4782,9 +4445,7 @@ class Edge(object): Edge: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -4986,12 +4647,7 @@ class Edge(object): Properties: Properties on the Edge. """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5140,7 +4796,7 @@ class Edge(object): Optional[int]: """ -class Edges(object): +class Edges(object): """A list of edges that can be iterated over.""" def __bool__(self): @@ -5325,9 +4981,7 @@ class Edges(object): Edges: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -5534,12 +5188,7 @@ class Edges(object): PropertiesView: """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5655,12 +5304,7 @@ class Edges(object): I64Iterable: """ - def to_df( - self, - include_property_history: bool = True, - convert_datetime: bool = False, - explode: bool = False, - ) -> DataFrame: + def to_df(self, include_property_history: bool = True, convert_datetime: bool = False, explode: bool = False) -> DataFrame: """ Converts the graph's edges into a Pandas DataFrame. @@ -5713,7 +5357,8 @@ class Edges(object): Optional[int]: """ -class NestedEdges(object): +class NestedEdges(object): + def __bool__(self): """True if self else False""" @@ -5888,9 +5533,7 @@ class NestedEdges(object): NestedEdges: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -6088,12 +5731,7 @@ class NestedEdges(object): PyNestedPropsIterable: """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -6242,7 +5880,8 @@ class NestedEdges(object): Optional[int]: """ -class MutableEdge(Edge): +class MutableEdge(Edge): + def __repr__(self): """Return repr(self).""" @@ -6260,13 +5899,7 @@ class MutableEdge(Edge): None: """ - def add_updates( - self, - t: TimeInput, - properties: Optional[PropInput] = None, - layer: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_updates(self, t: TimeInput, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> None: """ Add updates to an edge in the graph at a specified time. This function allows for the addition of property updates to an edge within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -6313,7 +5946,7 @@ class MutableEdge(Edge): None: """ -class Properties(object): +class Properties(object): """A view of the properties of an entity""" def __contains__(self, key): @@ -6404,7 +6037,8 @@ class Properties(object): list[PropValue]: """ -class PyPropValueList(object): +class PyPropValueList(object): + def __eq__(self, value): """Return self==value.""" @@ -6440,8 +6074,12 @@ class PyPropValueList(object): PropValue: The average of each property values, or None if count is zero. """ - def collect(self): ... - def count(self): ... + def collect(self): + ... + + def count(self): + ... + def drop_none(self) -> list[PropValue]: """ Drop none. 
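The edge-update and export methods above combine roughly as follows; the values are illustrative and the `edges` accessor on the graph is assumed:

    from raphtory import Graph

    g = Graph()
    e = g.add_edge(1, "a", "b", properties={"weight": 1.0})  # returns a MutableEdge
    e.add_updates(2, properties={"weight": 2.0})
    e.add_updates(3, properties={"weight": 4.0})

    # explode=True yields one row per update (signature documented above)
    print(g.edges.to_df(include_property_history=True, explode=True))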
@@ -6490,7 +6128,7 @@ class PyPropValueList(object): PropValue: """ -class Metadata(object): +class Metadata(object): """A view of metadata of an entity""" def __contains__(self, key): @@ -6571,7 +6209,7 @@ class Metadata(object): list[PropValue]: """ -class TemporalProperties(object): +class TemporalProperties(object): """A view of the temporal properties of an entity""" def __contains__(self, key): @@ -6666,7 +6304,8 @@ class TemporalProperties(object): list[TemporalProp]: the list of property views """ -class PropertiesView(object): +class PropertiesView(object): + def __contains__(self, key): """Return bool(key in self).""" @@ -6749,7 +6388,7 @@ class PropertiesView(object): list[list[PropValue]]: """ -class TemporalProp(object): +class TemporalProp(object): """A view of a temporal property""" def __eq__(self, value): @@ -6910,7 +6549,8 @@ class TemporalProp(object): NumpyArray: """ -class WindowSet(object): +class WindowSet(object): + def __iter__(self): """Implement iter(self).""" @@ -6928,7 +6568,8 @@ class WindowSet(object): Iterable: The time index. """ -class IndexSpecBuilder(object): +class IndexSpecBuilder(object): + def __new__(cls, graph) -> IndexSpecBuilder: """Create and return a new object. See help(type) for accurate signature.""" @@ -7032,7 +6673,8 @@ class IndexSpecBuilder(object): dict[str, Any]: """ -class IndexSpec(object): +class IndexSpec(object): + def __repr__(self): """Return repr(self).""" diff --git a/python/python/raphtory/algorithms/__init__.pyi b/python/python/raphtory/algorithms/__init__.pyi index ae2892f399..c3005db67e 100644 --- a/python/python/raphtory/algorithms/__init__.pyi +++ b/python/python/raphtory/algorithms/__init__.pyi @@ -1,7 +1,6 @@ """ Algorithmic functions that can be run on Raphtory graphs """ - from __future__ import annotations ############################################################################### @@ -27,59 +26,8 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "dijkstra_single_source_shortest_paths", - "global_reciprocity", - "betweenness_centrality", - "all_local_reciprocity", - "triplet_count", - "local_triangle_count", - "average_degree", - "directed_graph_density", - "degree_centrality", - "max_degree", - "min_degree", - "max_out_degree", - "max_in_degree", - "min_out_degree", - "min_in_degree", - "pagerank", - "single_source_shortest_path", - "global_clustering_coefficient", - "temporally_reachable_nodes", - "temporal_bipartite_graph_projection", - "local_clustering_coefficient", - "local_clustering_coefficient_batch", - "weakly_connected_components", - "strongly_connected_components", - "in_components", - "in_component", - "out_components", - "out_component", - "fast_rp", - "global_temporal_three_node_motif", - "global_temporal_three_node_motif_multi", - "local_temporal_three_node_motifs", - "hits", - "balance", - "label_propagation", - "k_core", - "temporal_SEIR", - "louvain", - "fruchterman_reingold", - "cohesive_fruchterman_reingold", - "max_weight_matching", - "Matching", - "Infected", -] - -def dijkstra_single_source_shortest_paths( - graph: GraphView, - source: NodeInput, - targets: list[NodeInput], - direction: Direction = "both", - weight: str = "weight", -) -> NodeStateWeightedSP: +__all__ = ['dijkstra_single_source_shortest_paths', 'global_reciprocity', 'betweenness_centrality', 'all_local_reciprocity', 'triplet_count', 'local_triangle_count', 'average_degree', 'directed_graph_density', 'degree_centrality', 'max_degree', 'min_degree', 
'max_out_degree', 'max_in_degree', 'min_out_degree', 'min_in_degree', 'pagerank', 'single_source_shortest_path', 'global_clustering_coefficient', 'temporally_reachable_nodes', 'temporal_bipartite_graph_projection', 'local_clustering_coefficient', 'local_clustering_coefficient_batch', 'weakly_connected_components', 'strongly_connected_components', 'in_components', 'in_component', 'out_components', 'out_component', 'fast_rp', 'global_temporal_three_node_motif', 'global_temporal_three_node_motif_multi', 'local_temporal_three_node_motifs', 'hits', 'balance', 'label_propagation', 'k_core', 'temporal_SEIR', 'louvain', 'fruchterman_reingold', 'cohesive_fruchterman_reingold', 'max_weight_matching', 'Matching', 'Infected'] +def dijkstra_single_source_shortest_paths(graph: GraphView, source: NodeInput, targets: list[NodeInput], direction: Direction = "both", weight: str = 'weight') -> NodeStateWeightedSP: """ Finds the shortest paths from a single source to multiple targets in a graph. @@ -109,9 +57,7 @@ def global_reciprocity(graph: GraphView) -> float: float: reciprocity of the graph between 0 and 1. """ -def betweenness_centrality( - graph: GraphView, k: Optional[int] = None, normalized: bool = True -) -> NodeStateF64: +def betweenness_centrality(graph: GraphView, k: Optional[int] = None, normalized: bool = True) -> NodeStateF64: """ Computes the betweenness centrality for nodes in a given graph. @@ -279,13 +225,7 @@ def min_in_degree(graph: GraphView) -> int: int: value of the smallest indegree """ -def pagerank( - graph: GraphView, - iter_count: int = 20, - max_diff: Optional[float] = None, - use_l2_norm: bool = True, - damping_factor: float = 0.85, -) -> NodeStateF64: +def pagerank(graph: GraphView, iter_count: int = 20, max_diff: Optional[float] = None, use_l2_norm: bool = True, damping_factor: float = 0.85) -> NodeStateF64: """ Pagerank -- pagerank centrality value of the nodes in a graph @@ -306,9 +246,7 @@ def pagerank( NodeStateF64: Mapping of nodes to their pagerank value. """ -def single_source_shortest_path( - graph: GraphView, source: NodeInput, cutoff: Optional[int] = None -) -> NodeStateNodes: +def single_source_shortest_path(graph: GraphView, source: NodeInput, cutoff: Optional[int] = None) -> NodeStateNodes: """ Calculates the single source shortest paths from a given source node. @@ -339,13 +277,7 @@ def global_clustering_coefficient(graph: GraphView) -> float: [`Triplet Count`](triplet_count) """ -def temporally_reachable_nodes( - graph: GraphView, - max_hops: int, - start_time: int, - seed_nodes: list[NodeInput], - stop_nodes: Optional[list[NodeInput]] = None, -) -> NodeStateReachability: +def temporally_reachable_nodes(graph: GraphView, max_hops: int, start_time: int, seed_nodes: list[NodeInput], stop_nodes: Optional[list[NodeInput]] = None) -> NodeStateReachability: """ Temporally reachable nodes -- the nodes that are reachable by a time respecting path followed out from a set of seed nodes at a starting time. @@ -364,9 +296,7 @@ def temporally_reachable_nodes( NodeStateReachability: Mapping of nodes to their reachability history. """ -def temporal_bipartite_graph_projection( - graph: GraphView, delta: int, pivot_type: str -) -> Graph: +def temporal_bipartite_graph_projection(graph: GraphView, delta: int, pivot_type: str) -> Graph: """ Projects a temporal bipartite graph into an undirected temporal graph over the pivot node type. Let `G` be a bipartite graph with node types `A` and `B`. 
Given `delta > 0`, the projection graph `G'` pivoting over type `B` nodes, will make a connection between nodes `n1` and `n2` (of type `A`) at time `(t1 + t2)/2` if they respectively have an edge at time `t1`, `t2` with the same node of type `B` in `G`, and `|t2-t1| < delta`. @@ -479,14 +409,7 @@ def out_component(node: Node) -> NodeStateUsize: NodeStateUsize: A NodeState mapping the nodes in the out-component to their distance from the starting node. """ -def fast_rp( - graph: GraphView, - embedding_dim: int, - normalization_strength: float, - iter_weights: list[float], - seed: Optional[int] = None, - threads: Optional[int] = None, -) -> NodeStateListF64: +def fast_rp(graph: GraphView, embedding_dim: int, normalization_strength: float, iter_weights: list[float], seed: Optional[int] = None, threads: Optional[int] = None) -> NodeStateListF64: """ Computes embedding vectors for each vertex of an undirected/bidirectional graph according to the Fast RP algorithm. Original Paper: https://doi.org/10.48550/arXiv.1908.11512 @@ -502,9 +425,7 @@ def fast_rp( NodeStateListF64: Mapping from nodes to embedding vectors. """ -def global_temporal_three_node_motif( - graph: GraphView, delta: int, threads: Optional[int] = None -) -> list[int]: +def global_temporal_three_node_motif(graph: GraphView, delta: int, threads: Optional[int] = None) -> list[int]: """ Computes the number of three edge, up-to-three node delta-temporal motifs in the graph, using the algorithm of Paranjape et al, Motifs in Temporal Networks (2017). We point the reader to this reference for more information on the algorithm and background, but provide a short summary below. @@ -553,9 +474,7 @@ def global_temporal_three_node_motif( """ -def global_temporal_three_node_motif_multi( - graph: GraphView, deltas: list[int], threads: Optional[int] = None -) -> list[list[int]]: +def global_temporal_three_node_motif_multi(graph: GraphView, deltas: list[int], threads: Optional[int] = None) -> list[list[int]]: """ Computes the global counts of three-edge up-to-three node temporal motifs for a range of timescales. See `global_temporal_three_node_motif` for an interpretation of each row returned. @@ -568,9 +487,7 @@ def global_temporal_three_node_motif_multi( list[list[int]]: A list of 40d arrays, each array is the motif count for a particular value of delta, returned in the order that the deltas were given as input. """ -def local_temporal_three_node_motifs( - graph: GraphView, delta: int, threads=None -) -> NodeStateMotifs: +def local_temporal_three_node_motifs(graph: GraphView, delta: int, threads=None) -> NodeStateMotifs: """ Computes the number of each type of motif that each node participates in. See global_temporal_three_node_motifs for a summary of the motifs involved. @@ -586,9 +503,7 @@ def local_temporal_three_node_motifs( the motif. For two node motifs, both constituent nodes count the motif. For triangles, all three constituent nodes count the motif. 
""" -def hits( - graph: GraphView, iter_count: int = 20, threads: Optional[int] = None -) -> NodeStateHits: +def hits(graph: GraphView, iter_count: int = 20, threads: Optional[int] = None) -> NodeStateHits: """ HITS (Hubs and Authority) Algorithm: @@ -607,9 +522,7 @@ def hits( NodeStateHits: A mapping from nodes their hub and authority scores """ -def balance( - graph: GraphView, name: str = "weight", direction: Direction = "both" -) -> NodeStateF64: +def balance(graph: GraphView, name: str = "weight", direction: Direction = "both") -> NodeStateF64: """ Sums the weights of edges in the graph based on the specified direction. @@ -628,9 +541,7 @@ def balance( """ -def label_propagation( - graph: GraphView, seed: Optional[bytes] = None -) -> list[set[Node]]: +def label_propagation(graph: GraphView, seed: Optional[bytes] = None) -> list[set[Node]]: """ Computes components using a label propagation algorithm @@ -643,9 +554,7 @@ def label_propagation( """ -def k_core( - graph: GraphView, k: int, iter_count: int, threads: Optional[int] = None -) -> list[Node]: +def k_core(graph: GraphView, k: int, iter_count: int, threads: Optional[int] = None) -> list[Node]: """ Determines which nodes are in the k-core for a given value of k @@ -660,15 +569,7 @@ def k_core( """ -def temporal_SEIR( - graph: GraphView, - seeds: int | float | list[NodeInput], - infection_prob: float, - initial_infection: int | str | datetime, - recovery_rate: float | None = None, - incubation_rate: float | None = None, - rng_seed: int | None = None, -) -> NodeStateSEIR: +def temporal_SEIR(graph: GraphView, seeds: int | float | list[NodeInput], infection_prob: float, initial_infection: int | str | datetime, recovery_rate: float | None = None, incubation_rate: float | None = None, rng_seed: int | None = None) -> NodeStateSEIR: """ Simulate an SEIR dynamic on the network @@ -698,12 +599,7 @@ def temporal_SEIR( """ -def louvain( - graph: GraphView, - resolution: float = 1.0, - weight_prop: str | None = None, - tol: None | float = None, -) -> NodeStateUsize: +def louvain(graph: GraphView, resolution: float = 1.0, weight_prop: str | None = None, tol: None | float = None) -> NodeStateUsize: """ Louvain algorithm for community detection @@ -717,14 +613,7 @@ def louvain( NodeStateUsize: Mapping of nodes to their community assignment """ -def fruchterman_reingold( - graph: GraphView, - iterations: int | None = 100, - scale: float | None = 1.0, - node_start_size: float | None = 1.0, - cooloff_factor: float | None = 0.95, - dt: float | None = 0.1, -) -> NodeLayout: +def fruchterman_reingold(graph: GraphView, iterations: int | None = 100, scale: float | None = 1.0, node_start_size: float | None = 1.0, cooloff_factor: float | None = 0.95, dt: float | None = 0.1) -> NodeLayout: """ Fruchterman Reingold layout algorithm @@ -740,14 +629,7 @@ def fruchterman_reingold( NodeLayout: A mapping from nodes to their [x, y] positions """ -def cohesive_fruchterman_reingold( - graph: GraphView, - iter_count: int = 100, - scale: float = 1.0, - node_start_size: float = 1.0, - cooloff_factor: float = 0.95, - dt: float = 0.1, -) -> NodeLayout: +def cohesive_fruchterman_reingold(graph: GraphView, iter_count: int = 100, scale: float = 1.0, node_start_size: float = 1.0, cooloff_factor: float = 0.95, dt: float = 0.1) -> NodeLayout: """ Cohesive version of `fruchterman_reingold` that adds virtual edges between isolated nodes Arguments: @@ -763,12 +645,7 @@ def cohesive_fruchterman_reingold( """ -def max_weight_matching( - graph: GraphView, - weight_prop: Optional[str] 
= None, - max_cardinality: bool = True, - verify_optimum_flag: bool = False, -) -> Matching: +def max_weight_matching(graph: GraphView, weight_prop: Optional[str] = None, max_cardinality: bool = True, verify_optimum_flag: bool = False) -> Matching: """ Compute a maximum-weighted matching in the general undirected weighted graph given by "edges". If `max_cardinality` is true, only @@ -805,7 +682,7 @@ def max_weight_matching( Matching: The matching """ -class Matching(object): +class Matching(object): """A Matching (i.e., a set of edges that do not share any nodes)""" def __bool__(self): @@ -877,7 +754,8 @@ class Matching(object): """ -class Infected(object): +class Infected(object): + def __repr__(self): """Return repr(self).""" diff --git a/python/python/raphtory/filter/__init__.pyi b/python/python/raphtory/filter/__init__.pyi index 5f33a18fcb..36d732c413 100644 --- a/python/python/raphtory/filter/__init__.pyi +++ b/python/python/raphtory/filter/__init__.pyi @@ -23,20 +23,9 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "FilterExpr", - "PropertyFilterOps", - "NodeFilterBuilder", - "Node", - "EdgeFilterOp", - "EdgeEndpoint", - "Edge", - "Property", - "Metadata", - "TemporalPropertyFilterBuilder", -] - -class FilterExpr(object): +__all__ = ['FilterExpr', 'PropertyFilterOps', 'NodeFilterBuilder', 'Node', 'EdgeFilterOp', 'EdgeEndpoint', 'Edge', 'Property', 'Metadata', 'TemporalPropertyFilterBuilder'] +class FilterExpr(object): + def __and__(self, value): """Return self&value.""" @@ -52,7 +41,8 @@ class FilterExpr(object): def __ror__(self, value): """Return value|self.""" -class PropertyFilterOps(object): +class PropertyFilterOps(object): + def __eq__(self, value): """Return self==value.""" @@ -74,7 +64,7 @@ class PropertyFilterOps(object): def contains(self, value) -> filter.FilterExpr: """ Returns a filter expression that checks if this object contains a specified property. - + Arguments: PropValue: @@ -82,9 +72,7 @@ class PropertyFilterOps(object): filter.FilterExpr: """ - def fuzzy_search( - self, prop_value: str, levenshtein_distance: int, prefix_match: bool - ) -> filter.FilterExpr: + def fuzzy_search(self, prop_value: str, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -94,7 +82,7 @@ class PropertyFilterOps(object): prop_value (str): Property to match against. levenshtein_distance (int): Maximum levenshtein distance between the specified prop_value and the result. prefix_match (bool): Enable prefix matching. - + Returns: filter.FilterExpr: """ @@ -102,7 +90,7 @@ class PropertyFilterOps(object): def is_in(self, values: list[PropValue]) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is in a specified iterable of properties. - + Arguments: values (list[PropValue]): @@ -113,7 +101,7 @@ class PropertyFilterOps(object): def is_none(self) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is none. - + Returns: filter.FilterExpr: """ @@ -121,7 +109,7 @@ class PropertyFilterOps(object): def is_not_in(self, values: list[PropValue]) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is not in a specified iterable of properties. 
- + Arguments: values (list[PropValue]): @@ -132,7 +120,7 @@ class PropertyFilterOps(object): def is_some(self) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is some. - + Returns: filter.FilterExpr: """ @@ -140,7 +128,7 @@ class PropertyFilterOps(object): def not_contains(self, value) -> filter.FilterExpr: """ Returns a filter expression that checks if this object does not contain a specified property. - + Arguments: PropValue: @@ -148,7 +136,7 @@ class PropertyFilterOps(object): filter.FilterExpr: """ -class NodeFilterBuilder(object): +class NodeFilterBuilder(object): """ A builder for constructing node filters @@ -184,9 +172,7 @@ class NodeFilterBuilder(object): filter.FilterExpr: """ - def fuzzy_search( - self, value, levenshtein_distance: int, prefix_match: bool - ) -> filter.FilterExpr: + def fuzzy_search(self, value, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -227,7 +213,7 @@ class NodeFilterBuilder(object): """ Returns a filter expression that checks if the specified iterable of strings does not contain a given value. - + Arguments: value (str): @@ -235,7 +221,8 @@ class NodeFilterBuilder(object): filter.FilterExpr: """ -class Node(object): +class Node(object): + @staticmethod def name(): """ @@ -254,7 +241,8 @@ class Node(object): NodeFilterBuilder: A filter builder for filtering by node type """ -class EdgeFilterOp(object): +class EdgeFilterOp(object): + def __eq__(self, value): """Return self==value.""" @@ -276,7 +264,7 @@ class EdgeFilterOp(object): def contains(self, value: str) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value contains the specified string. - + Arguments: value (str): @@ -284,9 +272,7 @@ class EdgeFilterOp(object): filter.FilterExpr: """ - def fuzzy_search( - self, value, levenshtein_distance: int, prefix_match: bool - ) -> filter.FilterExpr: + def fuzzy_search(self, value, levenshtein_distance: int, prefix_match: bool) -> filter.FilterExpr: """ Returns a filter expression that checks if the specified properties approximately match the specified string. @@ -296,7 +282,7 @@ class EdgeFilterOp(object): prop_value (str): Property to match against. levenshtein_distance (int): Maximum levenshtein distance between the specified prop_value and the result. prefix_match (bool): Enable prefix matching. - + Returns: filter.FilterExpr: """ @@ -304,7 +290,7 @@ class EdgeFilterOp(object): def is_in(self, values: list[str]) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is contained within the specified iterable of strings. - + Arguments: values (list[str]): @@ -315,7 +301,7 @@ class EdgeFilterOp(object): def is_not_in(self, values: list[str]) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value is not contained within the provided iterable of strings. - + Arguments: values (list[str]): @@ -326,7 +312,7 @@ class EdgeFilterOp(object): def not_contains(self, value: str) -> filter.FilterExpr: """ Returns a filter expression that checks if a given value does not contain the specified string. - + Arguments: value (str): @@ -334,16 +320,22 @@ class EdgeFilterOp(object): filter.FilterExpr: """ -class EdgeEndpoint(object): - def name(self): ... +class EdgeEndpoint(object): + + def name(self): + ... + +class Edge(object): -class Edge(object): @staticmethod - def dst(): ... + def dst(): + ... 
+ @staticmethod - def src(): ... + def src(): + ... -class Property(PropertyFilterOps): +class Property(PropertyFilterOps): """ Construct a property filter @@ -354,9 +346,10 @@ class Property(PropertyFilterOps): def __new__(cls, name: str) -> Property: """Create and return a new object. See help(type) for accurate signature.""" - def temporal(self): ... + def temporal(self): + ... -class Metadata(PropertyFilterOps): +class Metadata(PropertyFilterOps): """ Construct a metadata filter @@ -367,6 +360,10 @@ class Metadata(PropertyFilterOps): def __new__(cls, name: str) -> Metadata: """Create and return a new object. See help(type) for accurate signature.""" -class TemporalPropertyFilterBuilder(object): - def any(self): ... - def latest(self): ... +class TemporalPropertyFilterBuilder(object): + + def any(self): + ... + + def latest(self): + ... diff --git a/python/python/raphtory/graph_gen/__init__.pyi b/python/python/raphtory/graph_gen/__init__.pyi index 3a9f849f05..3ec394b85c 100644 --- a/python/python/raphtory/graph_gen/__init__.pyi +++ b/python/python/raphtory/graph_gen/__init__.pyi @@ -1,7 +1,6 @@ """ Generate Raphtory graphs from attachment models """ - from __future__ import annotations ############################################################################### @@ -28,8 +27,7 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ["random_attachment", "ba_preferential_attachment"] - +__all__ = ['random_attachment', 'ba_preferential_attachment'] def random_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None): """ Generates a graph using the random attachment model @@ -48,9 +46,7 @@ def random_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any None """ -def ba_preferential_attachment( - g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None -): +def ba_preferential_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None): """ Generates a graph using the preferential attachment model. diff --git a/python/python/raphtory/graph_loader/__init__.pyi b/python/python/raphtory/graph_loader/__init__.pyi index e0b31f720f..10ba033c37 100644 --- a/python/python/raphtory/graph_loader/__init__.pyi +++ b/python/python/raphtory/graph_loader/__init__.pyi @@ -1,7 +1,6 @@ """ Load and save Raphtory graphs from/to file(s) """ - from __future__ import annotations ############################################################################### @@ -28,16 +27,7 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "lotr_graph", - "lotr_graph_with_props", - "neo4j_movie_graph", - "stable_coin_graph", - "reddit_hyperlink_graph", - "reddit_hyperlink_graph_local", - "karate_club_graph", -] - +__all__ = ['lotr_graph', 'lotr_graph_with_props', 'neo4j_movie_graph', 'stable_coin_graph', 'reddit_hyperlink_graph', 'reddit_hyperlink_graph_local', 'karate_club_graph'] def lotr_graph() -> Graph: """ Load the Lord of the Rings dataset into a graph. @@ -66,9 +56,7 @@ def lotr_graph_with_props() -> Graph: Graph: """ -def neo4j_movie_graph( - uri: str, username: str, password: str, database: str = ... -) -> Graph: +def neo4j_movie_graph(uri: str, username: str, password: str, database: str = ...) -> Graph: """ Returns the neo4j movie graph example. 
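Putting the algorithms and filter stubs above together, a rough usage sketch. The graph and property values are illustrative, and applying a FilterExpr to a graph view is outside the scope of this sketch:

    from raphtory import Graph, algorithms, filter

    g = Graph()
    for t, (src, dst) in enumerate([("a", "b"), ("b", "c"), ("c", "a"), ("c", "d")]):
        g.add_edge(t, src, dst, properties={"weight": 1.0})

    # keyword arguments follow the signatures documented above
    ranks = algorithms.pagerank(g, iter_count=20, damping_factor=0.85)
    communities = algorithms.louvain(g, resolution=1.0, weight_prop="weight")
    print(ranks)
    print(communities)

    # filter expressions are built from Property builders and combined with & / |
    expr = (filter.Property("weight") == 1.0) | filter.Property("weight").is_in([2.0, 4.0])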
diff --git a/python/python/raphtory/graphql/__init__.pyi b/python/python/raphtory/graphql/__init__.pyi index 4cd4d5c51d..b8315a8395 100644 --- a/python/python/raphtory/graphql/__init__.pyi +++ b/python/python/raphtory/graphql/__init__.pyi @@ -23,26 +23,8 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "GraphServer", - "RunningGraphServer", - "RaphtoryClient", - "RemoteGraph", - "RemoteEdge", - "RemoteNode", - "RemoteNodeAddition", - "RemoteUpdate", - "RemoteEdgeAddition", - "RemoteIndexSpec", - "PropsInput", - "SomePropertySpec", - "AllPropertySpec", - "encode_graph", - "decode_graph", - "schema", -] - -class GraphServer(object): +__all__ = ['GraphServer', 'RunningGraphServer', 'RaphtoryClient', 'RemoteGraph', 'RemoteEdge', 'RemoteNode', 'RemoteNodeAddition', 'RemoteUpdate', 'RemoteEdgeAddition', 'RemoteIndexSpec', 'PropsInput', 'SomePropertySpec', 'AllPropertySpec', 'encode_graph', 'decode_graph', 'schema'] +class GraphServer(object): """ A class for defining and running a Raphtory GraphQL server @@ -61,21 +43,7 @@ class GraphServer(object): create_index: """ - def __new__( - cls, - work_dir: str | PathLike, - cache_capacity: Optional[int] = None, - cache_tti_seconds: Optional[int] = None, - log_level: Optional[str] = None, - tracing: Optional[bool] = None, - otlp_agent_host: Optional[str] = None, - otlp_agent_port: Optional[str] = None, - otlp_tracing_service_name: Optional[str] = None, - auth_public_key: Any = None, - auth_enabled_for_reads: Any = None, - config_path: Optional[str | PathLike] = None, - create_index: Any = None, - ) -> GraphServer: + def __new__(cls, work_dir: str | PathLike, cache_capacity: Optional[int] = None, cache_tti_seconds: Optional[int] = None, log_level: Optional[str] = None, tracing: Optional[bool] = None, otlp_agent_host: Optional[str] = None, otlp_agent_port: Optional[str] = None, otlp_tracing_service_name: Optional[str] = None, auth_public_key: Any = None, auth_enabled_for_reads: Any = None, config_path: Optional[str | PathLike] = None, create_index: Any = None) -> GraphServer: """Create and return a new object. See help(type) for accurate signature.""" def run(self, port: int = 1736, timeout_ms: int = 180000) -> None: @@ -90,13 +58,7 @@ class GraphServer(object): None: """ - def set_embeddings( - self, - cache: str, - embedding: Optional[Callable] = None, - nodes: bool | str = True, - edges: bool | str = True, - ) -> GraphServer: + def set_embeddings(self, cache: str, embedding: Optional[Callable] = None, nodes: bool | str = True, edges: bool | str = True) -> GraphServer: """ Setup the server to vectorise graphs with a default template. @@ -132,9 +94,7 @@ class GraphServer(object): GraphServer: The server with indexing disabled """ - def with_vectorised_graphs( - self, graph_names: list[str], nodes: bool | str = True, edges: bool | str = True - ) -> GraphServer: + def with_vectorised_graphs(self, graph_names: list[str], nodes: bool | str = True, edges: bool | str = True) -> GraphServer: """ Vectorise a subset of the graphs of the server. @@ -147,11 +107,15 @@ class GraphServer(object): GraphServer: A new server object containing the vectorised graphs. """ -class RunningGraphServer(object): +class RunningGraphServer(object): """A Raphtory server handler that also enables querying the server""" - def __enter__(self): ... - def __exit__(self, _exc_type, _exc_val, _exc_tb): ... + def __enter__(self): + ... + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + ... 
+ def get_client(self): """ Get the client for the server @@ -168,7 +132,7 @@ class RunningGraphServer(object): None: """ -class RaphtoryClient(object): +class RaphtoryClient(object): """ A client for handling GraphQL operations in the context of Raphtory. @@ -250,9 +214,7 @@ class RaphtoryClient(object): """ - def query( - self, query: str, variables: Optional[dict[str, Any]] = None - ) -> dict[str, Any]: + def query(self, query: str, variables: Optional[dict[str, Any]] = None) -> dict[str, Any]: """ Make a GraphQL query against the server. @@ -290,9 +252,7 @@ class RaphtoryClient(object): """ - def send_graph( - self, path: str, graph: Graph | PersistentGraph, overwrite: bool = False - ) -> dict[str, Any]: + def send_graph(self, path: str, graph: Graph | PersistentGraph, overwrite: bool = False) -> dict[str, Any]: """ Send a graph to the server @@ -305,9 +265,7 @@ class RaphtoryClient(object): dict[str, Any]: The data field from the graphQL response after executing the mutation. """ - def upload_graph( - self, path: str, file_path: str, overwrite: bool = False - ) -> dict[str, Any]: + def upload_graph(self, path: str, file_path: str, overwrite: bool = False) -> dict[str, Any]: """ Upload graph file from a path file_path on the client @@ -320,15 +278,9 @@ class RaphtoryClient(object): dict[str, Any]: The data field from the graphQL response after executing the mutation. """ -class RemoteGraph(object): - def add_edge( - self, - timestamp: int | str | datetime, - src: str | int, - dst: str | int, - properties: Optional[dict] = None, - layer: Optional[str] = None, - ) -> RemoteEdge: +class RemoteGraph(object): + + def add_edge(self, timestamp: int | str | datetime, src: str | int, dst: str | int, properties: Optional[dict] = None, layer: Optional[str] = None) -> RemoteEdge: """ Adds a new edge with the given source and destination nodes and properties to the remote graph. @@ -365,13 +317,7 @@ class RemoteGraph(object): None: """ - def add_node( - self, - timestamp: int | str | datetime, - id: str | int, - properties: Optional[dict] = None, - node_type: Optional[str] = None, - ) -> RemoteNode: + def add_node(self, timestamp: int | str | datetime, id: str | int, properties: Optional[dict] = None, node_type: Optional[str] = None) -> RemoteNode: """ Adds a new node with the given id and properties to the remote graph. @@ -408,13 +354,7 @@ class RemoteGraph(object): None: """ - def create_node( - self, - timestamp: int | str | datetime, - id: str | int, - properties: Optional[dict] = None, - node_type: Optional[str] = None, - ) -> RemoteNode: + def create_node(self, timestamp: int | str | datetime, id: str | int, properties: Optional[dict] = None, node_type: Optional[str] = None) -> RemoteNode: """ Create a new node with the given id and properties to the remote graph and fail if the node already exists. @@ -428,13 +368,7 @@ class RemoteGraph(object): RemoteNode: the new remote node """ - def delete_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - layer: Optional[str] = None, - ) -> RemoteEdge: + def delete_edge(self, timestamp: int, src: str | int, dst: str | int, layer: Optional[str] = None) -> RemoteEdge: """ Deletes an edge in the remote graph, given the timestamp, src and dst nodes and layer (optional) @@ -482,7 +416,7 @@ class RemoteGraph(object): None: """ -class RemoteEdge(object): +class RemoteEdge(object): """ A remote edge reference @@ -491,9 +425,7 @@ class RemoteEdge(object): and [RemoteGraph.delete_edge][raphtory.graphql.RemoteGraph.delete_edge]. 
""" - def add_metadata( - self, properties: dict[str, PropValue], layer: Optional[str] = None - ) -> None: + def add_metadata(self, properties: dict[str, PropValue], layer: Optional[str] = None) -> None: """ Add metadata to the edge within the remote graph. This function is used to add metadata to an edge that does not @@ -507,12 +439,7 @@ class RemoteEdge(object): None: """ - def add_updates( - self, - t: int | str | datetime, - properties: Optional[dict[str, PropValue]] = None, - layer: Optional[str] = None, - ) -> None: + def add_updates(self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None, layer: Optional[str] = None) -> None: """ Add updates to an edge in the remote graph at a specified time. @@ -543,9 +470,7 @@ class RemoteEdge(object): GraphError: If the operation fails. """ - def update_metadata( - self, properties: dict[str, PropValue], layer: Optional[str] = None - ) -> None: + def update_metadata(self, properties: dict[str, PropValue], layer: Optional[str] = None) -> None: """ Update metadata of an edge in the remote graph overwriting existing values. This function is used to add properties to an edge that does not @@ -559,7 +484,8 @@ class RemoteEdge(object): None: """ -class RemoteNode(object): +class RemoteNode(object): + def add_metadata(self, properties: dict[str, PropValue]) -> None: """ Add metadata to a node in the remote graph. @@ -573,9 +499,7 @@ class RemoteNode(object): None: """ - def add_updates( - self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None - ) -> None: + def add_updates(self, t: int | str | datetime, properties: Optional[dict[str, PropValue]] = None) -> None: """ Add updates to a node in the remote graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -613,7 +537,7 @@ class RemoteNode(object): None: """ -class RemoteNodeAddition(object): +class RemoteNodeAddition(object): """ Node addition update @@ -624,16 +548,10 @@ class RemoteNodeAddition(object): updates (list[RemoteUpdate], optional): the temporal updates """ - def __new__( - cls, - name: GID, - node_type: Optional[str] = None, - metadata: Optional[PropInput] = None, - updates: Optional[list[RemoteUpdate]] = None, - ) -> RemoteNodeAddition: + def __new__(cls, name: GID, node_type: Optional[str] = None, metadata: Optional[PropInput] = None, updates: Optional[list[RemoteUpdate]] = None) -> RemoteNodeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteUpdate(object): +class RemoteUpdate(object): """ A temporal update @@ -642,12 +560,10 @@ class RemoteUpdate(object): properties (PropInput, optional): the properties for the update """ - def __new__( - cls, time: TimeInput, properties: Optional[PropInput] = None - ) -> RemoteUpdate: + def __new__(cls, time: TimeInput, properties: Optional[PropInput] = None) -> RemoteUpdate: """Create and return a new object. 
See help(type) for accurate signature.""" -class RemoteEdgeAddition(object): +class RemoteEdgeAddition(object): """ An edge update @@ -659,17 +575,10 @@ class RemoteEdgeAddition(object): updates (list[RemoteUpdate], optional): the temporal updates for the edge """ - def __new__( - cls, - src: GID, - dst: GID, - layer: Optional[str] = None, - metadata: Optional[PropInput] = None, - updates: Optional[list[RemoteUpdate]] = None, - ) -> RemoteEdgeAddition: + def __new__(cls, src: GID, dst: GID, layer: Optional[str] = None, metadata: Optional[PropInput] = None, updates: Optional[list[RemoteUpdate]] = None) -> RemoteEdgeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteIndexSpec(object): +class RemoteIndexSpec(object): """ Create a RemoteIndexSpec specifying which node and edge properties to index. @@ -681,7 +590,7 @@ class RemoteIndexSpec(object): def __new__(cls, node_props: PropsInput, edge_props: PropsInput) -> RemoteIndexSpec: """Create and return a new object. See help(type) for accurate signature.""" -class PropsInput(object): +class PropsInput(object): """ Create a PropsInput by choosing to include all/some properties explicitly. @@ -693,14 +602,10 @@ class PropsInput(object): ValueError: If neither all and some are specified. """ - def __new__( - cls, - all: Optional[AllPropertySpec] = None, - some: Optional[SomePropertySpec] = None, - ) -> PropsInput: + def __new__(cls, all: Optional[AllPropertySpec] = None, some: Optional[SomePropertySpec] = None) -> PropsInput: """Create and return a new object. See help(type) for accurate signature.""" -class SomePropertySpec(object): +class SomePropertySpec(object): """ Create a SomePropertySpec by explicitly listing metadata and/or temporal property names. @@ -709,12 +614,10 @@ class SomePropertySpec(object): properties (list[str]): Temporal property names. Defaults to []. """ - def __new__( - cls, metadata: list[str] = [], properties: list[str] = [] - ) -> SomePropertySpec: + def __new__(cls, metadata: list[str] = [], properties: list[str] = []) -> SomePropertySpec: """Create and return a new object. See help(type) for accurate signature.""" -class AllPropertySpec(object): +class AllPropertySpec(object): """ Specifies that **all** properties should be included when creating an index. Use one of the predefined variants: ALL , ALL_METADATA , or ALL_TEMPORAL . 
diff --git a/python/python/raphtory/iterables/__init__.pyi b/python/python/raphtory/iterables/__init__.pyi index 2a80bbc5cb..ec2c4d6ee9 100644 --- a/python/python/raphtory/iterables/__init__.pyi +++ b/python/python/raphtory/iterables/__init__.pyi @@ -23,33 +23,9 @@ from os import PathLike import networkx as nx # type: ignore import pyvis # type: ignore -__all__ = [ - "NestedUtcDateTimeIterable", - "NestedGIDIterable", - "GIDIterable", - "StringIterable", - "OptionArcStringIterable", - "UsizeIterable", - "OptionI64Iterable", - "NestedOptionArcStringIterable", - "NestedStringIterable", - "NestedOptionI64Iterable", - "NestedI64VecIterable", - "NestedUsizeIterable", - "BoolIterable", - "ArcStringIterable", - "NestedVecUtcDateTimeIterable", - "OptionVecUtcDateTimeIterable", - "GIDGIDIterable", - "NestedGIDGIDIterable", - "NestedBoolIterable", - "U64Iterable", - "OptionUtcDateTimeIterable", - "ArcStringVecIterable", - "NestedArcStringVecIterable", -] - -class NestedUtcDateTimeIterable(object): +__all__ = ['NestedUtcDateTimeIterable', 'NestedGIDIterable', 'GIDIterable', 'StringIterable', 'OptionArcStringIterable', 'UsizeIterable', 'OptionI64Iterable', 'NestedOptionArcStringIterable', 'NestedStringIterable', 'NestedOptionI64Iterable', 'NestedI64VecIterable', 'NestedUsizeIterable', 'BoolIterable', 'ArcStringIterable', 'NestedVecUtcDateTimeIterable', 'OptionVecUtcDateTimeIterable', 'GIDGIDIterable', 'NestedGIDGIDIterable', 'NestedBoolIterable', 'U64Iterable', 'OptionUtcDateTimeIterable', 'ArcStringVecIterable', 'NestedArcStringVecIterable'] +class NestedUtcDateTimeIterable(object): + def __eq__(self, value): """Return self==value.""" @@ -77,9 +53,11 @@ class NestedUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedGIDIterable(object): -class NestedGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -107,11 +85,17 @@ class NestedGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... + + def min(self): + ... + +class GIDIterable(object): -class GIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -139,11 +123,17 @@ class GIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... + + def min(self): + ... + +class StringIterable(object): -class StringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -171,9 +161,11 @@ class StringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class OptionArcStringIterable(object): -class OptionArcStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -201,9 +193,11 @@ class OptionArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class UsizeIterable(object): -class UsizeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -231,13 +225,23 @@ class UsizeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def mean(self): ... - def min(self): ... - def sum(self): ... + def collect(self): + ... + + def max(self): + ... + + def mean(self): + ... + + def min(self): + ... + + def sum(self): + ... 
+ +class OptionI64Iterable(object): -class OptionI64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -265,11 +269,17 @@ class OptionI64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... + + def min(self): + ... + +class NestedOptionArcStringIterable(object): -class NestedOptionArcStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -297,9 +307,11 @@ class NestedOptionArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedStringIterable(object): -class NestedStringIterable(object): def __eq__(self, value): """Return self==value.""" @@ -327,9 +339,11 @@ class NestedStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedOptionI64Iterable(object): -class NestedOptionI64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -357,11 +371,17 @@ class NestedOptionI64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... + + def min(self): + ... + +class NestedI64VecIterable(object): -class NestedI64VecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -389,9 +409,11 @@ class NestedI64VecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedUsizeIterable(object): -class NestedUsizeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -419,13 +441,23 @@ class NestedUsizeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def mean(self): ... - def min(self): ... - def sum(self): ... + def collect(self): + ... + + def max(self): + ... + + def mean(self): + ... + + def min(self): + ... + + def sum(self): + ... + +class BoolIterable(object): -class BoolIterable(object): def __eq__(self, value): """Return self==value.""" @@ -453,9 +485,11 @@ class BoolIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class ArcStringIterable(object): -class ArcStringIterable(object): def __iter__(self): """Implement iter(self).""" @@ -465,9 +499,11 @@ class ArcStringIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedVecUtcDateTimeIterable(object): -class NestedVecUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -495,9 +531,11 @@ class NestedVecUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class OptionVecUtcDateTimeIterable(object): -class OptionVecUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -525,9 +563,11 @@ class OptionVecUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class GIDGIDIterable(object): -class GIDGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -555,11 +595,17 @@ class GIDGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... 
+ + def min(self): + ... + +class NestedGIDGIDIterable(object): -class NestedGIDGIDIterable(object): def __eq__(self, value): """Return self==value.""" @@ -587,11 +633,17 @@ class NestedGIDGIDIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def min(self): ... + def collect(self): + ... + + def max(self): + ... + + def min(self): + ... + +class NestedBoolIterable(object): -class NestedBoolIterable(object): def __eq__(self, value): """Return self==value.""" @@ -619,9 +671,11 @@ class NestedBoolIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class U64Iterable(object): -class U64Iterable(object): def __eq__(self, value): """Return self==value.""" @@ -649,13 +703,23 @@ class U64Iterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... - def max(self): ... - def mean(self): ... - def min(self): ... - def sum(self): ... + def collect(self): + ... + + def max(self): + ... + + def mean(self): + ... + + def min(self): + ... + + def sum(self): + ... + +class OptionUtcDateTimeIterable(object): -class OptionUtcDateTimeIterable(object): def __eq__(self, value): """Return self==value.""" @@ -683,9 +747,11 @@ class OptionUtcDateTimeIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class ArcStringVecIterable(object): -class ArcStringVecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -713,9 +779,11 @@ class ArcStringVecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... + +class NestedArcStringVecIterable(object): -class NestedArcStringVecIterable(object): def __eq__(self, value): """Return self==value.""" @@ -743,4 +811,5 @@ class NestedArcStringVecIterable(object): def __repr__(self): """Return repr(self).""" - def collect(self): ... + def collect(self): + ... 
diff --git a/python/python/raphtory/node_state/__init__.pyi b/python/python/raphtory/node_state/__init__.pyi index 456f7240dd..469a550b2e 100644 --- a/python/python/raphtory/node_state/__init__.pyi +++ b/python/python/raphtory/node_state/__init__.pyi @@ -23,42 +23,9 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = [ - "NodeGroups", - "DegreeView", - "NodeStateUsize", - "NodeStateU64", - "NodeStateOptionI64", - "IdView", - "NodeStateGID", - "EarliestTimeView", - "LatestTimeView", - "NameView", - "NodeStateString", - "EarliestDateTimeView", - "LatestDateTimeView", - "NodeStateOptionDateTime", - "HistoryView", - "EdgeHistoryCountView", - "NodeStateListI64", - "HistoryDateTimeView", - "NodeStateOptionListDateTime", - "NodeTypeView", - "NodeStateOptionStr", - "NodeStateListDateTime", - "NodeStateWeightedSP", - "NodeStateF64", - "NodeStateNodes", - "NodeStateReachability", - "NodeStateListF64", - "NodeStateMotifs", - "NodeStateHits", - "NodeStateSEIR", - "NodeLayout", - "NodeStateF64String", -] - -class NodeGroups(object): +__all__ = ['NodeGroups', 'DegreeView', 'NodeStateUsize', 'NodeStateU64', 'NodeStateOptionI64', 'IdView', 'NodeStateGID', 'EarliestTimeView', 'LatestTimeView', 'NameView', 'NodeStateString', 'EarliestDateTimeView', 'LatestDateTimeView', 'NodeStateOptionDateTime', 'HistoryView', 'EdgeHistoryCountView', 'NodeStateListI64', 'HistoryDateTimeView', 'NodeStateOptionListDateTime', 'NodeTypeView', 'NodeStateOptionStr', 'NodeStateListDateTime', 'NodeStateWeightedSP', 'NodeStateF64', 'NodeStateNodes', 'NodeStateReachability', 'NodeStateListF64', 'NodeStateMotifs', 'NodeStateHits', 'NodeStateSEIR', 'NodeLayout', 'NodeStateF64String'] +class NodeGroups(object): + def __bool__(self): """True if self else False""" @@ -101,7 +68,7 @@ class NodeGroups(object): Iterator[Tuple[Any, GraphView]]: Iterator over subgraphs with corresponding value """ -class DegreeView(object): +class DegreeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -263,9 +230,7 @@ class DegreeView(object): DegreeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -420,12 +385,7 @@ class DegreeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -614,7 +574,8 @@ class DegreeView(object): Optional[int]: """ -class NodeStateUsize(object): +class NodeStateUsize(object): + def __eq__(self, value): """Return self==value.""" @@ -807,7 +768,8 @@ class NodeStateUsize(object): Iterator[int]: Iterator over values """ -class NodeStateU64(object): +class NodeStateU64(object): + def __eq__(self, value): """Return self==value.""" @@ -992,7 +954,8 @@ class NodeStateU64(object): Iterator[int]: Iterator over values """ -class NodeStateOptionI64(object): +class NodeStateOptionI64(object): + def __eq__(self, value): """Return self==value.""" @@ -1034,9 +997,7 @@ class NodeStateOptionI64(object): NodeStateOptionI64: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Optional[int]] = None - ) -> Optional[Optional[int]]: + def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: """ Get value for node @@ -1170,7 +1131,7 @@ class NodeStateOptionI64(object): Iterator[Optional[int]]: Iterator over values """ -class IdView(object): +class IdView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1356,7 +1317,8 @@ class IdView(object): Iterator[GID]: Iterator over values """ -class NodeStateGID(object): +class NodeStateGID(object): + def __eq__(self, value): """Return self==value.""" @@ -1524,7 +1486,7 @@ class NodeStateGID(object): Iterator[GID]: Iterator over values """ -class EarliestTimeView(object): +class EarliestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1686,9 +1648,7 @@ class EarliestTimeView(object): EarliestTimeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -1707,9 +1667,7 @@ class EarliestTimeView(object): WindowSet: A `WindowSet` object. """ - def get( - self, node: NodeInput, default: Optional[Optional[int]] = None - ) -> Optional[Optional[int]]: + def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: """ Get value for node @@ -1837,12 +1795,7 @@ class EarliestTimeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -2022,7 +1975,7 @@ class EarliestTimeView(object): Optional[int]: """ -class LatestTimeView(object): +class LatestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2184,9 +2137,7 @@ class LatestTimeView(object): LatestTimeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -2205,9 +2156,7 @@ class LatestTimeView(object): WindowSet: A `WindowSet` object. 
""" - def get( - self, node: NodeInput, default: Optional[Optional[int]] = None - ) -> Optional[Optional[int]]: + def get(self, node: NodeInput, default: Optional[Optional[int]] = None) -> Optional[Optional[int]]: """ Get value for node @@ -2335,12 +2284,7 @@ class LatestTimeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -2520,7 +2464,7 @@ class LatestTimeView(object): Optional[int]: """ -class NameView(object): +class NameView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2714,7 +2658,8 @@ class NameView(object): Iterator[str]: Iterator over values """ -class NodeStateString(object): +class NodeStateString(object): + def __eq__(self, value): """Return self==value.""" @@ -2890,7 +2835,7 @@ class NodeStateString(object): Iterator[str]: Iterator over values """ -class EarliestDateTimeView(object): +class EarliestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -3052,9 +2997,7 @@ class EarliestDateTimeView(object): EarliestDateTimeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3073,9 +3016,7 @@ class EarliestDateTimeView(object): WindowSet: A `WindowSet` object. """ - def get( - self, node: NodeInput, default: Optional[Optional[datetime]] = None - ) -> Optional[Optional[datetime]]: + def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: """ Get value for node @@ -3203,12 +3144,7 @@ class EarliestDateTimeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3388,7 +3324,7 @@ class EarliestDateTimeView(object): Optional[int]: """ -class LatestDateTimeView(object): +class LatestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -3550,9 +3486,7 @@ class LatestDateTimeView(object): LatestDateTimeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -3571,9 +3505,7 @@ class LatestDateTimeView(object): WindowSet: A `WindowSet` object. 
""" - def get( - self, node: NodeInput, default: Optional[Optional[datetime]] = None - ) -> Optional[Optional[datetime]]: + def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: """ Get value for node @@ -3701,12 +3633,7 @@ class LatestDateTimeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -3886,7 +3813,8 @@ class LatestDateTimeView(object): Optional[int]: """ -class NodeStateOptionDateTime(object): +class NodeStateOptionDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -3928,9 +3856,7 @@ class NodeStateOptionDateTime(object): NodeStateOptionDateTime: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Optional[datetime]] = None - ) -> Optional[Optional[datetime]]: + def get(self, node: NodeInput, default: Optional[Optional[datetime]] = None) -> Optional[Optional[datetime]]: """ Get value for node @@ -4064,7 +3990,7 @@ class NodeStateOptionDateTime(object): Iterator[Optional[datetime]]: Iterator over values """ -class HistoryView(object): +class HistoryView(object): """A lazy view over node values""" def __eq__(self, value): @@ -4226,9 +4152,7 @@ class HistoryView(object): HistoryView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -4247,9 +4171,7 @@ class HistoryView(object): WindowSet: A `WindowSet` object. """ - def get( - self, node: NodeInput, default: Optional[list[int]] = None - ) -> Optional[list[int]]: + def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: """ Get value for node @@ -4369,12 +4291,7 @@ class HistoryView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -4554,7 +4471,7 @@ class HistoryView(object): Optional[int]: """ -class EdgeHistoryCountView(object): +class EdgeHistoryCountView(object): """A lazy view over node values""" def __eq__(self, value): @@ -4716,9 +4633,7 @@ class EdgeHistoryCountView(object): EdgeHistoryCountView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. 
@@ -4865,12 +4780,7 @@ class EdgeHistoryCountView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5059,7 +4969,8 @@ class EdgeHistoryCountView(object): Optional[int]: """ -class NodeStateListI64(object): +class NodeStateListI64(object): + def __eq__(self, value): """Return self==value.""" @@ -5101,9 +5012,7 @@ class NodeStateListI64(object): NodeStateListI64: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[list[int]] = None - ) -> Optional[list[int]]: + def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: """ Get value for node @@ -5229,7 +5138,7 @@ class NodeStateListI64(object): Iterator[list[int]]: Iterator over values """ -class HistoryDateTimeView(object): +class HistoryDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -5391,9 +5300,7 @@ class HistoryDateTimeView(object): HistoryDateTimeView: The layered view """ - def expanding( - self, step: int | str, alignment_unit: str | None = None - ) -> WindowSet: + def expanding(self, step: int | str, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `step` size using an expanding window. @@ -5412,9 +5319,7 @@ class HistoryDateTimeView(object): WindowSet: A `WindowSet` object. """ - def get( - self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None - ) -> Optional[Optional[list[datetime]]]: + def get(self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None) -> Optional[Optional[list[datetime]]]: """ Get value for node @@ -5534,12 +5439,7 @@ class HistoryDateTimeView(object): Nodes: The nodes """ - def rolling( - self, - window: int | str, - step: int | str | None = None, - alignment_unit: str | None = None, - ) -> WindowSet: + def rolling(self, window: int | str, step: int | str | None = None, alignment_unit: str | None = None) -> WindowSet: """ Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. 
If `alignment_unit` is not "unaligned" and a `step` larger than `window` is provided, some time entries @@ -5719,7 +5619,8 @@ class HistoryDateTimeView(object): Optional[int]: """ -class NodeStateOptionListDateTime(object): +class NodeStateOptionListDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -5761,9 +5662,7 @@ class NodeStateOptionListDateTime(object): NodeStateOptionListDateTime: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None - ) -> Optional[Optional[list[datetime]]]: + def get(self, node: NodeInput, default: Optional[Optional[list[datetime]]] = None) -> Optional[Optional[list[datetime]]]: """ Get value for node @@ -5889,7 +5788,7 @@ class NodeStateOptionListDateTime(object): Iterator[Optional[list[datetime]]]: Iterator over values """ -class NodeTypeView(object): +class NodeTypeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -5949,9 +5848,7 @@ class NodeTypeView(object): NodeStateOptionStr: the computed `NodeState` """ - def get( - self, node: NodeInput, default: Optional[Optional[str]] = None - ) -> Optional[Optional[str]]: + def get(self, node: NodeInput, default: Optional[Optional[str]] = None) -> Optional[Optional[str]]: """ Get value for node @@ -6085,7 +5982,8 @@ class NodeTypeView(object): Iterator[Optional[str]]: Iterator over values """ -class NodeStateOptionStr(object): +class NodeStateOptionStr(object): + def __eq__(self, value): """Return self==value.""" @@ -6127,9 +6025,7 @@ class NodeStateOptionStr(object): NodeStateOptionStr: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Optional[str]] = None - ) -> Optional[Optional[str]]: + def get(self, node: NodeInput, default: Optional[Optional[str]] = None) -> Optional[Optional[str]]: """ Get value for node @@ -6263,7 +6159,8 @@ class NodeStateOptionStr(object): Iterator[Optional[str]]: Iterator over values """ -class NodeStateListDateTime(object): +class NodeStateListDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -6305,9 +6202,7 @@ class NodeStateListDateTime(object): NodeStateListDateTime: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[list[datetime]] = None - ) -> Optional[list[datetime]]: + def get(self, node: NodeInput, default: Optional[list[datetime]] = None) -> Optional[list[datetime]]: """ Get value for node @@ -6433,7 +6328,8 @@ class NodeStateListDateTime(object): Iterator[list[datetime]]: Iterator over values """ -class NodeStateWeightedSP(object): +class NodeStateWeightedSP(object): + def __eq__(self, value): """Return self==value.""" @@ -6464,9 +6360,7 @@ class NodeStateWeightedSP(object): def __repr__(self): """Return repr(self).""" - def get( - self, node: NodeInput, default: Optional[Tuple[float, Nodes]] = None - ) -> Optional[Tuple[float, Nodes]]: + def get(self, node: NodeInput, default: Optional[Tuple[float, Nodes]] = None) -> Optional[Tuple[float, Nodes]]: """ Get value for node @@ -6521,7 +6415,8 @@ class NodeStateWeightedSP(object): Iterator[Tuple[float, Nodes]]: Iterator over values """ -class NodeStateF64(object): +class NodeStateF64(object): + def __eq__(self, value): """Return self==value.""" @@ -6706,7 +6601,8 @@ class NodeStateF64(object): Iterator[float]: Iterator over values """ -class NodeStateNodes(object): +class NodeStateNodes(object): + def __eq__(self, value): """Return self==value.""" @@ -6792,7 +6688,8 @@ class 
NodeStateNodes(object): Iterator[Nodes]: Iterator over values """ -class NodeStateReachability(object): +class NodeStateReachability(object): + def __eq__(self, value): """Return self==value.""" @@ -6823,9 +6720,7 @@ class NodeStateReachability(object): def __repr__(self): """Return repr(self).""" - def get( - self, node: NodeInput, default: Optional[list[Tuple[int, str]]] = None - ) -> Optional[list[Tuple[int, str]]]: + def get(self, node: NodeInput, default: Optional[list[Tuple[int, str]]] = None) -> Optional[list[Tuple[int, str]]]: """ Get value for node @@ -6880,7 +6775,8 @@ class NodeStateReachability(object): Iterator[list[Tuple[int, str]]]: Iterator over values """ -class NodeStateListF64(object): +class NodeStateListF64(object): + def __eq__(self, value): """Return self==value.""" @@ -6911,9 +6807,7 @@ class NodeStateListF64(object): def __repr__(self): """Return repr(self).""" - def get( - self, node: NodeInput, default: Optional[list[float]] = None - ) -> Optional[list[float]]: + def get(self, node: NodeInput, default: Optional[list[float]] = None) -> Optional[list[float]]: """ Get value for node @@ -6968,7 +6862,8 @@ class NodeStateListF64(object): Iterator[list[float]]: Iterator over values """ -class NodeStateMotifs(object): +class NodeStateMotifs(object): + def __eq__(self, value): """Return self==value.""" @@ -7010,9 +6905,7 @@ class NodeStateMotifs(object): NodeStateMotifs: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[list[int]] = None - ) -> Optional[list[int]]: + def get(self, node: NodeInput, default: Optional[list[int]] = None) -> Optional[list[int]]: """ Get value for node @@ -7138,7 +7031,8 @@ class NodeStateMotifs(object): Iterator[list[int]]: Iterator over values """ -class NodeStateHits(object): +class NodeStateHits(object): + def __eq__(self, value): """Return self==value.""" @@ -7180,9 +7074,7 @@ class NodeStateHits(object): NodeStateHits: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Tuple[float, float]] = None - ) -> Optional[Tuple[float, float]]: + def get(self, node: NodeInput, default: Optional[Tuple[float, float]] = None) -> Optional[Tuple[float, float]]: """ Get value for node @@ -7308,7 +7200,8 @@ class NodeStateHits(object): Iterator[Tuple[float, float]]: Iterator over values """ -class NodeStateSEIR(object): +class NodeStateSEIR(object): + def __eq__(self, value): """Return self==value.""" @@ -7350,9 +7243,7 @@ class NodeStateSEIR(object): NodeStateSEIR: The k smallest values as a node state """ - def get( - self, node: NodeInput, default: Optional[Infected] = None - ) -> Optional[Infected]: + def get(self, node: NodeInput, default: Optional[Infected] = None) -> Optional[Infected]: """ Get value for node @@ -7478,7 +7369,8 @@ class NodeStateSEIR(object): Iterator[Infected]: Iterator over values """ -class NodeLayout(object): +class NodeLayout(object): + def __eq__(self, value): """Return self==value.""" @@ -7509,9 +7401,7 @@ class NodeLayout(object): def __repr__(self): """Return repr(self).""" - def get( - self, node: NodeInput, default: Optional[list[float]] = None - ) -> Optional[list[float]]: + def get(self, node: NodeInput, default: Optional[list[float]] = None) -> Optional[list[float]]: """ Get value for node @@ -7566,7 +7456,8 @@ class NodeLayout(object): Iterator[list[float]]: Iterator over values """ -class NodeStateF64String(object): +class NodeStateF64String(object): + def __eq__(self, value): """Return self==value.""" @@ -7597,9 +7488,7 @@ 
class NodeStateF64String(object): def __repr__(self): """Return repr(self).""" - def get( - self, node: NodeInput, default: Optional[Tuple[float, str]] = None - ) -> Optional[Tuple[float, str]]: + def get(self, node: NodeInput, default: Optional[Tuple[float, str]] = None) -> Optional[Tuple[float, str]]: """ Get value for node diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index e235a8dded..cd10944b53 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -23,77 +23,61 @@ import networkx as nx # type: ignore import pyvis # type: ignore from raphtory.iterables import * -__all__ = ["VectorisedGraph", "Document", "Embedding", "VectorSelection"] - -class VectorisedGraph(object): +__all__ = ['VectorisedGraph', 'Document', 'Embedding', 'VectorSelection'] +class VectorisedGraph(object): """VectorisedGraph object that contains embedded documents that correspond to graph entities.""" - def edges_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ - Search the top scoring edges according to `query` with no more than `limit` edges + Search the top similarity scoring edges according to matching a specified `query` with no more than `limit` edges in the result. Args: query (str | list): The text or the embedding to score against. limit (int): The maximum number of new edges in the results. - window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: VectorSelection: The vector selection resulting from the search. """ def empty_selection(self): - """Return an empty selection of documents""" + """Return an empty selection of entities.""" - def entities_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def entities_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ - Search the top scoring entities according to `query` with no more than `limit` entities. + Search the top similarity scoring entities according to matching a specified `query` with no more than `limit` entities in the result. Args: query (str | list): The text or the embedding to score against. limit (int): The maximum number of new entities in the result. - window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: VectorSelection: The vector selection resulting from the search. """ - def nodes_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ - Search the top scoring nodes according to `query` with no more than `limit` nodes. 
+ Search the top similarity scoring nodes according to matching a specified `query` with no more than `limit` nodes in the result. Args: query (str | list): The text or the embedding to score against. limit (int): The maximum number of new nodes in the result. - window (Tuple[int | str, int | str], optional): The window where documents need to belong to in order to be considered. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: VectorSelection: The vector selection resulting from the search. """ -class Document(object): +class Document(object): """ A document corresponding to a graph entity. Used to generate embeddings. Args: content (str): The document content. - life (int | Tuple[int, int], optional): the optional lifespan for the document (single value + life (int | Tuple[int, int], optional): The optional lifespan of the document. A single value corresponds to an event, a tuple corresponds to a - window). + window. """ def __repr__(self): @@ -105,32 +89,34 @@ class Document(object): The document content. Returns: - str: + str: Content of the document. """ @property def embedding(self) -> Optional[Embedding]: """ - The embedding. + The embedding of the document. Returns: - Optional[Embedding]: The embedding for the document if it was computed. + Optional[Embedding]: The embedding of the document if it was computed. """ @property def entity(self) -> Optional[Any]: """ - The entity corresponding to the document. + The graph entity corresponding to the document. Returns: Optional[Any]: """ -class Embedding(object): +class Embedding(object): + def __repr__(self): """Return repr(self).""" -class VectorSelection(object): +class VectorSelection(object): + def add_edges(self, edges: list) -> None: """ Add all the documents associated with the specified `edges` to the current selection. @@ -159,13 +145,13 @@ class VectorSelection(object): def append(self, selection: VectorSelection) -> VectorSelection: """ - Add all the documents in `selection` to the current selection. + Add all the documents in a specified `selection` to the current selection. Args: selection (VectorSelection): Selection to be added. Returns: - VectorSelection: The selection with the new documents. + VectorSelection: The combined selection. """ def edges(self) -> list[Edge]: @@ -176,51 +162,39 @@ class VectorSelection(object): list[Edge]: List of edges in the current selection. """ - def expand( - self, hops: int, window: Optional[Tuple[int | str, int | str]] = None - ) -> None: + def expand(self, hops: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: """ - Add all the documents `hops` hops away to the selection + Add all the documents a specified number of `hops` away from the selection. - Two documents A and B are considered to be 1 hop away of each other if they are on the same - entity or if they are on the same node/edge pair. Provided that, two nodes A and C are n - hops away of each other if there is a document B such that A is n - 1 hops away of B and B + Two documents A and B are considered to be 1 hop away from each other if they are on the same + entity or if they are on the same node/edge pair. Provided that two nodes A and C are n + hops away of each other if there is a document B such that A is n - 1 hops away of B and B is 1 hop away of C. 
Args: - hops (int): the number of hops to carry out the expansion - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + hops (int): The number of hops to carry out the expansion. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: None: """ - def expand_edges_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> None: + def expand_edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: """ Add the top `limit` adjacent edges with higher score for `query` to the selection - This function has the same behavior as expand_entities_by_similarity but it only considers edges. + This function has the same behaviour as expand_entities_by_similarity but it only considers edges. Args: - query (str | list): the text or the embedding to score against - limit (int): the maximum number of new edges to add - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): The maximum number of new edges to add. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: None: """ - def expand_entities_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> None: + def expand_entities_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: """ Add the top `limit` adjacent entities with higher score for `query` to the selection @@ -235,29 +209,24 @@ class VectorSelection(object): entities or until no more documents are available Args: - query (str | list): the text or the embedding to score against - limit (int): the number of documents to add - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): The number of documents to add. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. Returns: None: """ - def expand_nodes_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> None: + def expand_nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> None: """ Add the top `limit` adjacent nodes with higher score for `query` to the selection This function has the same behaviour as expand_entities_by_similarity but it only considers nodes. Args: - query (str | list): the text or the embedding to score against - limit (int): the maximum number of new nodes to add - window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered + query (str | list): The text or the embedding to score against. + limit (int): The maximum number of new nodes to add. + window (Tuple[int | str, int | str], optional): The window that documents need to belong to in order to be considered. 
Returns: None: @@ -273,7 +242,7 @@ class VectorSelection(object): def get_documents_with_scores(self) -> list[Tuple[Document, float]]: """ - Returns the documents alongside their scores present in the current selection. + Returns the documents present in the current selection alongside their scores. Returns: list[Tuple[Document, float]]: List of documents and scores. From 08f0c508fb0b55ec80a46fffb18a9375d543c05f Mon Sep 17 00:00:00 2001 From: James Baross Date: Thu, 27 Nov 2025 11:50:47 +0000 Subject: [PATCH 12/13] Address review comments --- docs/user-guide/algorithms/6_vectorisation.md | 2 +- raphtory/src/python/packages/vectors.rs | 6 +++--- raphtory/src/python/types/wrappers/document.rs | 6 ------ 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md index 4e7fdbec58..5a362ca230 100644 --- a/docs/user-guide/algorithms/6_vectorisation.md +++ b/docs/user-guide/algorithms/6_vectorisation.md @@ -24,7 +24,7 @@ When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically The templates for entity documents follow a subset of [Jinja](https://jinja.palletsprojects.com/en/stable/templates/) and graph attributes and properties are exposed so that you can use them in template expressions. -Most attributes of graph entities are exposed and can be used in Jinja expressions. The nesting of attributes reflects the Python interface and the final result of any chain such as `properties.prop_name` or `src.name` should be a string. +Most attributes of graph entities are exposed and can be used in Jinja expressions. The nesting of attributes reflects the Python interface and the final result of any chain such as `properties.prop_name` or `src.name` should be a standard python type. For datetime values, Raphtory converts them into milliseconds since the Unix epoch by default and provides an optional `datetimeformat` function to convert this to a human readable format. ## Retrieve documents diff --git a/raphtory/src/python/packages/vectors.rs b/raphtory/src/python/packages/vectors.rs index 0545a08860..4e46625913 100644 --- a/raphtory/src/python/packages/vectors.rs +++ b/raphtory/src/python/packages/vectors.rs @@ -218,7 +218,7 @@ impl PyVectorisedGraph { self.0.empty_selection() } - /// Search the top similarity scoring entities according to matching a specified `query` with no more than `limit` entities in the result. + /// Perform a similarity search between each entity's associated document and a specified `query`. Returns a number of entities up to a specified `limit` ranked in descending order of similarity score. /// /// Args: /// query (str | list): The text or the embedding to score against. @@ -240,7 +240,7 @@ impl PyVectorisedGraph { .entities_by_similarity(&embedding, limit, translate_window(window))?) } - /// Search the top similarity scoring nodes according to matching a specified `query` with no more than `limit` nodes in the result. + /// Perform a similarity search between each node's associated document and a specified `query`. Returns a number of nodes up to a specified `limit` ranked in descending order of similarity score. /// /// Args: /// query (str | list): The text or the embedding to score against. @@ -262,7 +262,7 @@ impl PyVectorisedGraph { .nodes_by_similarity(&embedding, limit, translate_window(window))?) } - /// Search the top similarity scoring edges according to matching a specified `query` with no more than `limit` edges in the result. 
+    /// Perform a similarity search between each edge's associated document and a specified `query`. Returns a number of edges up to a specified `limit` ranked in descending order of similarity score.
     ///
     /// Args:
     ///     query (str | list): The text or the embedding to score against.
diff --git a/raphtory/src/python/types/wrappers/document.rs b/raphtory/src/python/types/wrappers/document.rs
index 17000e8366..652cd65e90 100644
--- a/raphtory/src/python/types/wrappers/document.rs
+++ b/raphtory/src/python/types/wrappers/document.rs
@@ -6,12 +6,6 @@ use crate::{
 use pyo3::{prelude::*, IntoPyObjectExt};
 
 /// A document corresponding to a graph entity. Used to generate embeddings.
-///
-/// Args:
-///     content (str): The document content.
-///     life (int | Tuple[int, int], optional): The optional lifespan of the document. A single value
-///         corresponds to an event, a tuple corresponds to a
-///         window.
 #[pyclass(name = "Document", module = "raphtory.vectors", frozen)]
 #[derive(Clone)]
 pub struct PyDocument(pub(crate) Document);

From 92297030c2a41edc6c09f661c7875acf673b49bf Mon Sep 17 00:00:00 2001
From: James Baross
Date: Thu, 27 Nov 2025 16:13:08 +0000
Subject: [PATCH 13/13] Add Mini Jinja link

---
 docs/user-guide/algorithms/6_vectorisation.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/user-guide/algorithms/6_vectorisation.md b/docs/user-guide/algorithms/6_vectorisation.md
index 5a362ca230..6346b46aa1 100644
--- a/docs/user-guide/algorithms/6_vectorisation.md
+++ b/docs/user-guide/algorithms/6_vectorisation.md
@@ -22,9 +22,9 @@ When you call [Vectorise()][raphtory.GraphView.vectorise] Raphtory automatically
 
 ### Document templates
 
-The templates for entity documents follow a subset of [Jinja](https://jinja.palletsprojects.com/en/stable/templates/) and graph attributes and properties are exposed so that you can use them in template expressions.
+The templates for entity documents follow a subset of [Jinja](https://jinja.palletsprojects.com/en/stable/templates/), implemented using [Mini Jinja](https://docs.rs/minijinja/latest/minijinja/).
 
-Most attributes of graph entities are exposed and can be used in Jinja expressions. The nesting of attributes reflects the Python interface and the final result of any chain such as `properties.prop_name` or `src.name` should be a standard python type. For datetime values, Raphtory converts them into milliseconds since the Unix epoch by default and provides an optional `datetimeformat` function to convert this to a human readable format.
+Additionally, graph attributes and properties are exposed so that you can use them in template expressions. The nesting of attributes reflects the Python interface, and chains such as `properties.prop_name` or `src.name` follow the same typing as in Python. For `datetime` values, Raphtory converts these into milliseconds since the Unix epoch by default, but provides an optional `datetimeformat` function to convert them to a human-readable format.
 
 ## Retrieve documents